author     William Blevins <wblevins@gmail.com>    2015-03-23 22:48:48 -0400
committer  William Blevins <wblevins@gmail.com>    2015-03-23 22:48:48 -0400
commit     c6d81e0932bbd7b35a2d0c20db7fcf1f773048dc (patch)
tree       033ef7e45dfe205ccdfd308938c1d7802f03d0ef /timings
Updated JNI header directory expectations for tests.
My openjdk install link/dir also contained the architecture.

Example: RPM package java-1.7.0-openjdk-devel-1.7.0.75-2.5.4.0.el6_6.x86_64
created link /usr/lib/jvm/java-1.7.0-openjdk.x86_64

Affected tests:
    test/Java/multi-step.py
    test/Java/swig-dependencies.py
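The affected tests locate the generated JNI headers relative to the JDK install
directory, so they need to tolerate an architecture-suffixed path. A minimal
sketch of such a lookup (the glob pattern and variable name are illustrative,
not taken from the tests themselves):

    import glob

    # Accept /usr/lib/jvm/java-1.7.0-openjdk as well as the arch-suffixed
    # /usr/lib/jvm/java-1.7.0-openjdk.x86_64 link created by the RPM.
    jdk_candidates = sorted(glob.glob('/usr/lib/jvm/java-*-openjdk*'))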
Diffstat (limited to 'timings')
-rw-r--r--  timings/CPPPATH/SConstruct               |  31
-rw-r--r--  timings/CPPPATH/TimeSCons-run.py         |  55
-rw-r--r--  timings/CPPPATH/config.js                |   3
-rw-r--r--  timings/CPPPATH/foo.c                    |  10
-rw-r--r--  timings/CPPPATH/include/foo.h            |   5
-rw-r--r--  timings/ElectricCloud/TimeSCons-run.py   |  94
-rw-r--r--  timings/ElectricCloud/config.js          |   3
-rwxr-xr-x  timings/ElectricCloud/genscons.pl        | 559
-rw-r--r--  timings/JTimer/SConstruct                |  39
-rw-r--r--  timings/JTimer/TimeSCons-run.py          |  64
-rw-r--r--  timings/JTimer/config.js                 |   3
-rw-r--r--  timings/Java/SConstruct                  |  25
-rw-r--r--  timings/Java/TimeSCons-run.py            |  55
-rw-r--r--  timings/Java/config.js                   |   3
-rw-r--r--  timings/README.txt                       | 178
-rw-r--r--  timings/SCons_Bars.py                    | 123
-rw-r--r--  timings/changelog.html                   | 204
-rw-r--r--  timings/graph.html                       | 413
-rw-r--r--  timings/hundred/SConstruct               |  35
-rw-r--r--  timings/hundred/TimeSCons-run.py         |  54
-rw-r--r--  timings/hundred/config.js                |   3
-rw-r--r--  timings/index.html                       | 199
-rw-r--r--  timings/js/common.js                     |  96
-rw-r--r--  timings/js/coordinates.js                | 125
-rw-r--r--  timings/js/plotter.js                    | 336
25 files changed, 2715 insertions, 0 deletions
diff --git a/timings/CPPPATH/SConstruct b/timings/CPPPATH/SConstruct
new file mode 100644
index 000000000..14cc76926
--- /dev/null
+++ b/timings/CPPPATH/SConstruct
@@ -0,0 +1,31 @@
+#
+# __COPYRIGHT__
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+dir_count = int(ARGUMENTS['DIR_COUNT'])
+
+inc_list = [ Dir('inc_%04d' % t) for t in range(dir_count) ]
+inc_list.append(Dir('include'))
+
+env = Environment(CPPPATH = inc_list)
+
+env.Object( 'foo.c' )
diff --git a/timings/CPPPATH/TimeSCons-run.py b/timings/CPPPATH/TimeSCons-run.py
new file mode 100644
index 000000000..b26fafcf2
--- /dev/null
+++ b/timings/CPPPATH/TimeSCons-run.py
@@ -0,0 +1,55 @@
+#
+# __COPYRIGHT__
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+"""
+This configuration times searching long lists of CPPPATH directories.
+
+We create $DIR_COUNT on-disk directories. A single checked-in .h file
+exists in the 'include' directory. The SConstruct sets CPPPATH to a
+list of Dir Nodes for the created directories, followed by 'include'.
+A checked-in .c file #includes the .h file to be found in the last
+directory in the list.
+"""
+
+import TestSCons
+
+# Full-build time of just under 10 seconds on ubuntu-timings slave,
+# as determined by bin/calibrate.py on 9 December 2009:
+#
+# run 1: 2.235: DIR_COUNT=50
+# run 2: 3.976: DIR_COUNT=223
+# run 3: 7.353: DIR_COUNT=560
+# run 4: 9.569: DIR_COUNT=761
+# run 5: 9.353: DIR_COUNT=761
+# run 6: 9.972: DIR_COUNT=813
+# run 7: 9.930: DIR_COUNT=813
+# run 8: 9.983: DIR_COUNT=813
+
+test = TestSCons.TimeSCons(variables={'DIR_COUNT':813})
+
+for d in range(test.variables['DIR_COUNT']):
+ test.subdir('inc_%04d' % d)
+
+test.main()
+
+test.pass_test()
diff --git a/timings/CPPPATH/config.js b/timings/CPPPATH/config.js
new file mode 100644
index 000000000..d5ddef388
--- /dev/null
+++ b/timings/CPPPATH/config.js
@@ -0,0 +1,3 @@
+var Config = {
+ 'title': "timings/CPPPATH",
+};
diff --git a/timings/CPPPATH/foo.c b/timings/CPPPATH/foo.c
new file mode 100644
index 000000000..c3c3e4bfa
--- /dev/null
+++ b/timings/CPPPATH/foo.c
@@ -0,0 +1,10 @@
+/*
+__COPYRIGHT__
+*/
+
+#include "foo.h"
+void
+foo(void)
+{
+ ;
+}
diff --git a/timings/CPPPATH/include/foo.h b/timings/CPPPATH/include/foo.h
new file mode 100644
index 000000000..e3c926964
--- /dev/null
+++ b/timings/CPPPATH/include/foo.h
@@ -0,0 +1,5 @@
+/*
+__COPYRIGHT__
+*/
+
+#define FOO 1
diff --git a/timings/ElectricCloud/TimeSCons-run.py b/timings/ElectricCloud/TimeSCons-run.py
new file mode 100644
index 000000000..be75d1211
--- /dev/null
+++ b/timings/ElectricCloud/TimeSCons-run.py
@@ -0,0 +1,94 @@
+#
+# __COPYRIGHT__
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+"""
+This configuration comes from the following blog article:
+
+"How scalable is SCons? The Electric Cloud Blog"
+http://blog.electric-cloud.com/2010/03/08/how-scalable-is-scons/
+
+ The test build consists of (a lot of) compiles and links. Starting
+ from the bottom, we have N C files each with a unique associated
+ header file. The C files and headers were spread across
+ N/500 directories in order to eliminate filesystem scalability
+ concerns. Both the C files and the header files are trivial: the
+ header only includes stdio.h; the C file includes the associated
+ header and a second, shared header, then defines a trivial
+ function. Objects are collected into groups of 20 and stored into
+ a standard archive. Every 20th object is linked into an executable
+ along with the archive.
+
+The original ElectricCloud implementation is captured in genscons.pl,
+and we just call that script to generate the configuration in a
+"sconsbld" subdirectory.
+"""
+
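+# For orientation, each generated directory's SConstruct ends up containing
+# entries of roughly this shape (a hand-written sketch of genscons.pl output
+# for the top-level "sconsbld" directory; the file names are illustrative):
+#
+#     env = Environment(ENV = os.environ)
+#     env.Append(CPPPATH = ['./lup000_sconsbld'])
+#     env.Append(CPPPATH = ['./lup001_sconsbld'])
+#     env.Append(LIBPATH = ['.'])
+#     env.Library('f00000_sconsbld',
+#                 ['f00000_sconsbld.c', ..., 'f00019_sconsbld.c'])
+#     env.Program('f00000_sconsbld', LIBS = ['f00000_sconsbld'])
+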
+import TestSCons
+
+# The values here were calibrated by hand on the ubuntu-timings slave,
+# because the configurations generated by genscons.pl only work if
+# the FILES_PER_DIRECTORY value is a multiple of the COMPILES_GROUPED
+# value, and it didn't seem worth automating that manipulation.
+#
+# The key value below is FILES_PER_DIRECTORY; the other values match
+# the default from the genscons.pl file. With the values below,
+# it creates a two-deep hierarchy of a single directory with three
+# subdirectories. Each directory (both parent and subdirectories)
+# contains sixty source files (each of which includes a .h file)
+# that are built into three libraries containing twenty object files
+# each, which are then linked into executables.
+#
+# As of r5143 on 17 August 2010, a value of 60 FILES_PER_DIRECTORY
+# performs a full build in 82.5 seconds on the ubuntu-timings slave.
+# That's more than our "normal" target of 10 seconds or so for the
+# full build, but building anything less than three libraries per
+# directory feels like it makes the test too trivial.
+
+import os
+
+test = TestSCons.TimeSCons(variables={
+ 'NUMBER_OF_LEVELS' : 2,
+ 'DIRECTORIES_PER_LEVEL' : 3,
+ 'FILES_PER_DIRECTORY' : 60,
+ 'LOOKUPS_PER_SOURCE' : 2,
+ 'COMPILES_GROUPED' : 20,
+},
+ calibrate=['FILES_PER_DIRECTORY'])
+
+arguments = [
+ '-l %s' % test.variables['NUMBER_OF_LEVELS'],
+ '-d %s' % test.variables['DIRECTORIES_PER_LEVEL'],
+ '-f %s' % test.variables['FILES_PER_DIRECTORY'],
+ '-g %s' % test.variables['COMPILES_GROUPED'],
+ '-u %s' % test.variables['LOOKUPS_PER_SOURCE'],
+]
+
+test.run(program=test.workpath('genscons.pl'), arguments=' '.join(arguments))
+# This print is not for debugging; leave it alone!
+# We want to display the output from genscons.pl's generation of the build
+# configuration, so the buildbot logs contain more info.
+print test.stdout()
+
+test.main(chdir='sconsbld')
+
+test.pass_test()
diff --git a/timings/ElectricCloud/config.js b/timings/ElectricCloud/config.js
new file mode 100644
index 000000000..d5ddef388
--- /dev/null
+++ b/timings/ElectricCloud/config.js
@@ -0,0 +1,3 @@
+var Config = {
+ 'title': "timings/ElectricCloud",
+};
diff --git a/timings/ElectricCloud/genscons.pl b/timings/ElectricCloud/genscons.pl
new file mode 100755
index 000000000..02886c032
--- /dev/null
+++ b/timings/ElectricCloud/genscons.pl
@@ -0,0 +1,559 @@
+#!/usr/bin/perl
+#
+# genscons.pl
+#
+# This script generates a build tree with $ndirs + 1 directories, containing
+# $nfils source files each, and both SConstruct files and non-recursive
+# Makefiles to build the tree.
+#
+# Copyright (c) 2010 Electric Cloud, Inc.
+# All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+# * Neither the name of Electric Cloud nor the names of its employees may
+# be used to endorse or promote products derived from this software
+# without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+
+# These settings will generate 2,000 total C files, grouped in blocks of 20,
+# each of which does a couple of #includes.
+
+$nlvls = 2; $ndirs = 3; $nfils = 500; $nlups = 2; $group = 20;
+
+$rmdel = "rm -f";
+$OBJ = ".o";
+
+# Check Variables settings
+
+#if ( ($nfils % $group) != 0) {
+# die "ERROR: The number of files ($nfils) must be a multiple of the group size ($group)";
+#}
+
+sub init() {
+ use Getopt::Std;
+ my $opt_string = 'd:f:g:l:u:h';
+ getopts ( "$opt_string", \%opt) or usage ();
+
+ &usage() if $opt{h};
+
+ $ndirs = $opt{d} if $opt{d};
+ $nfils = $opt{f} if $opt{f};
+ $group = $opt{g} if $opt{g};
+ $nlvls = $opt{l} if $opt{l};
+ $nlups = $opt{u} if $opt{u};
+
+ return 0;
+}
+
+sub usage () {
+ print STDERR << "EOF";
+
+usage: $0 [-l Levels] [-d Dirs] [-f Files] [-g Grouping] [-u Lookups]
+ [-h]
+
+-l Levels : number of levels of directories (default $nlvls)
+-d Dirs : number of directories at each level (default $ndirs)
+-f Files : number of source files per directory (default $nfils)
+-g Grouping : compile in groups of Grouping files (default $group)
+-u Lookups : number of lookups per source file (default $nlups)
+
+-h : this help message
+
+You can edit the default values in genscons.pl
+
+EOF
+ exit;
+}
+
+# fmt
+#
+# Adds commas to long numbers to make them more readable.
+sub fmt {
+ my ($value) = @_;
+ my $running = 1;
+ while ($running) {
+ $value =~ s/([0-9])([0-9]{3})(?![0-9])/\1,\2/g;
+ $running = ($1 =~ /[0-9]/);
+ }
+ return $value;
+}
+
+# gen_incfile
+#
+# Generate a generic include file to keep the compiler busy.
+sub gen_incfile {
+ my ($basedir, $filename, $idx) = @_;
+
+ open (INC, "> $filename") || die "Cannot open $filename for output.";
+ print INC "#ifndef $filname[$idx]\n"
+ . "#define $filname[$idx] \"$basedir\"\n\n"
+ . "#include \"stdio.h\"\n\n";
+ print INC "#endif\n";
+ close (INC);
+}
+
+# gen_cfile
+#
+# Generate a distinct C file to keep the compiler busy.
+sub gen_cfile {
+ my ($basedir, $filename, $idx) = @_;
+
+ open (CFILE, "> $basedir/$filename")
+ || die "Cannot open $basedir/$filename for output";
+
+ $buff = "#include <stdlib.h>\n";
+ $buff .= "#include <$filname[$idx].h>\n";
+
+ $buff .= "#include <omega.h>\n";
+ print CFILE $buff;
+
+ if ($group == 1) {
+ print CFILE "main (int argc, char * argv[]) {\n"
+ . "\tint i, mb_out;\n"
+ . "\tprintf (\"I am $basedir/%s\\n\", \"$filname[$idx]\""
+ . ");\n"
+ . "\treturn (0);\n}\n";
+ }
+ elsif ( ($group - ($fil[$idx] % $group)) == 1) {
+ print CFILE "printr_$filname[$idx] (char * fname) {\n"
+ . " printf (\"I am $basedir/%s\\n\", fname);\n"
+ . " return (0);\n}\n";
+ }
+ elsif ( ($fil[$idx] % $group) == 0) {
+ $idx2 = $fil[$idx] + 1;
+ print CFILE "extern int printr_$file[$idx2] (char * fname);\n"
+ . "main (int argc, char * argv[]) {\n"
+ . "\tint i, mb_out;\n";
+
+ print CFILE "\tprintr_$file[$idx2] (\"$filname[$idx]\");\n"
+ . "\n"
+ . "\tmb_out = 0;\n"
+ . "\tif (argc > 1) {\n"
+ . "\t\tmb_out = atoi (argv[1]);\n"
+ . "\t}\n"
+ . "\tfor (i = 0; i < (mb_out * 16000); i++) {\n"
+ . "\t\tprintf (\"%07d 9a123456789b123456789c12345"
+ . "6789d123456789e123456789f12345678\\n\", i);\n"
+ . "\t}\n"
+ . "\texit (0);\n}\n";
+ }
+ else {
+ $idx2 = $fil[$idx] + 1;
+ print CFILE "extern int printr_$file[$idx2] (char * fname);\n"
+ . "printr_$filname[$idx] (char * fname) {\n"
+ . " printr_$file[$idx2] (fname);\n"
+ . " return (0);\n}\n";
+ }
+ close (CFILE);
+}
+
+# mkdirs
+#
+# Recursive function for generating directories full of files to build, and
+# the makefiles that go with them.
+#
+sub mkdirs {
+ my ($idx, $basedir, $thisLvl) = @_;
+
+ if ( ! -d $basedir ) {
+ mkdir $basedir || die "Cannot create folder $basedir: $!";
+ }
+
+ $relpath[$idx] = substr ($basedir, 8); # assumed top dir is "sconsbld"
+ if ("$relpath[$idx]" eq "") {
+ $relpath[$idx] = ".";
+ $basestr = $basedir;
+ $foo = "";
+ $libdir = ".";
+ }
+ else {
+ $basestr = $relpath[$idx];
+ $basestr =~ s|/|_|g;
+ $foo = substr($basedir,9) . "/";
+ $libdir = substr($basedir,9);
+ }
+ $bstr[$idx] = $basestr;
+
+ $dirstr = $basedir;
+ $dirstr =~ s|/|_|g;
+
+# $basedir is $relpath[$idx] with "sconsbld/" prepended so
+# $dirstr is $basestr with "sconsbld_" prepended.
+
+ $cdstr = ".";
+ for ($cdidx = 1; $cdidx < $thisLvl; $cdidx++) { $cdstr .= "/.."; }
+
+ $thissc[$idx] = "$basedir/SConstruct";
+ $thismk[$idx] = "$basedir/Makefile";
+ $fzero[$idx] = "$basedir/file0";
+
+ open (SC, "> $thissc[$idx]")
+ || die "Cannot open $thissc[$idx] for output: $!";
+ open (MK, "> $thismk[$idx]")
+ || die "Cannot open $thismk[$idx] for output: $!";
+
+ print SC "import os\n"
+ . "env = Environment(ENV = os.environ)\n\n";
+
+ my $cfgpath = "";
+ for (my $junk = 1; $junk < $thisLvl; $junk++) { $cfgpath .= "../"; }
+
+ my $arkive = "archive";
+
+ if ($thisLvl == 1) { $mysubdir = "."; }
+ else { $mysubdir = substr ($basedir, 8); }
+
+
+ if (index ($basedir, "/") > 0) {
+ @pieces = split (/\//, $basedir);
+ $fileSuffix = "";
+ for (my $ii =0; $ii <= $#pieces; $ii++) {
+ $fileSuffix .= "_" . $pieces[$ii];
+ }
+ } else {
+ $fileSuffix = "_" . $basedir;
+ }
+
+ for (my $lupsIdx = 0; $lupsIdx < $nlups; $lupsIdx++) {
+ printf SC ("env.Append (CPPPATH = ['./lup%03d$fileSuffix'])\n",
+ $lupsIdx);
+ if ($lupsIdx == 0) {
+ $eq = "=";
+ } else {
+ $eq = "+=";
+ }
+ printf MK ("${foo}%%.o: CPPFLAGS $eq -I${foo}lup%03d$fileSuffix\n",
+ $lupsIdx);
+ }
+ print SC "env.Append (LIBPATH = ['.'])\n\n";
+ print MK "${foo}%: LDFLAGS = -L$libdir\n\n";
+
+ if ($thisLvl == 1) {
+ print SC "\n"
+ . "env.Help(\"\"\"\n"
+ . "This build has parameters:\n"
+ . " number of levels = $nlvls\n"
+ . " directories/level = $ndirs\n"
+ . " cfiles/directory = $nfils\n"
+ . " lookups/source = $nlups\n"
+ . " compiles grouped = $group\n"
+ . "\"\"\")\n";
+
+ print MK "${foo}%.a:\n";
+ print MK "\tar rc \$@ \$^\n";
+ print MK "\tranlib \$@\n\n";
+ print MK "%.o: %.c\n";
+ print MK "\t\$(CC) -MMD -o \$@ -c \$(CPPFLAGS) \$<\n\n";
+ print MK "%: %.o\n";
+ print MK "\t\$(CC) -o \$@ \$< \$(LDFLAGS) -l\$(notdir \$@)\n\n";
+ print MK "CC=gcc\n\n";
+ print MK "all:\n\t\@ps -eo vsz,rss,comm | fgrep make\n\n";
+ }
+ print SC "\n";
+ print MK "\n";
+
+ # Create include directories for doing additional lookups.
+ for (my $ii = 0; $ii < $nlups; $ii++) {
+ $lupDir = sprintf ("lup%03d$fileSuffix", $ii);
+ mkdir "$basedir/$lupDir" || die "Couldn't create $basedir/$lupDir: $!";
+ $totald++;
+ }
+
+ $scfcc = "";
+ $scfar = "";
+ $mkfcc = "";
+ $mkfar = "";
+
+ ###
+ ### generate the .c files and the .h files they include.
+ ### Also generate the corresponding Makefile commands.
+ ###
+ for (my $filidx = 0; $filidx < $nfils; $filidx++) {
+ $file[$filidx] = sprintf ("f%05d$fileSuffix", $filidx);
+ }
+ for ($fil[$idx] = 0; $fil[$idx] < $nfils; $fil[$idx]++) {
+ $filname[$idx] = "$file[$fil[$idx]]";
+
+ $nextnum = substr ($filname[$idx], 1, 5);
+
+ if ($group == 1) {
+#
+# Even when there are no groups, pre-compiled headers
+# still apply.
+#
+ print SC "env.Program ('$filname[$idx].c')\n\n";
+ } # end of $group == 1
+#
+# Compile source files in groups.
+# This removes unique lookups but adds some :: rules.
+#
+ else {
+ if ( ($fil[$idx] % $group) == 0) {
+ if ("$scfcc" ne "") {
+ print SC "$scfcc\n$scfar\n\n";
+ $scfcc = "";
+ $scfar = "";
+ }
+ if ("$mkfcc" ne "") {
+ print MK "$mkfcc\n$mkfar\n\n";
+ $mkfcc = "";
+ $mkfar = "";
+ }
+
+ $groupFilename = "$filname[$idx]";
+ $nextnum = substr ($filname[$idx], 1, 5);
+
+ $scfcc = "env.Program('$filname[$idx]',\n"
+ . "\tLIBS=['$filname[$idx]'])\n";
+
+ $scfar = "env.Library ('$filname[$idx]',\n"
+ . "\t['$filname[$idx].c'";
+
+ $mkfcc = "TARGETS += ${foo}$filname[$idx]\n${foo}$filname[$idx]: ${foo}$filname[$idx].o ${foo}lib$filname[$idx].a\n";
+ $mkfar = "${foo}lib$filname[$idx].a: ${foo}$filname[$idx].o";
+
+ print MK "SRCS += ${foo}$filname[$idx].c\n";
+ $tmpfnam = $filname[$idx];
+ for ($filei = 1; $filei < $group; $filei++) {
+ $tmpfnum = sprintf ("%05d", $nextnum + $filei);
+ substr ($tmpfnam, 1, 5) = $tmpfnum;
+
+ $scfar .= ",\n\t '$tmpfnam.c'";
+ $mkfar .= " ${foo}$tmpfnam.o";
+ print MK "SRCS += ${foo}$tmpfnam.c\n";
+ }
+ $scfar .= "])\n\n";
+ $mkfar .= "\n";
+ }
+
+ } # end of handling of compiles for $group > 1
+
+ gen_incfile($basedir, "$basedir/$lupDir/$filname[$idx].h", $idx);
+
+ if ($fil[$idx] == 0) {
+ open (INC, "> $basedir/$lupDir/omega.h")
+ || die "Cannot open $basedir/$lupDir/omega.h for output.";
+ print INC "// comment in dummy file.\n";
+ close (INC);
+ }
+
+ gen_cfile($basedir, "$filname[$idx].c", $idx);
+ } # end of generation of source files and header files
+
+ if ($group > 1 && "$scfcc" ne "") {
+#
+# create makefile commands for the leftover files
+#
+ print SC "$scfcc\n$scfar\n\n";
+ print MK "$mkfcc\n$mkfar\n\n";
+ }
+
+ close (SC);
+ close (MK);
+
+ # Recurse and create more subdirectories and their contents.
+ if ($thisLvl < $nlvls) {
+ $allsubs[$idx] = "";
+ for ($dir[$idx] = 0; $dir[$idx] < $ndirs; $dir[$idx]++) {
+ $dirname[$idx] = sprintf ("d${thisLvl}_%d", $dir[$idx]);
+ $allsubs[$idx] .= "$dirname[$idx].subdir ";
+#
+# divide the subdirectories into 2 lists
+# The two lists are/can be treated differently in Makefile.util
+#
+ if ($dir[$idx] < ($ndirs / 2)) {
+ if ("$dirs1[$idx]" eq "") { $dirs1[$idx] = "$dirname[$idx]"; }
+ elsif (index ($dirs1[$idx], $dirname[$idx]) < 0) {
+ $dirs1[$idx] .= " $dirname[$idx]";
+ }
+ }
+ else {
+ if ("$dirs2[$idx]" eq "") { $dirs2[$idx] = "$dirname[$idx]"; }
+ elsif (index ($dirs2[$idx], $dirname[$idx]) < 0) {
+ $dirs2[$idx] .= " $dirname[$idx]";
+#
+# The preceding elsif should really just be
+# "else" but when nlvls > 2, you start getting repetition
+# of directory names in $dirs1[$idx] and $dirs2[$idx].
+# Rather than figure out where the duplication is coming
+# from, just prevent it.
+#
+ }
+ }
+
+ if ( ! -d "$basedir/$dirname[$idx]") {
+ mkdir "$basedir/$dirname[$idx]" ||
+ die "Couldn't create $basedir/$dirname[$idx]: $!";
+ $totald++;
+ }
+
+ &mkdirs ($idx + 1, "$basedir/$dirname[$idx]", $thisLvl + 1);
+ if ($thisLvl == 1) {
+ print "Finished folder $dirname[$idx] in $basedir at "
+ . `date`;
+ }
+ }
+
+ }
+
+ if ($thisLvl < $nlvls) {
+ open (SC, ">> $thissc[$idx]")
+ || die "Cannot open $thissc[$idx] for append: $!";
+ open (MK, ">> $thismk[$idx]")
+ || die "Cannot open $thismk[$idx] for append: $!";
+
+ print SC "SConscript([";
+
+ if (index ($dirs1[$idx], " ") > 0) {
+ @subdirs = split (/ /, $dirs1[$idx]);
+ for ($i = 0; $i <= $#subdirs; $i++) {
+ print SC "'$subdirs[$i]/SConstruct',\n\t";
+ print MK "include $subdirs[$i]/Makefile\n";
+ }
+ }
+ else {
+ print SC "'$dirs1[$idx]/SConstruct',\n\t";
+ print MK "include $dirs1[$idx]/Makefile\n";
+ }
+ if (index ($dirs2[$idx], " ") > 0) {
+ @subdirs = split (/ /, $dirs2[$idx]);
+ for ($i = 0; $i < $#subdirs; $i++) {
+ print SC "'$subdirs[$i]/SConstruct',\n\t";
+ print MK "include $subdirs[$i]/Makefile\n";
+ }
+ print SC "'$subdirs[$#subdirs]/SConstruct'";
+ print MK "include $subdirs[$#subdirs]/Makefile\n";
+ }
+ else {
+ print SC "'$dirs2[$idx]/SConstruct'";
+ print MK "include $dirs2[$idx]/Makefile\n";
+ }
+ print SC "])\n\n";
+ print SC "\n";
+ print MK "NUL=\n";
+ print MK "SPACE=\$(NUL) \$(NUL)\n";
+ print MK "define nl\n\$(SPACE)\n\$(SPACE)\nendef\n\n";
+ print MK "all: \$(TARGETS)\n\n";
+ print MK "clean:\n";
+ print MK "\t\$(foreach tgt,\$(TARGETS),rm -f \$(tgt)\$(nl))\n";
+ print MK "\tfor n in d1*; do rm -f \$\$n/*.o ; rm -f \$\$n/*.a;done\n";
+ print MK "\trm -f *.o ; rm -f *.a\n\n";
+ print MK "-include \$(SRCS:.c=.d)";
+
+ close (SC);
+ close (MK);
+ }
+ return 0;
+}
+
+$basedir = "sconsbld";
+if ( ! -d $basedir) { mkdir $basedir || die "Couldn't create $basedir: $!"; }
+
+&init ();
+
+$numdirs = 0; # dirs other than include dirs
+for ($i = 0; $i < $nlvls; $i++) { $numdirs += ($ndirs ** $i); }
+
+$totldirs = $numdirs * ($nlups + 1); # dirs including include dirs
+
+# total = ( .c ) + ( .h ) + mkfiles + Makefile.util
+# + Makefile.cfg
+# + readme
+# + omega.h
+# + Makefile.clean
+
+$totlfils = ($nfils * $numdirs) + ($nfils * $numdirs)
+ + $numdirs + 3 + $numdirs;
+
+$totlobjs = $nfils * $numdirs;
+$totlexes = $numdirs * ($nfils / $group);
+
+$totllups = $nfils * $numdirs * $nlups / $group;
+$allfiles = $totlfils + $totlobjs + $totlexes;
+
+# one rule for each group plus overhead of 10/makefile
+$nrules = ($numdirs * $nfils / $group) + ($numdirs * 10);
+
+$txt1 = "Number of levels = $nlvls\n"
+ . "Number of dirs / level = $ndirs\n"
+ . "Number of source files / dir = $nfils\n"
+ . "Number of lookups / source file = $nlups\n"
+ . "Number of compiles grouped = $group\n";
+print $txt1;
+
+print $vartxt;
+ $numMakefiles = 1;
+
+$txt2 = "Expecting:\n"
+ . "\tdirectories: " . fmt($totldirs) . "\n"
+ . "\tsource files: " . fmt($numdirs * $nfils) . "\n"
+ . "\tinclude files: " . fmt($numdirs * ($nfils + 1)) . "\n"
+ . "\tmakefiles: " . fmt($numdirs * $numMakefiles)
+ . " ($numMakefiles per directory)\n"
+ . "\ttotal files: " . fmt($totlfils) . "\n"
+ . "\tlook-ups: >= " . fmt($totllups) . "\n"
+ . "\trules: >= " . fmt($nrules) . "\n";
+print $txt2;
+
+$txt3 = "When the build runs, " . fmt($totlobjs) . " object files & "
+ . fmt($totlexes) . " executable(s)"
+ . "\nwill be created, for a total of " . fmt($allfiles) . " files.\n";
+print $txt3;
+
+# Using local archives the number of conflicts is about the number of compiles
+# which equals the number of archive writes.
+#
+
+if (-d $basedir) {
+ print "Cleaning up from a previous run of this perl script.\n\n";
+ system ("rm -rf $basedir/*");
+}
+
+###
+### Generate README.txt
+###
+$readme = "$basedir/README.txt";
+open (README, "> $readme") || die "Cannot open $readme for output.";
+
+$txt = "\nStarted at " . `date` . "\n";
+print $txt;
+print README $txt
+ . $vartxt
+ . $txt1
+ . $txt2
+ . $txt3 || die "Cannot write txt, vartxt, txt1 etc to README";
+
+###
+### Do the heavy lifting
+###
+
+print "Start writing new files at " . `date` . "......\n";
+
+$basedir0 = $basedir;
+&mkdirs (0, $basedir, 1);
+
+###
+### Summarize the results to the README and the console
+###
+
+$txt = "\nFile creation ended at " . `date` . "\n";
+print $txt; print README $txt || die "Cannot print txt to README";
+
+close (README);
diff --git a/timings/JTimer/SConstruct b/timings/JTimer/SConstruct
new file mode 100644
index 000000000..95764a63a
--- /dev/null
+++ b/timings/JTimer/SConstruct
@@ -0,0 +1,39 @@
+#
+# __COPYRIGHT__
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+target_count = int(ARGUMENTS['TARGET_COUNT'])
+
+env = Environment()
+
+def write_file( env, target, source ):
+ path_target = env.File( target )[0].path
+ outfile = open( path_target, 'w' )
+ outfile.write( 'junk' )
+ outfile.close()
+
+list = []
+for i in range( target_count ):
+ target = 'target_%03d' % i
+ env.Command( target, [], write_file )
+ env.Depends( target, list )
+ list.append( target )
diff --git a/timings/JTimer/TimeSCons-run.py b/timings/JTimer/TimeSCons-run.py
new file mode 100644
index 000000000..7fe1bf408
--- /dev/null
+++ b/timings/JTimer/TimeSCons-run.py
@@ -0,0 +1,64 @@
+#
+# __COPYRIGHT__
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+"""
+This configuration is for timing how we evaluate long chains of
+dependencies, specifically when -j is used.
+
+We set up a chain of $TARGET_COUNT targets that get built from a
+Python function action with no source files (equivalent to "echo junk >
+$TARGET"). Each target explicitly depends on the next target in turn,
+so the Taskmaster will do a deep walk of the dependency graph.
+
+This test case was contributed by Kevin Massey. Prior to revision 1468,
+we had a serious O(N^2) problem in the Taskmaster when handling long
+dependency chains like this. That was fixed by adding reference counts
+to the Taskmaster so it could be smarter about not re-evaluating Nodes.
+"""
+
+import TestSCons
+
+# Full-build time of just under 10 seconds on ubuntu-timings slave,
+# as determined by bin/calibrate.py on 9 December 2009:
+#
+# run 1: 3.211: TARGET_COUNT=50
+# run 2: 11.920: TARGET_COUNT=155
+# run 3: 9.182: TARGET_COUNT=130
+# run 4: 10.185: TARGET_COUNT=141
+# run 5: 9.945: TARGET_COUNT=138
+# run 6: 10.035: TARGET_COUNT=138
+# run 7: 9.898: TARGET_COUNT=137
+# run 8: 9.840: TARGET_COUNT=137
+# run 9: 10.054: TARGET_COUNT=137
+# run 10: 9.747: TARGET_COUNT=136
+# run 11: 9.778: TARGET_COUNT=136
+# run 12: 9.743: TARGET_COUNT=136
+#
+# The fact that this varies so much suggests that it's pretty
+# non-deterministic, which makes sense for a test involving -j.
+
+test = TestSCons.TimeSCons(variables={'TARGET_COUNT':136})
+
+test.main()
+
+test.pass_test()
diff --git a/timings/JTimer/config.js b/timings/JTimer/config.js
new file mode 100644
index 000000000..e1d8f1000
--- /dev/null
+++ b/timings/JTimer/config.js
@@ -0,0 +1,3 @@
+var Config = {
+ 'title': "timings/JTimer",
+};
diff --git a/timings/Java/SConstruct b/timings/Java/SConstruct
new file mode 100644
index 000000000..62945625c
--- /dev/null
+++ b/timings/Java/SConstruct
@@ -0,0 +1,25 @@
+#
+# __COPYRIGHT__
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+env = Environment()
+env.Java('classes', 'src')
diff --git a/timings/Java/TimeSCons-run.py b/timings/Java/TimeSCons-run.py
new file mode 100644
index 000000000..9414d57ca
--- /dev/null
+++ b/timings/Java/TimeSCons-run.py
@@ -0,0 +1,55 @@
+#
+# __COPYRIGHT__
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+"""
+This configuration times simple Java compilation.
+
+We create $JAVA_COUNT on-disk Java files in a 'src' subdirectory,
+and the SConstruct file builds them. That's it.
+"""
+
+import TestSCons
+
+# Full-build time of just under 10 seconds on ubuntu-timings slave,
+# as determined by bin/calibrate.py on 21 May 2010:
+#
+# run 1: 14.564: JAVA_COUNT=100
+# run 2: 9.692: JAVA_COUNT=68
+# run 3: 9.654: JAVA_COUNT=68
+# run 4: 9.635: JAVA_COUNT=68
+
+test = TestSCons.TimeSCons(variables={'JAVA_COUNT':68})
+
+test.subdir('src')
+
+contents = """\
+package src;
+public class j%04d {}
+"""
+
+for d in range(test.variables['JAVA_COUNT']):
+ test.write(['src', 'j%04d.java' % d], contents % d)
+
+test.main()
+
+test.pass_test()
diff --git a/timings/Java/config.js b/timings/Java/config.js
new file mode 100644
index 000000000..c0d688e66
--- /dev/null
+++ b/timings/Java/config.js
@@ -0,0 +1,3 @@
+var Config = {
+ 'title': "timings/Java",
+};
diff --git a/timings/README.txt b/timings/README.txt
new file mode 100644
index 000000000..3c0baffeb
--- /dev/null
+++ b/timings/README.txt
@@ -0,0 +1,178 @@
+# __COPYRIGHT__
+
+This directory contains timing configurations for SCons.
+
+Each configuration exists in a subdirectory. The controlling script
+is named TimeSCons-run.py for the configuration. The TimeSCons-run.py
+scripts use TestSCons.TimeSCons, a subclass of TestSCons.TestSCons (both
+defined in ../QMTest/TestSCons.py), to manage execution of the timing
+runs.
+
+Unlike the TestSCons.TestSCons base class, the TestSCons.TimeSCons
+subclass copies the contents of its containing directory to the temporary
+working directory. (It avoids copying the .svn directory, and any files
+or directories that start with the string "TimeSCons-".) This allows
+the timing configuration files to be checked in directly to our source
+code management system, instead of requiring that they be created from
+in-line data inside the script.
+
+The simplest-possible TimeSCons-run.py script would look like:
+
+ import TestSCons
+ TestSCons.TimeSCons().main()
+
+The above script would end up executing a SConstruct file configuration
+in a temporary directory. The main() method is the standard interface
+for a timing run. See its docstring for precisely what it does.
+
+Although the TestSCons.TimeSCons subclass copies its directory contents to
+a temporary working directory for the timing run, because it is a subclass
+of TestSCons.TestSCons, it *can* also create files or directories from
+in-line data. This is typically done when it's necessary to create
+hundreds of identical input files or directories before running the
+timing test, to avoid cluttering our SCM system with hundreds of otherwise
+meaningless files.
+
+
+STRUCTURE OF A TIMING CONFIGURATION
+===================================
+
+A timing configuration should be in a subdirectory and should contain
+at least the following three files:
+
+ TimeSCons-run.py
+
+ The script that controls the timing run. It looks a lot
+ like the test scripts (since it uses the same infrastructure)
+ except that you instantiate TestSCons.TimeSCons object, not a
+ except that you instantiate a TestSCons.TimeSCons object, not a
+
+ Typically you want to initialize the object with a "variables"
+ dict containing one or more parameters whose values control the
+ scale of the configuration. This would typically be the number
+ of source files, directories to scan, etc. The TimeSCons-run.py
+ script can then use the value of those variables to generate that
+ many copies of input source files, or directories, or what have,
+ from in-line data, instead of having to check in a couple hundred
+ many copies of input source files, or directories, or what have you,
+
+ These variables get passed to the timed SCons invocation as
+ ARGUMENT= arguments on the command line, so the SConstruct
+ file can use them to loop through the right number of files /
+ directories / what have you (see the sketch after this list).
+
+ SConstruct
+
+ This is the actual SCons configuration that gets tested. It has
+ access to the variable(s) that control the configuration as
+ ARGUMENTS from the command line.
+
+ It's possible for the SConstruct file to do additional set up of
+ input files and the like, but in general that should be kept to
+ a minimum. We want what the SConscript file does to be dominated
+ by the actual execution we're timing, not initialization stuff,
+ so most initialization is better left in TimeSCons-run.py.
+
+ config.js
+
+ This gives our buildbot information about the timing configuration
+ (specifically, the title) for display.
+
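+    For example (an illustrative sketch patterned on the CPPPATH
+    configuration in this tree), a TimeSCons-run.py might do:
+
+        import TestSCons
+
+        test = TestSCons.TimeSCons(variables={'DIR_COUNT': 813})
+        for d in range(test.variables['DIR_COUNT']):
+            test.subdir('inc_%04d' % d)
+        test.main()
+        test.pass_test()
+
+    and the matching SConstruct reads the value back out of ARGUMENTS:
+
+        dir_count = int(ARGUMENTS['DIR_COUNT'])
+        inc_list = [Dir('inc_%04d' % t) for t in range(dir_count)]
+        inc_list.append(Dir('include'))
+        env = Environment(CPPPATH=inc_list)
+        env.Object('foo.c')
+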
+Note that it's perfectly acceptable to check in additional files that
+may be necessary for your configuration. They'll get copied to the
+temporary directory used to run the timing.
+
+
+RUNNING YOUR TIMING CONFIGURATION
+=================================
+
+Because the TimeSCons.py module is a subclass of the whole TestSCons
+hierarchy, you use a normal runtest.py invocation to run the timings
+configuration:
+
+ $ python runtest.py timings/Configuration/TimeSCons-run.py
+
+This runs the entire timing configuration, which actually runs SCons
+itself three times:
+
+ 1) First, with the --help option, to exit immediately after
+ reading the SConscript file(s). This allows us to get a
+ rough independent measurement of how much startup cost is
+ involved in this configuration, so that the amount can be
+ discounted from the full-build and rebuild times.
+
+ 2) A full build.
+
+ 3) A rebuild of the full build, which is presumably up-to-date.
+
+When you execute runtest.py from the command line, the output of
+each SCons run is printed on standard output. (Note this means
+that the output can get pretty large if the timing configuration
+involves thousands of files.)
+
+The collected memory and time statistics for each run are printed
+on standard output, each with the prefix "TRACE:". These are the
+lines that the buildbot grabs to collect the timing statistics for
+the graphs available on the web site.
+
+
+CALIBRATING YOUR TIMING CONFIGURATION
+=====================================
+
+One goal we have for timing configurations is that they should take
+about 10 seconds to run on our buildbot timing system, which is an older,
+slower system than most.
+
+Per above, you presumably defined one or more variables that control the
+"size" of your configuration: the number of input files, directories,
+etc. The timing infrastructure actually reports the value of these
+variables in a way that lets us automate the process of adjusting the
+variable values to run within a specified amount of time.
+
+The bin/calibrate.py script will run your configuration repeatedly, adjusting
+the value(s) of the variable(s) that control your configuration until
+it gets three successive runs that take between 9.5 and 10.0 seconds
+(by default; options let you adjust the range):
+
+ $ python bin/calibrate.py timings/MyNewTimingConfiguration/TimeSCons-run.py
+ run 1: 3.124: TARGET_COUNT=50
+ run 2: 11.936: TARGET_COUNT=160
+ run 3: 9.175: TARGET_COUNT=134
+ run 4: 10.489: TARGET_COUNT=146
+ run 5: 9.798: TARGET_COUNT=139
+ run 6: 9.695: TARGET_COUNT=139
+ run 7: 9.670: TARGET_COUNT=139
+ $
+
+If you have multiple variables, it will adjust *all* of the variables
+on each run. In other words, the proportion between your variables will
+remain (relatively) constant.
+
+Of course, this needs to be run on a quiet system for the numbers
+to converge. And what you really need to do before committing a
+configuration is run bin/calibrate.py on the actual system that runs
+our Buildbot timings. For that, see Bill Deegan or Steven Knight.
+
+Once you have "good" values for your variables, put them in your
+TimeSCons-run.py and you should be good to go. Note that we've started a
+convention of also pasting the run output from calibrate.py into comments
+in the TimeSCons-run.py, just to preserve some of the historical context
+that led to certain values being chosen.
+
+
+ADDING A NEW TIMING CONFIGURATION
+=================================
+
+In addition to creating a subdirectory with at least the pieces listed
+above in the "STRUCTURE" section and "CALIBRATING" your variable(s),
+you need to update the following file in this directory:
+
+ index.html
+
+ Add an entry to the test_map dictionary for the subdirectory
+ you just created.
+
+That should be it before checkin. After checkin, one of the Buildbot
+administrators (currently Bill Deegan or Steven Knight) needs to update
+and restart the Buildbot master so that it will start executing the
+build step to run the new timing configuration.
diff --git a/timings/SCons_Bars.py b/timings/SCons_Bars.py
new file mode 100644
index 000000000..2b677e94f
--- /dev/null
+++ b/timings/SCons_Bars.py
@@ -0,0 +1,123 @@
+#
+# __COPYRIGHT__
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+"""
+A quick module for central collection of information about which
+Subversion revisions are important for performance implications.
+"""
+
+class Bars(dict):
+ """
+ Dictionary subclass for mapping revision numbers to labels describing
+ each revision.
+
+ We provide two extensions: a .color attribute (for the default
+ color) and a .gnuplot() method (for returning a list of revisions
+ in the tuple format that scons-time uses to describe vertical bars).
+ """
+ def __init__(self, dict=None, color=None, **kwargs):
+ super(Bars, self).__init__(dict, **kwargs)
+ self.color = color
+ def gnuplot(self, color=None, labels=False, revs=None):
+ if color is None:
+ color = self.color
+ if revs is None:
+ revs = sorted(self.keys())
+ if labels:
+ result = [ (r, color, None, self[r]) for r in revs ]
+ else:
+ result = [ (r, color, None, None) for r in revs ]
+ return tuple(result)
+
+# The Release_Bars dictionary records the Subversion revisions that
+# correspond to each official SCons release.
+
+Release_Bars = Bars(
+ color = 7,
+ dict = {
+ 1232 : '0.96.90',
+ 1344 : '0.96.91',
+ 1435 : '0.96.92',
+ 1674 : '0.96.93',
+ 1765 : '0.96.94',
+ 1835 : '0.96.95',
+ 1882 : '0.96.96',
+ 1901 : '0.97',
+ 2242 : '0.97.0d20070809',
+ 2454 : '0.97.0d20070918',
+ 2527 : '0.97.0d20071212',
+ },
+)
+
+
+# The Revisions_Bars dictionary records the Subversion revisions that
+# correspond to "interesting" changes in timing. This is essentially the
+# global list of interesting changes. Individual timing configurations
+# typically only display bars for a subset of these, the ones that
+# actually affect their configuration.
+#
+# Note that the default behavior of most of the st.conf files is to
+# *not* display the labels for each of these lines, since they're long
+# and verbose. So in practice they function as comments describing the
+# changes that have timing impacts on various configurations.
+
+Revision_Bars = Bars(
+ color = 5,
+ dict = {
+ 1220 : 'Use WeakValueDicts in the Memoizer to reduce memory use.',
+ 1224 : 'Don\'t create a Node for every file we try to find during scan.',
+ 1231 : 'Don\'t pick same-named directories in a search path.',
+ 1241 : 'Optimize out N*M suffix matching in Builder.py.',
+ 1245 : 'Reduce gen_binfo() time for long source lists.',
+ 1261 : 'Fix -j re-scanning built files for implicit deps.',
+ 1262 : 'Match Entries when searching paths for Files or Dirs.',
+ 1273 : 'Store paths in .sconsign relative to target directory.',
+ 1282 : 'Cache result from rel_path().',
+ 1307 : 'Move signature Node translation of rel_paths into the class.',
+ 1346 : 'Give subst logic its own module.',
+ 1349 : 'More efficient checking for on-disk file entries.',
+ 1407 : 'Use a Dir scanner instead of a hard-coded method.',
+ 1433 : 'Remove unnecessary creation of RCS and SCCS Node.Dir nodes.',
+ 1435 : 'Don\'t convert .sconsign dependencies to Nodes until needed.',
+ 1468 : 'Use waiting-Node reference counts to speed up Taskmaster.',
+ 1477 : 'Delay disambiguation of Node.FS.Entry into File/Dir.',
+ 1533 : 'Fix some disambiguation-delay ramifications.',
+ 1655 : 'Reduce unnecessary calls to Node.FS.disambiguate().',
+ 1703 : 'Lobotomize Memoizer.',
+ 1706 : 'Fix _doLookup value-cache misspellings.',
+ 1712 : 'PathList, restore caching of Builder source suffixes.',
+ 1724 : 'Cache Node.FS.find_file() and Node.FS.Dir.srcdir_find_file().',
+ 1727 : 'Cache Executor methods, reduce calls when scanning.',
+ 1752 : 'Don\'t cache Builder source suffixes too early.',
+ 1790 : 'Clean up various module imports (pychecker fixes).',
+ 1794 : 'Un-fix various later-Python-version pychecker "fixes".',
+ 1828 : 'Speed up Builder suffix-matching (SuffixMap).',
+ 2380 : 'The Big Signature Refactoring hits branches/core.',
+ },
+)
+
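+# A typical use from an scons-time st.conf file might look like this sketch
+# (the "vertical_bars" setting name is an assumption, not verified here):
+#
+#     from SCons_Bars import Release_Bars, Revision_Bars
+#
+#     vertical_bars = Release_Bars.gnuplot(labels=True) + \
+#                     Revision_Bars.gnuplot()
+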
+# Local Variables:
+# tab-width:4
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=4 shiftwidth=4:
diff --git a/timings/changelog.html b/timings/changelog.html
new file mode 100644
index 000000000..586ebadc6
--- /dev/null
+++ b/timings/changelog.html
@@ -0,0 +1,204 @@
+<html>
+<head>
+<script src="js/common.js"></script>
+<style type="text/css">
+table {
+ border-collapse: collapse;
+}
+thead {
+ border-top: solid 1px gray;
+ border-left: solid 1px gray;
+}
+tbody {
+ border-top: solid 1px gray;
+ border-bottom: solid 1px gray;
+ border-left: solid 1px gray;
+}
+th {
+ text-align: center;
+ border-right: solid 1px gray;
+}
+td {
+ padding-left: 0.5em;
+ padding-top: 0.3em;
+ padding-bottom: 0.3em;
+ padding-right: 1.4em;
+ border-top: solid 1px gray;
+ vertical-align: top;
+ font-family: monospace;
+}
+form {
+ background-color: lightblue;
+ border: 1px solid gray;
+ padding: 2px;
+}
+iframe#content {
+ border: none;
+ width: 0px;
+ height: 0px;
+}
+/*
+form {
+ position: fixed;
+ left: 0px;
+ top: 0px;
+ width: 100%;
+}
+*/
+</style>
+</head>
+<body>
+<form name="ui">
+ SVN path: <input id="url" type="text" name="url" value="">
+ SVN revision range: <input id="range" type="text" name="range" value="">
+ <input id="mode_text" type="radio" name="mode" value="text">text
+ <input id="mode_html" type="radio" name="mode" value="html">html
+ <input type="submit" value="Show Changelog">
+</form>
+
+<script>
+params = ParseParams();
+
+function fix_text(str, n) {
+ if (str.length > n)
+ return str.substring(0, n);
+
+ for (var i = str.length; i < n; ++i)
+ str = str + ' ';
+ return str;
+}
+
+function get_entries() {
+ return content.contentDocument.getElementsByTagName("logentry");
+}
+
+function get_info(entry) {
+ var r = new Object;
+ r.rev = entry.getAttribute("revision");
+ r.author = entry.getElementsByTagName("author")[0].textContent;
+ r.msg = entry.getElementsByTagName("msg")[0].textContent;
+ r.paths = [];
+ var paths = entry.getElementsByTagName("path")
+ for (var i = 0; i < paths.length; ++i) {
+ r.paths.push({"action" : paths[i].getAttribute("action"),
+ "value" : paths[i].textContent});
+ }
+ return r;
+}
+
+function render_log_callback() {
+ if ("mode" in params && params.mode == "text") {
+ var out = document.createElement("PRE");
+ document.body.appendChild(out);
+
+ var entries = get_entries();
+ for (var i = 0; i < entries.length; ++i) {
+ var info = get_info(entries[i]);
+
+ var msg = info.msg;
+ msg = msg.replace(/\n/g, ' ' );
+ msg = msg.replace(/\t/g, ' ' );
+ while (msg.charAt(0) == ' ')
+ msg = msg.substring(1);
+
+ var msg_clipped = msg.substring(0, 66);
+ if (msg_clipped.length < msg.length)
+ msg_clipped = msg_clipped + "...";
+
+ out.appendChild(document.createTextNode(
+ fix_text(info.rev, 6) + " " +
+ fix_text(info.author, 8) + " " +
+ msg_clipped + "\n"));
+ }
+ } else {
+ var table = document.createElement("TABLE");
+ table.setAttribute("class", "log");
+ document.body.appendChild(table);
+
+ var entries = get_entries();
+ for (var i = 0; i < entries.length; ++i) {
+ var info = get_info(entries[i]);
+
+ var tr = document.createElement("TR");
+ table.appendChild(tr);
+
+ var td, a;
+
+ // revision:
+ td = document.createElement("TD");
+ tr.appendChild(td);
+
+ a = document.createElement("A");
+ a.setAttribute("href", "http://scons.tigris.org/source/browse/scons?view=rev&revision=" + info.rev);
+ a.appendChild(document.createTextNode(info.rev));
+
+ td.appendChild(a);
+
+ // author:
+ td = document.createElement("TD");
+ tr.appendChild(td);
+
+ a = document.createElement("A");
+ a.setAttribute("href", "mailto:" + info.author);
+ a.appendChild(document.createTextNode(info.author));
+
+ td.appendChild(a);
+
+ // details:
+ td = document.createElement("TD");
+ tr.appendChild(td);
+
+ var p = document.createElement("PRE");
+ td.appendChild(p);
+
+ var s = info.msg;
+ p.appendChild(document.createTextNode(s));
+
+ for (var j = 0; j < info.paths.length; ++j) {
+ td.appendChild(document.createTextNode(info.paths[j]["action"] + " - "))
+ var a = document.createElement("A");
+ a.setAttribute("href", "http://scons.tigris.org/source/browse/scons" + info.paths[j]["value"] + "?r1=" + info.rev + "&r2=" + (info.rev - 1) + "&pathrev=" + info.rev);
+ a.appendChild(document.createTextNode(info.paths[j]["value"]));
+ td.appendChild(a);
+ td.appendChild(document.createElement("BR"));
+ }
+ }
+ }
+}
+
+function render_log() {
+ var svn_url = params["url"];
+ var svn_range = params["range"];
+ if (svn_url == undefined || svn_range == undefined)
+ return;
+
+ var url = "http://" + location.host + "/cgi-bin/svn-log?url=http://codf21.jail/svn/" +
+ unescape(svn_url) + "&range=" + unescape(svn_range);
+
+ // global 'content' variable: a hidden iframe used to fetch svn data.
+ content = document.createElement("IFRAME");
+ content.setAttribute("id", "content");
+ content.setAttribute("onload", "render_log_callback()");
+ content.setAttribute("src", url);
+ document.body.appendChild(content);
+
+ var el;
+ if ("mode" in params && params["mode"] == "text") {
+ el = document.getElementById("mode_text");
+ } else {
+ el = document.getElementById("mode_html");
+ }
+ el.setAttribute("checked", "1");
+
+ el = document.getElementById("url");
+ el.setAttribute("value", unescape(svn_url));
+
+ el = document.getElementById("range");
+ el.setAttribute("value", unescape(svn_range));
+}
+
+render_log()
+</script>
+</body>
+</html>
+
diff --git a/timings/graph.html b/timings/graph.html
new file mode 100644
index 000000000..de12ff5b8
--- /dev/null
+++ b/timings/graph.html
@@ -0,0 +1,413 @@
+<html>
+
+<!--
+ Copyright (c) 2006-2009 The Chromium Authors. All rights reserved.
+ Use of this source code is governed by a BSD-style license that can be
+ found in the LICENSE file.
+-->
+
+<!--
+ A brief note on terminology as used here: a "graph" is a plotted screenful
+ of data, showing the results of one type of test: for example, the
+  page-load-time graph. A "trace" is a single line on a graph, showing one
+  set of results for the test: for example, the reference build trace on the
+ page-load-time graph.
+
+ This page plots arbitrary numerical data loaded from files in a specific
+ format. It uses two or more data files, all JSON-encoded:
+
+ graphs.dat: a list of objects, each with these properties: name (the name
+ of a graph) and units (the units for the data to be read by humans).
+ Schematically:
+ [{"name": <graph_name>, "units": <units>}, ...]
+
+ <graphname>-summary.dat: for each of the graphs listed in graphs.dat, the
+ corresponding summary file holds rows of data. Each row of data is an
+ object with several properties:
+ "rev": the revision number for this row of data
+ "traces": an object with several properties of its own. The name of
+ the property corresponds to a trace name, used only as an
+ internal identifier, and the property's value is an array of
+ its measurement and that measurement's standard deviation (or
+ other measurement error).
+ Schematically:
+ {"rev": <rev>,
+ "traces": {<trace_name1>: [<value1>, <stddev1>],
+ <trace_name2>: [<value2>, <stddev2>], ...}
+ }
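+
+  For example (names and numbers below are illustrative, not real data),
+  graphs.dat might contain:
+    [{"name": "Full", "units": "seconds"}, {"name": "Memory", "units": "kbytes"}]
+  and a single row of Full-summary.dat would then look like:
+    {"rev": 5143,
+     "traces": {"full-build": [9.96, 0.05], "up-to-date": [0.42, 0.01]}}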
+-->
+<head>
+<style>
+body {
+ font-family: sans-serif;
+}
+div#output {
+ cursor: pointer;
+}
+div#switcher {
+ cursor: pointer;
+}
+div#switcher a {
+ border-top: 1px solid black;
+ border-left: 1px solid black;
+ padding-left: 0.5em;
+ padding-right: 0.5em;
+}
+canvas.plot {
+ border: 1px solid black;
+}
+div.plot-coordinates {
+ font-family: monospace;
+}
+iframe {
+ display: none;
+ width: 100%;
+ height: 100%;
+ border: none;
+}
+div.selector {
+ border: solid 1px black;
+ cursor: pointer;
+ padding-left: 0.3em;
+ background-color: white;
+}
+div.selector:hover {
+ background-color: rgb(200,200,250);
+}
+div.selected {
+ border-left: none;
+}
+div#selectors {
+ width: 80px;
+ display: none;
+}
+#explain {
+ font-size: 0.75em;
+ font-style: italic;
+ color: rgb(100,100,100);
+}
+</style>
+
+<script src="js/common.js"></script>
+<script src="js/plotter.js"></script>
+<script src="js/coordinates.js"></script>
+<script src="config.js"></script>
+<script>
+Config.source = "http://scons.tigris.org/svn/scons/trunk";
+Config.changeLinkPrefix = "changelog.html?mode=html&range=";
+Config.builder = "TODO";
+Config.buildbotLink = "http://buildbot.scons.org:8010/";
+Config.detailTabs = {'view-change': 'CL'};
+document.title = Config.title + ' - ' + Config.buildslave;
+
+var did_position_details = false;
+var units = 'thing-a-ma-bobs';
+var graph_list = [];
+var first_trace = '';
+
+var params = ParseParams();
+
+function jsonToJs(data) {
+ return eval('(' + data + ')')
+}
+
+function report_error(error) {
+ document.getElementById("output").innerHTML = "<p>" + error + "</p>";
+}
+
+function received_graph_list(data, error) {
+ if (error) {
+ report_error(error);
+ return;
+ }
+ graph_list = jsonToJs(data);
+
+ if (!('graph' in params) || params.graph == '') {
+ if (graph_list.length > 0)
+ params.graph = graph_list[0].name
+ }
+
+ // Add a selection tab for each graph, and find the units for the selected
+ // one while we're at it.
+ tabs = [];
+ for (var index = 0; index < graph_list.length; ++index) {
+ var graph = graph_list[index];
+ tabs.push(graph.name);
+ if (graph.name == params.graph)
+ units = graph.units;
+ }
+ initPlotSwitcher(tabs);
+
+ // Fetch the data for the selected graph.
+ fetch_summary();
+}
+
+function go_to(graph) {
+ params.graph = graph;
+ if (params.graph == '')
+ delete params.graph;
+ window.location.href = MakeURL(params);
+}
+
+function get_url() {
+ new_url = window.location.href;
+ new_url = new_url.replace(/\?lookout/, "?");
+ new_url = new_url.replace(/\&thumbnail/, "");
+ return new_url;
+}
+
+function on_clicked_plot(prev_cl, cl) {
+ if ('lookout' in params) {
+ window.open(get_url());
+ return;
+ }
+
+ // Define sources for detail tabs
+ if ('view-change' in Config.detailTabs) {
+ document.getElementById('view-change').
+ // TODO: The tigris.org source browser only lets us pull up
+ // one revision. That's okay for our current behavior of
+ // timing each revision separately, but if we go back to merging
+ // build requests from multiple revisions, we'll need an
+ // intermediary CGI script.
+ //setAttribute('src', Config.changeLinkPrefix + prev_cl + ':' + cl);
+ setAttribute('src',
+ 'http://scons.tigris.org/source/browse/scons?view=rev&revision=' + cl);
+ }
+ if ('view-pages' in Config.detailTabs) {
+ document.getElementById('view-pages').
+ setAttribute('src', 'details.html?cl=' + cl + '&trace=' + first_trace);
+ }
+ if ('view-coverage' in Config.detailTabs) {
+ document.getElementById('view-coverage').
+ setAttribute('src', Config.coverageLinkPrefix + cl);
+ }
+
+ if (!did_position_details) {
+ position_details();
+ did_position_details = true;
+ }
+}
+
+function received_summary(data, error) {
+ if (error) {
+ report_error(error);
+ return;
+ }
+ // Parse the summary data file.
+ var rows = data.split('\n');
+ var max_rows = rows.length;
+ if ('history' in params && max_rows > params.history) {
+ max_rows = params.history;
+ } else if ('lookout' in params && max_rows > 150) {
+ max_rows = 150;
+ }
+
+ var allTraces = {};
+
+ // graphData[rev] = {trace1:[value, stddev], trace2:[value, stddev], ...}
+ var graphData = {};
+ for (var i = 0; i < max_rows; ++i) {
+ if (!rows[i].length)
+ continue;
+ var row = jsonToJs(rows[i]);
+ var traces = row['traces'];
+ var revision = parseInt(row['rev']);
+ graphData[revision] = traces;
+
+ // Collect unique trace names.
+ for (var traceName in traces)
+ allTraces[traceName] = 1;
+ }
+
+ // Build a list of all the trace names we've seen, in the order in which
+ // they appear in the data file. Although JS objects are not required by
+ // the spec to iterate their properties in order, in practice they do,
+ // because it causes compatibility problems otherwise.
+ var traceNames = [];
+ for (var traceName in allTraces)
+ traceNames.push(traceName);
+
+ first_trace = traceNames[0];
+
+ // Build and numerically sort a list of revision numbers.
+ var revisionNumbers = [];
+ for (var rev in graphData)
+ revisionNumbers.push(rev);
+ revisionNumbers.sort(
+ function(a, b) { return parseInt(a, 10) - parseInt(b, 10) });
+
+ // Build separate ordered lists of trace data.
+ var traceData = {};
+ for (var revIndex = 0; revIndex < revisionNumbers.length; ++revIndex) {
+ var rev = revisionNumbers[revIndex];
+ var revisionData = graphData[rev];
+ for (var nameIndex = 0; nameIndex < traceNames.length; ++nameIndex) {
+ var traceName = traceNames[nameIndex];
+ if (!traceData[traceName])
+ traceData[traceName] = [];
+ if (!revisionData[traceName])
+ traceData[traceName].push([NaN, NaN]);
+ else
+ traceData[traceName].push(revisionData[traceName]);
+ }
+ }
+ var plotData = [];
+ for (var traceName in traceData)
+ plotData.push(traceData[traceName]);
+
+ var plotter = new Plotter(revisionNumbers, plotData, traceNames, units,
+ document.getElementById("output"), true);
+ plotter.onclick = on_clicked_plot;
+ plotter.plot();
+}
+
+function fetch_summary() {
+ if ('graph' in params)
+ file = escape(params.graph) + ".dat"
+ else
+ file = "summary.dat"
+ Fetch(file, received_summary);
+}
+
+function fetch_graph_list() {
+ Fetch("graphs.dat", received_graph_list);
+}
+
+function initPlotSwitcher(tabs) {
+ var switcher = document.getElementById("switcher");
+ for(var i = 0; i < tabs.length; i++) {
+ var anchor = document.createElement("a");
+ anchor.appendChild(document.createTextNode(tabs[i] + " "));
+ anchor.addEventListener("click", goToClosure(tabs[i]), false);
+ switcher.appendChild(anchor);
+ }
+}
+
+function goToClosure(graph) {
+ return function(){go_to(graph)};
+}
+
+function position_details() {
+ var output = document.getElementById("output");
+
+ var win_height = window.innerHeight;
+
+  var views = document.getElementById("views");
+ var selectors = document.getElementById("selectors");
+ selectors.style.display = "block";
+
+ var views_width = output.offsetWidth - selectors.offsetWidth;
+
+ views.style.border = "1px solid black";
+ views.style.width = views_width + "px";
+ views.style.height = (win_height - output.offsetHeight - output.offsetTop -
+ 30) + "px";
+
+ selectors.style.position = "absolute";
+ selectors.style.left = (views.offsetLeft + views_width + 1) + "px";
+ selectors.style.top = views.offsetTop + "px";
+
+ // Change to the first detail tab
+ for (var tab in Config.detailTabs) {
+ change_view(tab);
+ break;
+ }
+}
+
+function change_view(target) {
+ for (var tab in Config.detailTabs) {
+ document.getElementById(tab).style.display =
+ (tab == target ? "block" : "none");
+ }
+}
+
+function init() {
+ // We need to fill the graph list before parsing the params or fetching the
+ // data, so we have a default graph in case none was specified.
+ fetch_graph_list();
+}
+
+window.addEventListener("load", init, false);
+</script>
+</head>
+
+
+<body>
+<div id="header_lookout" align="center">
+ <font style='color: #0066FF; font-family: Arial, serif;
+ font-size: 20pt; font-weight: bold;'>
+ <script>
+ document.write("<a target=\"_blank\" href=\"");
+ document.write(get_url());
+ document.write("\">");
+ if ('header' in params && params.header != '') {
+ document.write(escape(params.header));
+ } else {
+ document.write(Config.title);
+ }
+ document.write("</a>");
+ </script>
+ </font>
+</div>
+
+<div id="header_text">
+<script>
+document.write('<a href="' + Config.buildbotLink + '">SCons buildbot</a>' +
+ ' timings for the <b>' + Config.title + '</b> configuration.')
+if ('graph' in params)
+ document.write(' Displaying values for <b>' + params.graph + '</b>.');
+</script>
+</div>
+
+<div id="explain">
+The vertical axis shows the measured values, and the horizontal
+axis shows the revision number being tested.
+</div>
+<p></p>
+<div id="switcher">
+
+</div>
+<div id="output"></div>
+<div id="details">
+ <div id="views">
+ <script>
+ for (var tab in Config.detailTabs) {
+ document.write("<iframe id=\"" + tab + "\"></iframe>");
+ }
+ </script>
+ </div>
+ <div id="selectors">
+ <script>
+ var firstTab = true;
+ for (var tab in Config.detailTabs) {
+ document.write("<div ");
+ if (firstTab) {
+ firstTab = false;
+ } else {
+ document.write("style=\"border-top: none\" ");
+ }
+ document.write("class=\"selector\" onclick=\"change_view('"
+ + tab + "')\">" + Config.detailTabs[tab] + "</div>");
+ }
+ </script>
+ </div>
+</div>
+<pre id="log"></pre>
+<script>
+if ('lookout' in params) {
+ document.getElementById("switcher").style.display = "none";
+ document.getElementById("details").style.display = "none";
+ document.getElementById("header_text").style.display = "none";
+ document.getElementById("explain").style.display = "none";
+ if ('thumbnail' in params) {
+ document.getElementById("header_lookout").style.display = "none";
+ }
+} else {
+ document.getElementById("header_lookout").style.display = "none";
+}
+</script>
+</body>
+</html>
diff --git a/timings/hundred/SConstruct b/timings/hundred/SConstruct
new file mode 100644
index 000000000..b321d1085
--- /dev/null
+++ b/timings/hundred/SConstruct
@@ -0,0 +1,35 @@
+#
+# __COPYRIGHT__
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+target_count = int(ARGUMENTS['TARGET_COUNT'])
+
+def copy_files( env, target, source ):
+ for t, s in zip(target, source):
+ open(str(t), 'wb').write(open(str(s), 'rb').read())
+
+source_list = ['source_%04d' % t for t in range(target_count)]
+target_list = ['target_%04d' % t for t in range(target_count)]
+
+env = Environment()
+
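+# A single Command() call with list-valued target and source makes every
+# target depend on every source -- presumably the "NxM interaction" that
+# this timing configuration (see TimeSCons-run.py) is meant to measure.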
+env.Command( target_list, source_list, copy_files )
diff --git a/timings/hundred/TimeSCons-run.py b/timings/hundred/TimeSCons-run.py
new file mode 100644
index 000000000..3d5b02f9e
--- /dev/null
+++ b/timings/hundred/TimeSCons-run.py
@@ -0,0 +1,54 @@
+#
+# __COPYRIGHT__
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+"""
+This configuration is for timing how we handle the NxM interaction when
+we build a lot of targets from a lot of source files.
+
+We create a list of $TARGET_COUNT target files that will each be built by
+copying a file from a corresponding list of $TARGET_COUNT source files,
+all through a single Python function action. The source files themselves
+are written up front by this script, each containing just the line "contents".
+"""
+
+import TestSCons
+
+# Full-build time of just under 10 seconds on ubuntu-timings slave,
+# as determined by bin/calibrate.py on 9 December 2009:
+#
+# run 1: 3.124: TARGET_COUNT=50
+# run 2: 11.936: TARGET_COUNT=160
+# run 3: 9.175: TARGET_COUNT=134
+# run 4: 10.489: TARGET_COUNT=146
+# run 5: 9.798: TARGET_COUNT=139
+# run 6: 9.695: TARGET_COUNT=139
+# run 7: 9.670: TARGET_COUNT=139
+
+test = TestSCons.TimeSCons(variables={'TARGET_COUNT':139})
+
+for t in range(test.variables['TARGET_COUNT']):
+    open('source_%04d' % t, 'wb').write('contents\n')
+
+test.main()
+
+test.pass_test()
diff --git a/timings/hundred/config.js b/timings/hundred/config.js
new file mode 100644
index 000000000..63de6dfaf
--- /dev/null
+++ b/timings/hundred/config.js
@@ -0,0 +1,3 @@
+var Config = {
+ 'title': "timings/hundred",
+};
diff --git a/timings/index.html b/timings/index.html
new file mode 100644
index 000000000..79c20393b
--- /dev/null
+++ b/timings/index.html
@@ -0,0 +1,199 @@
+<html>
+ <head>
+ <title>SCons Timings</title>
+ <script src="js/common.js"></script>
+ <script language="javascript">
+ var perf_url = DirName(window.location.href);
+      function DisplayGraph(name, height, width, thumbnail, graph) {
+ var params = ParseParams();
+ var url = perf_url + '/' + name + '/graph.html' + '?lookout';
+ //var debug = url;
+ var debug = '';
+ if ('history' in params) {
+ url += '&history=' + params.history
+ }
+ if (typeof graph == 'undefined') {
+ if ('graph' in params) {
+ graph = params.graph;
+ }
+ url += '&header=';
+ } else if (graph != null) {
+ url += '&header=' + graph;
+ }
+ if (graph != null) {
+ url += '&graph=' + graph;
+ }
+ if (thumbnail) {
+ url += '&thumbnail';
+ }
+ document.write('<td>' + debug + '<iframe' +
+ //' scrolling="no"' +
+                        ' height="' + height + '"' +
+ ' width="' + width + '"' +
+ ' src="' + url + '">' +
+ '</iframe></td>');
+ }
+ </script>
+ </head>
+ <body>
+ <center>
+ <script language="javascript">
+ var params = ParseParams();
+ // TODO: parameterize these in a .dat file
+ var builder_map = {
+ 'Ubuntu': 'ubuntu-timings',
+ };
+ var graphs_map = {
+ 'Performance': 'TimeSCons-elapsed',
+ 'Memory': 'memory-final',
+ };
+ var test_map = {
+ 'CPPPATH': 'CPPPATH',
+ 'ElectricCloud': 'ElectricCloud',
+ 'Java': 'Java',
+ 'JTimer': 'JTimer',
+ 'hundred': 'hundred',
+ };
+ if ('builder' in params) {
+ builder_list = params.builder.split(',');
+ } else {
+ builder_list = Keys(builder_map);
+ }
+ if ('graph' in params) {
+        graphs_list = params.graph.split(',');
+ } else {
+ graphs_list = Keys(graphs_map);
+ }
+ if ('test' in params) {
+ test_list = params.test.split(',');
+ } else {
+ test_list = Keys(test_map);
+ }
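+      // Examples (hypothetical URLs): index.html?test=hundred narrows the
+      // page to a single test's table, and a history=N parameter is passed
+      // through to every embedded graph; with no query parameters the full
+      // overview table is written.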
+ function write_builder_table() {
+ document.write('<table><tr>');
+ for (var tindex=0; tindex < test_list.length; tindex++) {
+ test = test_list[tindex];
+ test_name = test_map[test];
+          DisplayGraph(builder_id + '/' + test, 250, 400, false, "TimeSCons-elapsed");
+          DisplayGraph(builder_id + '/' + test, 250, 400, false, "memory-final");
+ if ((tindex % 3) == 2) {
+ document.write('</tr><tr>');
+ }
+ }
+ document.write('</tr></table>');
+ }
+ function write_test_table() {
+ document.write('<table>');
+ row = [];
+ function write_row(row) {
+ document.write('<tr>');
+ for (var bidx in row) {
+ builder = row[bidx];
+ document.write('<th align="center"><p><br>' +
+ builder + '</p></th>');
+ }
+ document.write('</tr><tr>');
+ for (var bidx in row) {
+ builder_id = builder_map[row[bidx]];
+ DisplayGraph(builder_id + '/' + test, 250, 400, true);
+ }
+ document.write('</tr>');
+ }
+ for (var bindex=0; bindex < builder_list.length; bindex++) {
+ builder = builder_list[bindex];
+ row.push(builder)
+ if ((bindex % 3) == 2) {
+ write_row(row);
+ row = [];
+ }
+ }
+ if (row.length > 0) {
+ write_row(row);
+ }
+ document.write('</table>');
+ }
+ function write_builders_header_row() {
+ document.write('<tr><th>&nbsp;</th>');
+ for (var bindex=0; bindex < builder_list.length; bindex++) {
+ builder = builder_list[bindex];
+ url = MakeURL({'builder':escape(builder)});
+ var s ='<th align="center" colspan="' +
+ graphs_list.length +
+ '">' +
+ '<a href="' + url + '">' +
+ builder +
+ '</a></th>';
+ document.write(s);
+ }
+ document.write('</tr>');
+ }
+ function write_graphs_header_row() {
+ document.write('<tr><th>&nbsp;</th>');
+ for (var bindex=0; bindex < builder_list.length; bindex++) {
+ for (var gindex=0; gindex < graphs_list.length; gindex++) {
+ graph = graphs_list[gindex];
+ document.write('<th align="center">' + graph + '</th>');
+ }
+ }
+ }
+ function write_overview_table() {
+ document.write('<table>');
+ var width = document.body.scrollWidth / (builder_list.length + 2);
+ write_builders_header_row();
+ for (var tindex=0; tindex < test_list.length; tindex++) {
+ // Write a graphs header every four rows for navigability.
+ // TODO: Use more sophisticated freezing of the header rows?
+ if ((tindex % 4) == 0) {
+ write_graphs_header_row();
+ }
+ test = test_list[tindex];
+ test_name = test_map[test];
+ document.write('<tr>');
+ url = MakeURL({'test':escape(test)});
+          document.write('<th valign="middle"><a href="' + url + '">'
+ + test_name + '</a></th>');
+ for (var bindex=0; bindex < builder_list.length; bindex++) {
+ builder = builder_list[bindex];
+ builder_id = builder_map[builder];
+ DisplayGraph(builder_id + '/' + test, 250, width, true, "TimeSCons-elapsed")
+ DisplayGraph(builder_id + '/' + test, 250, width, true, "memory-final")
+ }
+          document.write('<th valign="middle"><a href="' + url + '">'
+ + test_name + '</a></th>');
+ document.write('</tr>');
+ }
+ write_graphs_header_row();
+ write_builders_header_row();
+ document.write('</table>');
+ }
+ function write_header(header_string, url_string) {
+ document.write('<h1><a href="' + window.location.pathname + '">'
+ + 'SCons Timings</a>');
+ if (header_string) {
+ document.write(': ' + header_string);
+ }
+ if ('graph' in params) {
+ document.write(': ' + escape(params.graph))
+ } else {
+ document.write(': overview');
+ }
+ document.write('</h1>');
+ }
+      if ('builder' in params && builder_list.length == 1) {
+ builder = builder_list[0];
+ builder_id = builder_map[builder];
+ write_header(builder, '&builder=' + escape(builder));
+ write_builder_table()
+ } else if (test_list.length == 1) {
+ test = test_list[0];
+ test_name = test_map[test];
+ write_header(test_name, '&test=' + escape(test));
+ write_test_table()
+ } else {
+ write_header('', '');
+ write_overview_table();
+ }
+ </script>
+ </center>
+ </body>
+</html>
diff --git a/timings/js/common.js b/timings/js/common.js
new file mode 100644
index 000000000..80510b32a
--- /dev/null
+++ b/timings/js/common.js
@@ -0,0 +1,96 @@
+/*
+ Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
+ Use of this source code is governed by a BSD-style license that can be
+ found in the LICENSE file.
+*/
+
+/*
+ Common methods for performance-plotting JS.
+*/
+
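+// Issues an asynchronous, cache-busting GET for |url| and calls
+// callback(text, error); |error| is undefined on success.  See, for
+// example, the graphs.dat and summary fetches in graph.html.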
+function Fetch(url, callback) {
+ var r = new XMLHttpRequest();
+ r.open("GET", url, true);
+ r.setRequestHeader("pragma", "no-cache");
+ r.setRequestHeader("cache-control", "no-cache");
+ r.onreadystatechange = function() {
+ if (r.readyState == 4) {
+ var error;
+ var text = r.responseText;
+ if (r.status != 200) {
+ error = url + ": " + r.status + ": " + r.statusText;
+ } else if (! text) {
+ error = url + ": null response";
+ }
+ callback(text, error);
+ }
+ }
+
+ r.send(null);
+}
+
+// Returns the keys of an object.
+function Keys(obj) {
+ result = [];
+ for (key in obj) {
+ result.push(key)
+ }
+ return result
+}
+
+// Returns the "directory name" portion of the string (URL),
+// stripping the last element.
+function DirName(s) {
+ elements = s.split('/')
+ elements.pop()
+ return elements.join('/')
+}
+
+// Returns an Object with properties given by the parameters specified in the
+// URL's query string.
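+// For example (hypothetical URL), "graph.html?graph=memory-final&history=50"
+// yields {graph: "memory-final", history: "50"}.  A valueless parameter such
+// as "?lookout" comes back as the string "undefined", which is why callers
+// test with ('lookout' in params) rather than the value.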
+function ParseParams() {
+ var result = new Object();
+ var s = window.location.search.substring(1).split('&');
+ for (i = 0; i < s.length; ++i) {
+ var v = s[i].split('=');
+ result[v[0]] = unescape(v[1]);
+ }
+ return result;
+}
+
+// Creates the URL constructed from the current pathname and the given params.
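+// For example (hypothetical path), on /timings/index.html,
+// MakeURL({builder: "Ubuntu", test: "hundred"}) returns
+// "/timings/index.html?builder=Ubuntu&test=hundred".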
+function MakeURL(params) {
+ var url = window.location.pathname;
+ var sep = '?';
+ for (p in params) {
+ if (!p)
+ continue;
+ url = url + sep + p + '=' + params[p];
+ sep = '&';
+ }
+ return url;
+}
+
+// Returns a string describing an object, recursively. On the initial call,
+// |name| is optionally the name of the object and |indent| is not needed.
+function DebugDump(obj, opt_name, opt_indent) {
+ var name = opt_name || '';
+ var indent = opt_indent || '';
+ if (typeof obj == "object") {
+ var child = null;
+ var output = indent + name + "\n";
+
+ for (var item in obj) {
+ try {
+ child = obj[item];
+ } catch (e) {
+ child = "<Unable to Evaluate>";
+ }
+ output += DebugDump(child, item, indent + " ");
+ }
+
+ return output;
+ } else {
+ return indent + name + ": " + obj + "\n";
+ }
+}
diff --git a/timings/js/coordinates.js b/timings/js/coordinates.js
new file mode 100644
index 000000000..69cb4c22d
--- /dev/null
+++ b/timings/js/coordinates.js
@@ -0,0 +1,125 @@
+/*
+ Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
+ Use of this source code is governed by a BSD-style license that can be
+ found in the LICENSE file.
+*/
+
+/**
+ * 'Understands' plot data positioning.
+ * @constructor
+ *
+ * @param {Array} plotData data that will be displayed
+ */
+function Coordinates(plotData) {
+ this.plotData = plotData;
+
+ height = window.innerHeight - 16;
+ width = window.innerWidth - 16;
+
+ this.widthMax = width;
+ this.heightMax = Math.min(400, height - 85);
+
+ this.xMinValue = -0.5;
+  this.xMaxValue = (this.plotData[0].length - 1) + 0.5;
+ this.processYValues_();
+}
+
+Coordinates.prototype.processYValues_ = function () {
+ var merged = [];
+ for (var i = 0; i < this.plotData.length; i++)
+ for (var j = 0; j < this.plotData[i].length; j++)
+ merged.push(this.plotData[i][j][0]);
+ var max = Math.max.apply( Math, merged );
+ var min = Math.min.apply( Math, merged );
+
+ // If we have a missing value, find the real max and min the hard way.
+ if (isNaN(min)) {
+ for (var i = 0; i < merged.length; ++i) {
+ if (isNaN(min) || merged[i] < min)
+ min = merged[i];
+ if (isNaN(max) || merged[i] > max)
+ max = merged[i];
+ }
+ }
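+  // Pad the vertical range by 10% of the data spread (or of the maximum,
+  // when all values are equal) so the extremes don't sit on the plot border.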
+ var yd = (max - min) / 10.0;
+ if (yd == 0)
+ yd = max / 10;
+ this.yMinValue = min - yd;
+ this.yMaxValue = max + yd;
+};
+
+/**
+ * Difference between horizontal max min values.
+ */
+Coordinates.prototype.xValueRange = function() {
+ return this.xMaxValue - this.xMinValue;
+};
+
+/**
+ * Difference between vertical max min values.
+ */
+Coordinates.prototype.yValueRange = function() {
+ return this.yMaxValue - this.yMinValue
+};
+
+/**
+ * Converts horizontal data value to pixel value on canvas.
+ * @param {number} value horizontal data value
+ */
+Coordinates.prototype.xPoints = function(value) {
+ return this.widthMax * ((value - this.xMinValue) / this.xValueRange());
+};
+
+/**
+ * Converts vertical data value to pixel value on canvas.
+ * @param {number} value vertical data value
+ */
+Coordinates.prototype.yPoints = function(value) {
+ /* Converts value to canvas Y position in pixels. */
+ return this.heightMax - this.heightMax * (value - this.yMinValue) /
+ this.yValueRange();
+};
+
+/**
+ * Converts X point on canvas to value it represents.
+ * @param {number} position horizontal point on canvas.
+ */
+Coordinates.prototype.xValue = function(position) {
+ /* Converts canvas X pixels to value. */
+ return position / this.widthMax * (this.xValueRange()) + this.xMinValue;
+};
+
+/**
+ * Converts Y point on canvas to value it represents.
+ * @param {number} position vertical point on canvas.
+ */
+Coordinates.prototype.yValue = function(position) {
+ /* Converts canvas Y pixels to value.
+     position is the point's distance in pixels from the top of the canvas.
+ */
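+  // Equivalent to yMinValue + yValueRange() * (heightMax - position) / heightMax,
+  // i.e. the inverse of yPoints() above.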
+ var position = this.heightMax - position;
+ var ratio = parseFloat(this.heightMax / position);
+ return this.yMinValue + this.yValueRange() / ratio;
+};
+
+/**
+ * Converts canvas X pixel to data index.
+ * @param {number} xPosition horizontal point on canvas
+ */
+Coordinates.prototype.dataSampleIndex = function(xPosition) {
+ var xValue = this.xValue(xPosition);
+ var index;
+ if (xValue < 0) {
+ index = 0;
+ } else if (xValue > this.plotData[0].length - 1) {
+ index = this.plotData[0].length - 1;
+ } else {
+ index = xValue.toFixed(0);
+ }
+ return index;
+};
+
+Coordinates.prototype.log = function(val) {
+ document.getElementById('log').appendChild(
+ document.createTextNode(val + '\n'));
+};
diff --git a/timings/js/plotter.js b/timings/js/plotter.js
new file mode 100644
index 000000000..86fb23049
--- /dev/null
+++ b/timings/js/plotter.js
@@ -0,0 +1,336 @@
+/*
+ Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
+ Use of this source code is governed by a BSD-style license that can be
+ found in the LICENSE file.
+*/
+
+// Collection of classes used to plot data in a <canvas>. Create a Plotter()
+// to generate a plot.
+
+// vertical marker for columns
+function Marker(color) {
+ var m = document.createElement("DIV");
+ m.setAttribute("class", "plot-cursor");
+ m.style.backgroundColor = color;
+ m.style.opacity = "0.3";
+ m.style.position = "absolute";
+ m.style.left = "-2px";
+ m.style.top = "-2px";
+ m.style.width = "0px";
+ m.style.height = "0px";
+ return m;
+}
+
+/**
+ * HorizontalMarker class
+ * Create a horizontal marker at the indicated mouse location.
+ * @constructor
+ *
+ * @param canvasRect {Object} The canvas bounds (in client coords).
+ * @param clientY {Number} The vertical mouse click location that spawned
+ * the marker, in the client coordinate space.
+ * @param yValue {Number} The plotted value corresponding to the clientY
+ * click location.
+ */
+function HorizontalMarker(canvasRect, clientY, yValue) {
+ // Add a horizontal line to the graph.
+ var m = document.createElement("DIV");
+ m.setAttribute("class", "plot-baseline");
+ m.style.backgroundColor = HorizontalMarker.COLOR;
+ m.style.opacity = "0.3";
+ m.style.position = "absolute";
+  m.style.left = canvasRect.offsetLeft + "px";
+ var h = HorizontalMarker.HEIGHT;
+ m.style.top = (clientY - h/2).toFixed(0) + "px";
+ m.style.width = canvasRect.width + "px";
+ m.style.height = h + "px";
+ this.markerDiv_ = m;
+
+ this.value = yValue;
+}
+
+HorizontalMarker.HEIGHT = 5;
+HorizontalMarker.COLOR = "rgb(0,100,100)";
+
+// Remove the horizontal line from the graph.
+HorizontalMarker.prototype.remove_ = function() {
+ this.markerDiv_.parentNode.removeChild(this.markerDiv_);
+}
+
+/**
+ * Plotter class
+ * @constructor
+ *
+ * Draws a chart using CANVAS element. Takes array of lines to draw with
+ * deviations values for each data sample.
+ *
+ * @param {Array} clNumbers list of clNumbers for each data sample.
+ * @param {Array} plotData list of arrays that represent individual lines.
+ * The line itself is an Array of value and stdd.
+ * @param {Array} dataDescription list of data description for each line
+ * in plotData.
+ * @param {string} units name of measurement used to describe plotted data.
+ *
+ * Example of the plotData:
+ * [
+ * [line 1 data],
+ * [line 2 data]
+ * ].
+ * Line data looks like [[point one], [point two]].
+ * And individual points are [value, deviation value]
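+ *
+ * A minimal (hypothetical) call, shaped like the one graph.html makes:
+ *   new Plotter(["5001", "5002"],
+ *               [[[9.7, 0.1], [9.5, 0.1]]],  // one trace, two samples
+ *               ["full-build"], "seconds",
+ *               document.getElementById("output")).plot();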
+ */
+function Plotter(clNumbers, plotData, dataDescription, units, resultNode) {
+ this.clNumbers_ = clNumbers;
+ this.plotData_ = plotData;
+ this.dataDescription_ = dataDescription;
+ this.resultNode_ = resultNode;
+ this.units_ = units;
+ this.coordinates = new Coordinates(plotData);
+
+  // A color palette that's unambiguous for normal and color-deficient viewers.
+ // Values are (red, green, blue) on a scale of 255.
+ // Taken from http://jfly.iam.u-tokyo.ac.jp/html/manuals/pdf/color_blind.pdf
+ this.colors = [[0, 114, 178], // blue
+ [230, 159, 0], // orange
+ [0, 158, 115], // green
+ [204, 121, 167], // purplish pink
+ [86, 180, 233], // sky blue
+ [213, 94, 0], // dark orange
+ [0, 0, 0], // black
+ [240, 228, 66] // yellow
+ ];
+}
+
+/**
+ * Does the actual plotting.
+ */
+Plotter.prototype.plot = function() {
+ var canvas = this.canvas();
+ this.coordinates_div_ = this.coordinates_();
+ this.ruler_div_ = this.ruler();
+ // marker for the result-point that the mouse is currently over
+ this.cursor_div_ = new Marker("rgb(100,80,240)");
+ // marker for the result-point for which details are shown
+ this.marker_div_ = new Marker("rgb(100,100,100)");
+ var ctx = canvas.getContext("2d");
+ for (var i = 0; i < this.plotData_.length; i++)
+ this.plotLine_(ctx, this.nextColor(i), this.plotData_[i]);
+
+ this.resultNode_.appendChild(canvas);
+ this.resultNode_.appendChild(this.coordinates_div_);
+
+ this.resultNode_.appendChild(this.ruler_div_);
+ this.resultNode_.appendChild(this.cursor_div_);
+ this.resultNode_.appendChild(this.marker_div_);
+ this.attachEventListeners(canvas);
+ this.canvasRectangle = {
+ "offsetLeft": canvas.offsetLeft,
+ "offsetTop": canvas.offsetTop,
+ "width": canvas.offsetWidth,
+ "height": canvas.offsetHeight
+ };
+};
+
+Plotter.prototype.drawDeviationBar_ = function(context, strokeStyles, x, y,
+ deviationValue) {
+ context.strokeStyle = strokeStyles;
+ context.lineWidth = 1.0;
+ context.beginPath();
+ context.moveTo(x, (y + deviationValue));
+ context.lineTo(x, (y - deviationValue));
+ context.moveTo(x, (y - deviationValue));
+ context.closePath();
+ context.stroke();
+};
+
+Plotter.prototype.plotLine_ = function(ctx, strokeStyles, data) {
+ ctx.strokeStyle = strokeStyles;
+ ctx.lineWidth = 2.0;
+ ctx.beginPath();
+ var initial = true;
+ var deviationData = [];
+ for (var i = 0; i < data.length; i++) {
+ var x = this.coordinates.xPoints(i);
+ var value = data[i][0];
+ var stdd = data[i][1];
+ var y = 0.0;
+ var err = 0.0;
+ if (isNaN(value)) {
+ // Re-set 'initial' if we're at a gap in the data.
+ initial = true;
+ } else {
+ y = this.coordinates.yPoints(value);
+ // We assume that the stdd will only be NaN (missing) when the value is.
+ if (parseFloat(value) != 0.0)
+ err = y * parseFloat(stdd) / parseFloat(value);
+ if (initial)
+ initial = false;
+ else
+ ctx.lineTo(x, y);
+ }
+
+ ctx.moveTo(x, y);
+ deviationData.push([x, y, err])
+ }
+ ctx.closePath();
+ ctx.stroke();
+
+ for (var i = 0; i < deviationData.length; i++) {
+ this.drawDeviationBar_(ctx, strokeStyles, deviationData[i][0],
+ deviationData[i][1], deviationData[i][2]);
+ }
+};
+
+Plotter.prototype.attachEventListeners = function(canvas) {
+ var self = this;
+ canvas.parentNode.addEventListener(
+ "mousemove", function(evt) { self.onMouseMove_(evt); }, false);
+ this.cursor_div_.addEventListener(
+ "click", function(evt) { self.onMouseClick_(evt); }, false);
+};
+
+Plotter.prototype.updateRuler_ = function(evt) {
+ var r = this.ruler_div_;
+ r.style.left = this.canvasRectangle.offsetLeft + "px";
+
+ r.style.top = this.canvasRectangle.offsetTop + "px";
+ r.style.width = this.canvasRectangle.width + "px";
+ var h = evt.clientY - this.canvasRectangle.offsetTop;
+ if (h > this.canvasRectangle.height)
+ h = this.canvasRectangle.height;
+ r.style.height = h + "px";
+};
+
+Plotter.prototype.updateCursor_ = function() {
+ var c = this.cursor_div_;
+ c.style.top = this.canvasRectangle.offsetTop + "px";
+ c.style.height = this.canvasRectangle.height + "px";
+ var w = this.canvasRectangle.width / this.clNumbers_.length;
+ var x = (this.canvasRectangle.offsetLeft +
+ w * this.current_index_).toFixed(0);
+ c.style.left = x + "px";
+ c.style.width = w + "px";
+};
+
+
+Plotter.prototype.onMouseMove_ = function(evt) {
+ var canvas = evt.currentTarget.firstChild;
+ var positionX = evt.clientX - this.canvasRectangle.offsetLeft;
+ var positionY = evt.clientY - this.canvasRectangle.offsetTop;
+
+ this.current_index_ = this.coordinates.dataSampleIndex(positionX);
+ var yValue = this.coordinates.yValue(positionY);
+
+ this.coordinates_td_.innerHTML =
+ "r" + this.clNumbers_[this.current_index_] + ": " +
+ this.plotData_[0][this.current_index_][0].toFixed(2) + " " +
+ this.units_ + " +/- " +
+ this.plotData_[0][this.current_index_][1].toFixed(2) + " " +
+ yValue.toFixed(2) + " " + this.units_;
+
+ // If there is a horizontal marker, also display deltas relative to it.
+ if (this.horizontal_marker_) {
+ var baseline = this.horizontal_marker_.value;
+ var delta = yValue - baseline
+ var fraction = delta / baseline; // allow division by 0
+
+ var deltaStr = (delta >= 0 ? "+" : "") + delta.toFixed(0) + " " +
+ this.units_;
+ var percentStr = (fraction >= 0 ? "+" : "") +
+ (fraction * 100).toFixed(3) + "%";
+
+ this.baseline_deltas_td_.innerHTML = deltaStr + ": " + percentStr;
+ }
+
+ this.updateRuler_(evt);
+ this.updateCursor_();
+};
+
+Plotter.prototype.onMouseClick_ = function(evt) {
+ // Shift-click controls the horizontal reference line.
+ if (evt.shiftKey) {
+ if (this.horizontal_marker_) {
+ this.horizontal_marker_.remove_();
+ }
+
+ var canvasY = evt.clientY - this.canvasRectangle.offsetTop;
+ this.horizontal_marker_ = new HorizontalMarker(this.canvasRectangle,
+ evt.clientY, this.coordinates.yValue(canvasY));
+
+ // Insert before cursor node, otherwise it catches clicks.
+ this.cursor_div_.parentNode.insertBefore(
+ this.horizontal_marker_.markerDiv_, this.cursor_div_);
+ } else {
+ var index = this.current_index_;
+ var m = this.marker_div_;
+ var c = this.cursor_div_;
+ m.style.top = c.style.top;
+ m.style.left = c.style.left;
+ m.style.width = c.style.width;
+ m.style.height = c.style.height;
+ if ("onclick" in this) {
+ var this_x = this.clNumbers_[index];
+ var prev_x = index > 0 ? (parseInt(this.clNumbers_[index-1]) + 1) :
+ this_x;
+ this.onclick(prev_x, this_x);
+ }
+ }
+};
+
+Plotter.prototype.canvas = function() {
+ var canvas = document.createElement("CANVAS");
+ canvas.setAttribute("id", "_canvas");
+ canvas.setAttribute("class", "plot");
+ canvas.setAttribute("width", this.coordinates.widthMax);
+ canvas.setAttribute("height", this.coordinates.heightMax);
+ return canvas;
+};
+
+Plotter.prototype.ruler = function() {
+ ruler = document.createElement("DIV");
+ ruler.setAttribute("class", "plot-ruler");
+ ruler.style.borderBottom = "1px dotted black";
+ ruler.style.position = "absolute";
+ ruler.style.left = "-2px";
+ ruler.style.top = "-2px";
+ ruler.style.width = "0px";
+ ruler.style.height = "0px";
+ return ruler;
+};
+
+Plotter.prototype.coordinates_ = function() {
+ var coordinatesDiv = document.createElement("DIV");
+ var table_html =
+ "<table border=0 width='100%'><tbody><tr>" +
+ "<td colspan=2 class='legend'>Legend: ";
+ for (var i = 0; i < this.dataDescription_.length; i++) {
+ if (i > 0)
+ table_html += ", ";
+ table_html += "<span class='legend_item' style='color:" +
+ this.nextColor(i) + "'>" + this.dataDescription_[i] + "</span>";
+ }
+ table_html += "</td></tr><tr>" +
+ "<td class='plot-coordinates'><i>move mouse over graph</i></td>" +
+ "<td align=right style='color: " + HorizontalMarker.COLOR +
+ "'><i>Shift-click to place baseline</i></td>" +
+ "</tr></tbody></table>";
+ coordinatesDiv.innerHTML = table_html;
+
+ var tr = coordinatesDiv.firstChild.firstChild.childNodes[1];
+ this.coordinates_td_ = tr.childNodes[0];
+ this.baseline_deltas_td_ = tr.childNodes[1];
+
+ return coordinatesDiv;
+};
+
+Plotter.prototype.nextColor = function(i) {
+ var index = i % this.colors.length;
+ return "rgb(" + this.colors[index][0] + "," +
+ this.colors[index][1] + "," +
+ this.colors[index][2] + ")";
+};
+
+Plotter.prototype.log = function(val) {
+ document.getElementById('log').appendChild(
+ document.createTextNode(val + '\n'));
+};