summaryrefslogtreecommitdiff
path: root/tools/regression/src
diff options
context:
space:
mode:
Diffstat (limited to 'tools/regression/src')
-rwxr-xr-xtools/regression/src/boost_svn_export_archive.sh70
-rw-r--r--tools/regression/src/collect_and_upload_logs.py546
-rw-r--r--tools/regression/src/compiler_status.cpp1113
-rw-r--r--tools/regression/src/detail/tiny_xml.cpp167
-rw-r--r--tools/regression/src/detail/tiny_xml.hpp70
-rw-r--r--tools/regression/src/detail/tiny_xml_test.cpp17
-rw-r--r--tools/regression/src/detail/tiny_xml_test.txt17
-rw-r--r--tools/regression/src/library_status.cpp901
-rw-r--r--tools/regression/src/library_test.bat21
-rwxr-xr-xtools/regression/src/library_test.sh19
-rwxr-xr-xtools/regression/src/library_test_all.sh85
-rw-r--r--tools/regression/src/process_jam_log.cpp893
-rwxr-xr-xtools/regression/src/process_jam_log.py468
-rw-r--r--tools/regression/src/regression-logs.pl197
-rw-r--r--tools/regression/src/regression.py908
-rw-r--r--tools/regression/src/run.py60
-rw-r--r--tools/regression/src/run_tests.sh197
-rwxr-xr-xtools/regression/src/smoke.py197
18 files changed, 5946 insertions, 0 deletions
diff --git a/tools/regression/src/boost_svn_export_archive.sh b/tools/regression/src/boost_svn_export_archive.sh
new file mode 100755
index 0000000000..e86d377ab9
--- /dev/null
+++ b/tools/regression/src/boost_svn_export_archive.sh
@@ -0,0 +1,70 @@
+#!/bin/sh
+
+#~ Copyright Redshift Software, Inc. 2007
+#~ Distributed under the Boost Software License, Version 1.0.
+#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Export a snapshot of a Boost SVN branch and stream it to stdout as a
+# .tar.bz2 archive, deleting each exported file as soon as it has been
+# archived so the scratch directory never holds the full tree.
+# Usage: boost_svn_export_archive.sh <branch>
+
+export PATH=/bin:/usr/bin:${PATH}
+
+timestamp=`date +%F-%H-%M-%S-%Z`
+# Branch path under the repository root (first positional argument).
+branch=$1
+# Head revision of the branch, parsed from the "Revision:" line of svn info.
+revision=`svn info file:///home/subversion/boost/${branch} | grep '^Revision:' | cut --byte=11-`
+# Tag combines the first path component of the branch with the timestamp.
+tag=boost-${1/\/*}-${timestamp}
+# Per-process scratch directory ($$ = PID of this shell).
+export_dir=boost-$$
+
+# Remove files as listed in stdin, the assumption is that processing
+# of the file is complete and can be removed.
+rm_c()
+{
+ while read f; do
+ rm -f ${f}
+ done
+}
+# Generate the export file tree, and incrementally output the files
+# created.
+svn_export()
+{
+ svn export -r ${revision} file:///home/subversion/boost/${branch} ${tag}
+ echo "Revision: ${revision}" > ${tag}/svn_info.txt
+ echo "---- ${tag}/svn_info.txt"
+}
+# Create the archive incrementally, deleting files as we are done
+# adding them to the archive.
+# Pipeline: svn_export emits "---- <path>" lines -> cut strips the "---- "
+# prefix -> star archives the listed files to stdout -> bzip2 compresses ->
+# tee copies the stream to $1 -> tar lists the archived names -> rm_c
+# deletes each file once it is safely inside the archive.
+make_archive()
+{
+ svn_export \
+ | cut --bytes=6- \
+ | star -c -D -to-stdout -d artype=pax list=- 2>/dev/null \
+ | bzip2 -6 -c \
+ | tee $1 \
+ | tar -jtf - \
+ | rm_c
+}
+
+# Normal mode: stream the archive to stdout through a FIFO, then clean up.
+run()
+{
+ cd /tmp
+ rm -rf ${export_dir}
+ mkdir ${export_dir}
+ cd ${export_dir}
+ mkfifo out.tbz2
+ make_archive out.tbz2 &
+ cat out.tbz2
+ cd /tmp
+ rm -rf ${export_dir}
+}
+
+# Debug mode: write the archive to ../<tag>.tar.bz2 instead of stdout.
+run_debug()
+{
+ rm -rf ${export_dir}
+ mkdir ${export_dir}
+ cd ${export_dir}
+ mkfifo out.tbz2
+ make_archive out.tbz2 &
+ cat out.tbz2 > ../${tag}.tar.bz2
+ cd ..
+ rm -rf ${export_dir}
+}
+
+run
+#run_debug
diff --git a/tools/regression/src/collect_and_upload_logs.py b/tools/regression/src/collect_and_upload_logs.py
new file mode 100644
index 0000000000..7f1345bd6f
--- /dev/null
+++ b/tools/regression/src/collect_and_upload_logs.py
@@ -0,0 +1,546 @@
+
+# Copyright (c) MetaCommunications, Inc. 2003-2007
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import xml.sax.saxutils
+import zipfile
+import ftplib
+import time
+import stat
+import xml.dom.minidom
+import xmlrpclib
+import httplib
+
+import os.path
+import string
+import sys
+import re
+import urlparse
+
+
+def process_xml_file( input_file, output_file ):
+    """Append the lines of test log *input_file* to *output_file*,
+    mapping each line through utils.char_translation_table (strips
+    characters that would produce invalid XML)."""
+    utils.log( 'Processing test log "%s"' % input_file )
+
+    f = open( input_file, 'r' )
+    xml = f.readlines()
+    f.close()
+
+    for i in range( 0, len(xml)):
+        xml[i] = string.translate( xml[i], utils.char_translation_table )
+
+    output_file.writelines( xml )
+
+
+def process_test_log_files( output_file, dir, names ):
+    """os.path.walk visitor: feed every 'test_log.xml' found in *dir*
+    into *output_file* via process_xml_file."""
+    for file in names:
+        if os.path.basename( file ) == 'test_log.xml':
+            process_xml_file( os.path.join( dir, file ), output_file )
+
+
+def collect_test_logs( input_dirs, test_results_writer ):
+    """Recursively walk each directory in *input_dirs* and concatenate all
+    'test_log.xml' files into *test_results_writer* (an open file object)."""
+    __log__ = 1
+    utils.log( 'Collecting test logs ...' )
+    for input_dir in input_dirs:
+        utils.log( 'Walking directory "%s" ...' % input_dir )
+        os.path.walk( input_dir, process_test_log_files, test_results_writer )
+
+# Mapping from Boost test-log result strings to Dart status strings.
+dart_status_from_result = {
+    'succeed': 'passed',
+    'fail': 'failed',
+    'note': 'passed',
+    '': 'notrun'
+    }
+
+# Mapping from result tag to the Dart project name to submit under.
+dart_project = {
+    'trunk': 'Boost_HEAD',
+    '': 'Boost_HEAD'
+    }
+
+# Mapping from regression run type to the Dart track name.
+dart_track = {
+    'full': 'Nightly',
+    'incremental': 'Continuous',
+    '': 'Experimental'
+    }
+
+# 256-entry str.translate table: keeps printable ASCII plus newline and
+# carriage return, replaces everything else with '?'.
+ascii_only_table = ""
+for i in range(0,256):
+    if chr(i) == '\n' or chr(i) == '\r':
+        ascii_only_table += chr(i)
+    elif i < 32 or i >= 0x80:
+        ascii_only_table += '?'
+    else:
+        ascii_only_table += chr(i)
+
+class xmlrpcProxyTransport(xmlrpclib.Transport):
+    """xmlrpclib transport that routes the POST through an HTTP proxy:
+    connects to the proxy but sends an absolute request URI and an
+    explicit Host header naming the real server."""
+    def __init__(self, proxy):
+        self.proxy = proxy
+    def make_connection(self, host):
+        self.realhost = host
+        return httplib.HTTP(self.proxy)
+    def send_request(self, connection, handler, request_body):
+        connection.putrequest('POST','http://%s%s' % (self.realhost,handler))
+    def send_host(self, connection, host):
+        connection.putheader('Host',self.realhost)
+
+
+def publish_test_logs(
+    input_dirs,
+    runner_id, tag, platform, comment_file, timestamp, user, source, run_type,
+    dart_server = None,
+    http_proxy = None,
+    **unused
+    ):
+    """Walk *input_dirs* for 'test_log.xml' files and submit their results
+    to the given Dart server via XML-RPC, one DartSubmission document per
+    toolset. No-op unless *dart_server* is set. Submission failures are
+    logged, not raised."""
+    __log__ = 1
+    utils.log( 'Publishing test logs ...' )
+    dart_rpc = None
+    # toolset name -> accumulated DartSubmission DOM document
+    dart_dom = {}
+
+    def _publish_test_log_files_ ( unused, dir, names ):
+        # os.path.walk visitor: parse each test_log.xml and append one
+        # <Test> element per result node to the toolset's submission DOM.
+        for file in names:
+            if os.path.basename( file ) == 'test_log.xml':
+                utils.log( 'Publishing test log "%s"' % os.path.join(dir,file) )
+                if dart_server:
+                    log_xml = open(os.path.join(dir,file)).read().translate(ascii_only_table)
+                    #~ utils.log( '--- XML:\n%s' % log_xml)
+                    #~ It seems possible to get an empty XML result file :-(
+                    if log_xml == "": continue
+                    log_dom = xml.dom.minidom.parseString(log_xml)
+                    test = {
+                        'library': log_dom.documentElement.getAttribute('library'),
+                        'test-name': log_dom.documentElement.getAttribute('test-name'),
+                        'toolset': log_dom.documentElement.getAttribute('toolset')
+                        }
+                    if not test['test-name'] or test['test-name'] == '':
+                        test['test-name'] = 'unknown'
+                    if not test['toolset'] or test['toolset'] == '':
+                        test['toolset'] = 'unknown'
+                    # Lazily create the per-toolset submission envelope.
+                    if not dart_dom.has_key(test['toolset']):
+                        dart_dom[test['toolset']] = xml.dom.minidom.parseString(
+'''<?xml version="1.0" encoding="UTF-8"?>
+<DartSubmission version="2.0" createdby="collect_and_upload_logs.py">
+ <Site>%(site)s</Site>
+ <BuildName>%(buildname)s</BuildName>
+ <Track>%(track)s</Track>
+ <DateTimeStamp>%(datetimestamp)s</DateTimeStamp>
+</DartSubmission>
+''' % {
+                            'site': runner_id,
+                            'buildname': "%s -- %s (%s)" % (platform,test['toolset'],run_type),
+                            'track': dart_track[run_type],
+                            'datetimestamp' : timestamp
+                            } )
+                    submission_dom = dart_dom[test['toolset']]
+                    for node in log_dom.documentElement.childNodes:
+                        if node.nodeType == xml.dom.Node.ELEMENT_NODE:
+                            if node.firstChild:
+                                log_data = xml.sax.saxutils.escape(node.firstChild.data)
+                            else:
+                                log_data = ''
+                            test_dom = xml.dom.minidom.parseString('''<?xml version="1.0" encoding="UTF-8"?>
+<Test>
+ <Name>.Test.Boost.%(tag)s.%(library)s.%(test-name)s.%(type)s</Name>
+ <Status>%(result)s</Status>
+ <Measurement name="Toolset" type="text/string">%(toolset)s</Measurement>
+ <Measurement name="Timestamp" type="text/string">%(timestamp)s</Measurement>
+ <Measurement name="Log" type="text/text">%(log)s</Measurement>
+</Test>
+ ''' % {
+                                'tag': tag,
+                                'library': test['library'],
+                                'test-name': test['test-name'],
+                                'toolset': test['toolset'],
+                                'type': node.nodeName,
+                                'result': dart_status_from_result[node.getAttribute('result')],
+                                'timestamp': node.getAttribute('timestamp'),
+                                'log': log_data
+                                })
+                            submission_dom.documentElement.appendChild(
+                                test_dom.documentElement.cloneNode(1) )
+
+    for input_dir in input_dirs:
+        utils.log( 'Walking directory "%s" ...' % input_dir )
+        os.path.walk( input_dir, _publish_test_log_files_, None )
+    if dart_server:
+        try:
+            rpc_transport = None
+            if http_proxy:
+                rpc_transport = xmlrpcProxyTransport(http_proxy)
+            dart_rpc = xmlrpclib.ServerProxy(
+                'http://%s/%s/Command/' % (dart_server,dart_project[tag]),
+                rpc_transport )
+            for dom in dart_dom.values():
+                #~ utils.log('Dart XML: %s' % dom.toxml('utf-8'))
+                dart_rpc.Submit.put(xmlrpclib.Binary(dom.toxml('utf-8')))
+        except Exception, e:
+            utils.log('Dart server error: %s' % e)
+
+
+def upload_to_ftp( tag, results_file, ftp_proxy, debug_level, ftp_url ):
+    """Upload *results_file* into the <ftp_path>/<tag> directory of the FTP
+    server named in *ftp_url* (defaults to the anonymous Boost results
+    host), creating the tag directory chain if missing. When *ftp_proxy*
+    is set, connects through the proxy using user@host login syntax."""
+
+    if not ftp_url:
+        ftp_host = 'boost.cowic.de'
+        ftp_url = ''.join(['ftp','://anonymous','@',ftp_host,'/boost/do-not-publish-this-url/results/'])
+    utils.log( 'Uploading log archive "%s" to %s/%s' % ( results_file, ftp_url, tag ) )
+
+    # Split ftp://user[:password]@host/path by hand; password defaults
+    # to 'anonymous' when the URL carries none.
+    ftp_parts = urlparse.urlparse(ftp_url)
+    ftp_netloc = re.split('[@]',ftp_parts[1])
+    ftp_user = re.split('[:]',ftp_netloc[0])[0]
+    ftp_password = re.split('[:]',ftp_netloc[0]+':anonymous')[1]
+    ftp_site = re.split('[:]',ftp_netloc[1])[0]
+    ftp_path = ftp_parts[2]
+
+    if not ftp_proxy:
+        ftp = ftplib.FTP( ftp_site )
+        ftp.set_debuglevel( debug_level )
+        ftp.login( ftp_user, ftp_password )
+    else:
+        utils.log( ' Connecting through FTP proxy server "%s"' % ftp_proxy )
+        ftp = ftplib.FTP( ftp_proxy )
+        ftp.set_debuglevel( debug_level )
+        ftp.set_pasv (0) # turn off PASV mode
+        ftp.login( '%s@%s' % (ftp_user,ftp_site), ftp_password )
+
+    ftp.cwd( ftp_path )
+    try:
+        ftp.cwd( tag )
+    except ftplib.error_perm:
+        # Tag directory absent: create each path component in turn.
+        for dir in tag.split( '/' ):
+            ftp.mkd( dir )
+            ftp.cwd( dir )
+
+    f = open( results_file, 'rb' )
+    ftp.storbinary( 'STOR %s' % os.path.basename( results_file ), f )
+    ftp.quit()
+
+
+def copy_comments( results_xml, comment_file ):
+    """Write a <comment> element into *results_xml* (an XMLGenerator)
+    containing the contents of *comment_file* (if present) followed by an
+    HTML rendering of this script's command line, with FTP credentials
+    masked and long lines wrapped with alignment backslashes."""
+    results_xml.startElement( 'comment', {} )
+
+    if os.path.exists( comment_file ):
+        utils.log( 'Reading comments file "%s"...' % comment_file )
+        f = open( comment_file, 'r' )
+        try:
+            results_xml.characters( f.read() )
+        finally:
+            f.close()
+    else:
+        utils.log( 'Warning: comment file "%s" is not found.' % comment_file )
+
+    lines = ['']
+    for arg in sys.argv:
+        # Make sure that the ftp details are hidden
+        arg = re.sub( 'ftp://.*$', 'ftp://XXXXX', arg )
+
+        # Escape quotes
+        arg = re.sub( r'(\\|")', r'\\\1', arg )
+
+        # Quote arguments if needed
+        if arg.find( ' ' ) != -1:
+            arg = '"%s"' % arg
+        if len( lines[-1] ) + len( arg ) + 2 >= 80:
+            # align backslashes
+            lines[-1] += ' ' * ( 79 - len( lines[-1] ) )
+            # indent lines after the first
+            lines.append( ' ' )
+        lines[-1] += ( arg + ' ' )
+
+    results_xml.characters( '<hr>' )
+    results_xml.characters( '<dl>' )
+    results_xml.characters( '<dt>Command Line</dt>' )
+    results_xml.characters( '<dd>' )
+    results_xml.characters( '<pre>' )
+    results_xml.characters( '\\\n'.join(lines) )
+    results_xml.characters( '</pre>' )
+    results_xml.characters( '</dd>' )
+    results_xml.characters( '</dl>\n' )
+
+    results_xml.endElement( 'comment' )
+
+
+def compress_file( file_path, archive_path ):
+ utils.log( 'Compressing "%s"...' % file_path )
+
+ try:
+ z = zipfile.ZipFile( archive_path, 'w', zipfile.ZIP_DEFLATED )
+ z.write( file_path, os.path.basename( file_path ) )
+ z.close()
+ utils.log( 'Done writing "%s".'% archive_path )
+ except Exception, msg:
+ utils.log( 'Warning: Compressing falied (%s)' % msg )
+ utils.log( ' Trying to compress using a platform-specific tool...' )
+ try: import zip_cmd
+ except ImportError:
+ script_dir = os.path.dirname( os.path.abspath( sys.argv[0] ) )
+ utils.log( 'Could not find \'zip_cmd\' module in the script directory (%s).' % script_dir )
+ raise Exception( 'Compressing failed!' )
+ else:
+ if os.path.exists( archive_path ):
+ os.unlink( archive_path )
+ utils.log( 'Removing stale "%s".' % archive_path )
+
+ zip_cmd.main( file_path, archive_path )
+ utils.log( 'Done compressing "%s".' % archive_path )
+
+
+def read_timestamp( file ):
+    """Return *file*'s modification time as a time.struct_time (UTC);
+    fall back to the current UTC time when the file does not exist."""
+    if not os.path.exists( file ):
+        result = time.gmtime()
+        utils.log( 'Warning: timestamp file "%s" does not exist'% file )
+        utils.log( 'Using current UTC time (%s)' % result )
+        return result
+
+    return time.gmtime( os.stat( file ).st_mtime )
+
+
+def collect_logs(
+    results_dir
+    , runner_id
+    , tag
+    , platform
+    , comment_file
+    , timestamp_file
+    , user
+    , source
+    , run_type
+    , dart_server = None
+    , http_proxy = None
+    , revision = ''
+    , **unused
+    ):
+    """Gather all test_log.xml files under *results_dir* into a single
+    <test-run> XML document named <runner_id>.xml, optionally publish the
+    logs to a Dart server first, then zip the document to <runner_id>.zip
+    in the same directory."""
+
+    timestamp = time.strftime( '%Y-%m-%dT%H:%M:%SZ', read_timestamp( timestamp_file ) )
+
+    if dart_server:
+        publish_test_logs( [ results_dir ],
+            runner_id, tag, platform, comment_file, timestamp, user, source, run_type,
+            dart_server = dart_server,
+            http_proxy = http_proxy )
+
+    results_file = os.path.join( results_dir, '%s.xml' % runner_id )
+    results_writer = open( results_file, 'w' )
+    utils.log( 'Collecting test logs into "%s"...' % results_file )
+
+    results_xml = xml.sax.saxutils.XMLGenerator( results_writer )
+    results_xml.startDocument()
+    results_xml.startElement(
+        'test-run'
+        , {
+            'tag': tag
+            , 'platform': platform
+            , 'runner': runner_id
+            , 'timestamp': timestamp
+            , 'source': source
+            , 'run-type': run_type
+            , 'revision': revision
+            }
+        )
+
+    copy_comments( results_xml, comment_file )
+    collect_test_logs( [ results_dir ], results_writer )
+
+    results_xml.endElement( "test-run" )
+    results_xml.endDocument()
+    results_writer.close()
+    utils.log( 'Done writing "%s".' % results_file )
+
+    compress_file(
+        results_file
+        , os.path.join( results_dir,'%s.zip' % runner_id )
+        )
+
+
+def upload_logs(
+    results_dir
+    , runner_id
+    , tag
+    , user
+    , ftp_proxy
+    , debug_level
+    , send_bjam_log = False
+    , timestamp_file = None
+    , dart_server = None
+    , ftp_url = None
+    , **unused
+    ):
+    """Upload the previously built <runner_id>.zip from *results_dir* to
+    the FTP results area for *tag*; when *send_bjam_log* is set, also zip
+    and upload bjam.log under <tag>/logs with a timestamped name."""
+
+    logs_archive = os.path.join( results_dir, '%s.zip' % runner_id )
+    upload_to_ftp( tag, logs_archive, ftp_proxy, debug_level, ftp_url )
+    if send_bjam_log:
+        bjam_log_path = os.path.join( results_dir, 'bjam.log' )
+        if not timestamp_file:
+            timestamp_file = bjam_log_path
+
+        timestamp = time.strftime( '%Y-%m-%d-%H-%M-%S', read_timestamp( timestamp_file ) )
+        logs_archive = os.path.join( results_dir, '%s.%s.log.zip' % ( runner_id, timestamp ) )
+        compress_file( bjam_log_path, logs_archive )
+        upload_to_ftp( '%s/logs' % tag, logs_archive, ftp_proxy, debug_level, ftp_url )
+
+
+def collect_and_upload_logs(
+    results_dir
+    , runner_id
+    , tag
+    , platform
+    , comment_file
+    , timestamp_file
+    , user
+    , source
+    , run_type
+    , revision = None
+    , ftp_proxy = None
+    , debug_level = 0
+    , send_bjam_log = False
+    , dart_server = None
+    , http_proxy = None
+    , ftp_url = None
+    , **unused
+    ):
+    """Convenience driver: collect_logs followed by upload_logs with the
+    same parameters."""
+
+    collect_logs(
+        results_dir
+        , runner_id
+        , tag
+        , platform
+        , comment_file
+        , timestamp_file
+        , user
+        , source
+        , run_type
+        , revision = revision
+        , dart_server = dart_server
+        , http_proxy = http_proxy
+        )
+
+    upload_logs(
+        results_dir
+        , runner_id
+        , tag
+        , user
+        , ftp_proxy
+        , debug_level
+        , send_bjam_log
+        , timestamp_file
+        , dart_server = dart_server
+        , ftp_url = ftp_url
+        )
+
+
+def accept_args( args ):
+ args_spec = [
+ 'locate-root='
+ , 'runner='
+ , 'tag='
+ , 'platform='
+ , 'comment='
+ , 'timestamp='
+ , 'source='
+ , 'run-type='
+ , 'user='
+ , 'ftp-proxy='
+ , 'proxy='
+ , 'debug-level='
+ , 'send-bjam-log'
+ , 'help'
+ , 'dart-server='
+ , 'revision='
+ , 'ftp='
+ ]
+
+ options = {
+ '--tag' : 'trunk'
+ , '--platform' : sys.platform
+ , '--comment' : 'comment.html'
+ , '--timestamp' : 'timestamp'
+ , '--user' : None
+ , '--source' : 'SVN'
+ , '--run-type' : 'full'
+ , '--ftp-proxy' : None
+ , '--proxy' : None
+ , '--debug-level' : 0
+ , '--dart-server' : 'beta.boost.org:8081'
+ , '--revision' : None
+ , '--ftp' : None
+
+ }
+
+ utils.accept_args( args_spec, args, options, usage )
+
+ return {
+ 'results_dir' : options[ '--locate-root' ]
+ , 'runner_id' : options[ '--runner' ]
+ , 'tag' : options[ '--tag' ]
+ , 'platform' : options[ '--platform']
+ , 'comment_file' : options[ '--comment' ]
+ , 'timestamp_file' : options[ '--timestamp' ]
+ , 'user' : options[ '--user' ]
+ , 'source' : options[ '--source' ]
+ , 'run_type' : options[ '--run-type' ]
+ , 'ftp_proxy' : options[ '--ftp-proxy' ]
+ , 'http_proxy' : options[ '--proxy' ]
+ , 'debug_level' : int(options[ '--debug-level' ])
+ , 'send_bjam_log' : options.has_key( '--send-bjam-log' )
+ , 'dart_server' : options[ '--dart-server' ]
+ , 'revision' : options[ '--revision' ]
+ , 'ftp' : options[ '--ftp' ]
+ }
+
+
+# Dispatch table: first command-line word -> handler function.
+commands = {
+    'collect-and-upload' : collect_and_upload_logs
+    , 'collect-logs' : collect_logs
+    , 'upload-logs' : upload_logs
+    }
+
+def usage():
+    """Print command-line help for this script to stdout."""
+    print 'Usage: %s [command] [options]' % os.path.basename( sys.argv[0] )
+    print '''
+Commands:
+\t%s
+
+Options:
+\t--locate-root directory to to scan for "test_log.xml" files
+\t--runner runner ID (e.g. "Metacomm")
+\t--timestamp path to a file which modification time will be used
+\t as a timestamp of the run ("timestamp" by default)
+\t--comment an HTML comment file to be inserted in the reports
+\t ("comment.html" by default)
+\t--tag the tag for the results ("trunk" by default)
+\t--user SourceForge user name for a shell account (optional)
+\t--source where Boost sources came from ("SVN" or "tarball";
+\t "SVN" by default)
+\t--run-type "incremental" or "full" ("full" by default)
+\t--send-bjam-log in addition to regular XML results, send in full bjam
+\t log of the regression run
+\t--proxy HTTP proxy server address and port (e.g.
+\t 'http://www.someproxy.com:3128', optional)
+\t--ftp-proxy FTP proxy server (e.g. 'ftpproxy', optional)
+\t--debug-level debugging level; controls the amount of debugging
+\t output printed; 0 by default (no debug output)
+\t--dart-server The dart server to send results to.
+\t--ftp The ftp URL to upload results to.
+''' % '\n\t'.join( commands.keys() )
+
+
+def main():
+    """Entry point: dispatch to the named command, defaulting to
+    'collect-and-upload' when the first argument is not a command."""
+    if len(sys.argv) > 1 and sys.argv[1] in commands:
+        command = sys.argv[1]
+        args = sys.argv[ 2: ]
+    else:
+        command = 'collect-and-upload'
+        args = sys.argv[ 1: ]
+
+    commands[ command ]( **accept_args( args ) )
+
+
+if __name__ != '__main__': import utils
+else:
+    # In absence of relative imports: walk up from the script location to
+    # the 'xsl_reports' ancestor directory and add it to sys.path so that
+    # the shared 'utils' module can be imported.
+    xsl_path = os.path.abspath( os.path.dirname( sys.argv[ 0 ] ) )
+    while os.path.basename( xsl_path ) != 'xsl_reports': xsl_path = os.path.dirname( xsl_path )
+    sys.path.append( xsl_path )
+
+    import utils
+    main()
diff --git a/tools/regression/src/compiler_status.cpp b/tools/regression/src/compiler_status.cpp
new file mode 100644
index 0000000000..c4444bbcc6
--- /dev/null
+++ b/tools/regression/src/compiler_status.cpp
@@ -0,0 +1,1113 @@
+// Generate Compiler Status HTML from jam regression test output -----------//
+
+// Copyright Beman Dawes 2002.
+
+// Distributed under the Boost Software License, Version 1.0.
+// See http://www.boost.org/LICENSE_1_0.txt
+
+// See http://www.boost.org/tools/regression/ for documentation.
+
+/*******************************************************************************
+
+ Please contact the maintainer, bdawes <at> acm <dot> org, before making
+ any non-trivial changes.
+
+ This program was designed to work unchanged on all platforms and
+ configurations. All output which is platform or configuration dependent
+ is obtained from external sources such as the .xml file from
+ process_jam_log execution, the tools/build/xxx-tools.jam files, or the
+ output of the config_info tests.
+
+ Please avoid adding platform or configuration dependencies during
+ program maintenance.
+
+*******************************************************************************/
+
+#include <boost/config/warning_disable.hpp>
+
+#include "boost/config.hpp"
+#include "boost/filesystem/operations.hpp"
+#include "boost/filesystem/convenience.hpp"
+#include "boost/filesystem/fstream.hpp"
+#include "detail/tiny_xml.hpp"
+namespace fs = boost::filesystem;
+namespace xml = boost::tiny_xml;
+
+#include <cstdlib> // for abort, exit
+#include <cctype> // for toupper
+#include <string>
+#include <vector>
+#include <set>
+#include <map>
+#include <algorithm>
+#include <iostream>
+#include <fstream>
+#include <ctime>
+#include <stdexcept>
+#include <cassert>
+
+using std::string;
+
+const string pass_msg( "Pass" );
+const string warn_msg( "<i>Warn</i>" );
+const string fail_msg( "<font color=\"#FF0000\"><i>Fail</i></font>" );
+const string note_msg( "<sup>*</sup>" );
+const string missing_residue_msg( "<i>Missing</i>" );
+
+const std::size_t max_compile_msg_size = 10000;
+
+namespace
+{
+  fs::path boost_root; // boost-root complete path
+  fs::path locate_root; // locate-root (AKA ALL_LOCATE_TARGET) complete path
+
+  // command-line option flags
+  bool compile_time;
+  bool run_time;
+  bool ignore_pass;
+  bool no_warn;
+  bool no_links;
+  bool boost_build_v2 = true;
+
+  fs::path jamfile_path;
+
+  fs::directory_iterator end_itr;
+
+  // It's important for reliability that we find the same compilers for each
+  // test, and that they match the column header. So save the names at the
+  // time column headings are generated.
+  std::vector<string> toolsets;
+
+  fs::ifstream jamfile;
+  fs::ofstream report;
+  fs::ofstream links_file;
+  string links_name;
+
+  fs::path notes_path;
+  string notes_html;
+
+  fs::path notes_map_path;
+  typedef std::multimap< string, string > notes_map; // key is test_name-toolset,
+                                                     // value is note bookmark
+  notes_map notes;
+
+  string specific_compiler; // if running on one toolset only
+
+  const string empty_string;
+
+  std::vector<int> error_count;
+
+  // prefix for library and test hyperlink prefix
+  string svn_root ( "http://svn.boost.org/trac/boost/browser/trunk/" );
+  string url_prefix_dir_view( svn_root );
+  string url_prefix_checkout_view( svn_root );
+  string url_suffix_text_view( "" );
+
+// get revision number (as a string) if boost_root is svn working copy -----//
+
+  // Return the working-copy revision as a string by reading the fourth
+  // line of boost-root/.svn/entries; empty string when not an svn checkout.
+  string revision( const fs::path & boost_root )
+  {
+    string rev;
+    fs::path entries( boost_root / ".svn" / "entries" );
+    fs::ifstream entries_file( entries );
+    if ( entries_file )
+    {
+      std::getline( entries_file, rev );
+      std::getline( entries_file, rev );
+      std::getline( entries_file, rev );
+      std::getline( entries_file, rev ); // revision number as a string
+    }
+    return rev;
+  }
+
+
+// build notes_bookmarks from notes HTML -----------------------------------//
+
+  // Populate the 'notes' multimap from the --notes-map file: one
+  // "key,bookmark" pair per line; exits the program if the file given
+  // on the command line cannot be opened.
+  void build_notes_bookmarks()
+  {
+    if ( notes_map_path.empty() ) return;
+    fs::ifstream notes_map_file( notes_map_path );
+    if ( !notes_map_file )
+    {
+      std::cerr << "Could not open --notes-map input file: " << notes_map_path.string() << std::endl;
+      std::exit( 1 );
+    }
+    string line;
+    while( std::getline( notes_map_file, line ) )
+    {
+      string::size_type pos = 0;
+      if ( (pos = line.find( ',', pos )) == string::npos ) continue;
+      string key(line.substr( 0, pos ) );
+      string bookmark( line.substr( pos+1 ) );
+
+// std::cout << "inserting \"" << key << "\",\"" << bookmark << "\"\n";
+      notes.insert( notes_map::value_type( key, bookmark ) );
+    }
+  }
+
+// load_notes_html ---------------------------------------------------------//
+
+  // Load the HTML between <body> and </body> from the --notes file into
+  // notes_html. Returns false when no notes file was configured; exits
+  // the program when the configured file cannot be opened.
+  bool load_notes_html()
+  {
+    if ( notes_path.empty() ) return false;
+    fs::ifstream notes_file( notes_path );
+    if ( !notes_file )
+    {
+      std::cerr << "Could not open --notes input file: " << notes_path.string() << std::endl;
+      std::exit( 1 );
+    }
+    string line;
+    bool in_body( false );
+    while( std::getline( notes_file, line ) )
+    {
+      if ( in_body && line.find( "</body>" ) != string::npos ) in_body = false;
+      if ( in_body ) notes_html += line;
+      // Bug fix: the original tested the raw find() result, which is
+      // nonzero (i.e. true) for every line where "<body>" is absent or
+      // not at offset 0; compare against string::npos to detect the tag.
+      else if ( line.find( "<body>" ) != string::npos ) in_body = true;
+    }
+    return true;
+  }
+
+// relative path between two paths -----------------------------------------//
+
+  // Append one ".." to 'target' for each path element by which 'from' is
+  // longer than 'to' (recursive; compares string lengths, so it assumes
+  // 'to' is a prefix of 'from').
+  void relative_path( const fs::path & from, const fs::path & to,
+    fs::path & target )
+  {
+    if ( from.string().size() <= to.string().size() ) return;
+    target /= "..";
+    relative_path( from.branch_path(), to, target );
+    return;
+  }
+
+// extract object library name from target directory string ----------------//
+
+  // Return the path component following "/build/" or "/test/" in 's',
+  // or "" when neither marker is present.
+  string extract_object_library_name( const string & s )
+  {
+    string t( s );
+    string::size_type pos = t.find( "/build/" );
+    if ( pos != string::npos ) pos += 7;
+    else if ( (pos = t.find( "/test/" )) != string::npos ) pos += 6;
+    else return "";
+    return t.substr( pos, t.find( "/", pos ) - pos );
+  }
+
+// find_file ---------------------------------------------------------------//
+// given a directory to recursively search
+
+  // Depth-first search for a file named 'name' under 'dir_path', skipping
+  // any directory called 'ignore_dir_named' at the top level only (the
+  // recursive call does not forward it). On success sets 'path_found'.
+  bool find_file( const fs::path & dir_path, const string & name,
+    fs::path & path_found, const string & ignore_dir_named="" )
+  {
+    if ( !fs::exists( dir_path ) ) return false;
+    for ( fs::directory_iterator itr( dir_path ); itr != end_itr; ++itr )
+      if ( fs::is_directory( *itr )
+        && itr->path().filename() != ignore_dir_named )
+      {
+        if ( find_file( *itr, name, path_found ) ) return true;
+      }
+      else if ( itr->path().filename() == name )
+      {
+        path_found = *itr;
+        return true;
+      }
+    return false;
+  }
+
+// platform_desc -----------------------------------------------------------//
+
+  // Return BOOST_PLATFORM with its first character upper-cased.
+  string platform_desc()
+  {
+    string result = BOOST_PLATFORM;
+    result[0] = std::toupper( result[0] );
+    return result;
+  }
+
+// version_desc ------------------------------------------------------------//
+// from locate-root/status/bin/config_info.test/xxx/.../config_info.output
+
+  // Extract the text after "version " from the first line of the
+  // config_info test output for 'compiler_name'; "" when not found.
+  string version_desc( const string & compiler_name )
+  {
+    string result;
+    fs::path dot_output_path;
+    if ( find_file( locate_root / "bin/boost/status/config_info.test"
+      / compiler_name, "config_info.output", dot_output_path )
+      || find_file( locate_root / "status/bin/config_info.test"
+      / compiler_name, "config_info.output", dot_output_path ) )
+    {
+      fs::ifstream file( dot_output_path );
+      if ( file )
+      {
+        if( std::getline( file, result ) )
+        {
+          string::size_type pos = result.find( "version " );
+          if ( pos != string::npos )
+          {
+            result.erase( 0, pos+8 );
+          }
+          else result.clear();
+        }
+      }
+    }
+    return result;
+  }
+
+// compiler_desc -----------------------------------------------------------//
+// from boost-root/tools/build/xxx-tools.jam
+
+  // Return the human-readable compiler description: the first "#//"
+  // comment line of the toolset's *-tools.jam file (v1 location tried
+  // first); "" when no such line exists.
+  string compiler_desc( const string & compiler_name )
+  {
+    string result;
+    fs::path tools_path( boost_root / "tools/build/v1" / (compiler_name
+      + "-tools.jam") );
+    if ( !fs::exists( tools_path ) )
+      tools_path = boost_root / "tools/build" / (compiler_name + "-tools.jam");
+    fs::ifstream file( tools_path );
+    if ( file )
+    {
+      while( std::getline( file, result ) )
+      {
+        if ( result.substr( 0, 3 ) == "#//" )
+        {
+          result.erase( 0, 3 );
+          return result;
+        }
+      }
+      result.clear();
+    }
+    return result;
+  }
+
+// target_directory --------------------------------------------------------//
+// this amounts to a request to find a unique leaf directory
+
+  // Descend from 'root' following the (expected unique) child directory
+  // at each level; warns and takes the first child when several exist.
+  fs::path target_directory( const fs::path & root )
+  {
+    if ( !fs::exists( root ) ) return fs::path("no-such-path");
+    fs::path child;
+    for ( fs::directory_iterator itr( root ); itr != end_itr; ++itr )
+    {
+      if ( fs::is_directory( *itr ) )
+      {
+        // SunCC creates an internal subdirectory everywhere it writes
+        // object files. This confuses the target_directory() algorithm.
+        // This patch ignores the SunCC internal directory. Jens Maurer
+        if ( itr->path().filename() == "SunWS_cache" ) continue;
+        // SGI does something similar for template instantiations. Jens Maurer
+        if( itr->path().filename() == "ii_files" ) continue;
+
+        if ( child.empty() ) child = *itr;
+        else
+        {
+          std::cout << "Warning: only first of two target possibilities will be reported for: \n "
+            << root.string() << ": " << child.filename()
+            << " and " << itr->path().filename() << "\n";
+        }
+      }
+    }
+    if ( child.empty() ) return root; // this dir has no children
+    return target_directory( child );
+  }
+
+// element_content ---------------------------------------------------------//
+
+  // Return the content of the first child element of 'root' named 'name',
+  // or a reference to a static empty string when absent.
+  const string & element_content(
+    const xml::element & root, const string & name )
+  {
+    static string empty_string;
+    xml::element_list::const_iterator itr;
+    for ( itr = root.elements.begin();
+      itr != root.elements.end() && (*itr)->name != name;
+      ++itr ) {}
+    return itr != root.elements.end() ? (*itr)->content : empty_string;
+  }
+
+// find_element ------------------------------------------------------------//
+
+  const xml::element empty_element;
+
+  // Return the first child element of 'root' named 'name', or the shared
+  // 'empty_element' when absent.
+  const xml::element & find_element(
+    const xml::element & root, const string & name )
+  {
+    xml::element_list::const_iterator itr;
+    for ( itr = root.elements.begin();
+      itr != root.elements.end() && (*itr)->name != name;
+      ++itr ) {}
+    return itr != root.elements.end() ? *((*itr).get()) : empty_element;
+  }
+
+// attribute_value ----------------------------------------------------------//
+
+// Return the value of 'attribute_name' on 'element', or a reference to a
+// static empty string when the attribute is not present.
+const string & attribute_value( const xml::element & element,
+  const string & attribute_name )
+{
+  static const string empty_string;
+  xml::attribute_list::const_iterator atr;
+  for ( atr = element.attributes.begin();
+    atr != element.attributes.end() && atr->name != attribute_name;
+    ++atr ) {}
+  return atr == element.attributes.end() ? empty_string : atr->value;
+}
+
+// find_bin_path -----------------------------------------------------------//
+
+// Takes a relative path from boost root to a Jamfile.
+// Returns the directory where the build targets from
+// that Jamfile are located. If not found, emits a warning
+// and returns empty path.
+const fs::path find_bin_path(const string& relative)
+{
+  fs::path bin_path;
+  if (boost_build_v2)
+  {
+    // Boost.Build v2 layout: bin.v2/, falling back to bin/.
+    if ( relative == "status" )
+      bin_path = locate_root / "bin.v2" / "libs";
+    else
+    {
+      bin_path = locate_root / "bin.v2" / relative;
+      if (!fs::exists(bin_path))
+        bin_path = locate_root / "bin" / relative;
+    }
+    if (!fs::exists(bin_path))
+    {
+      std::cerr << "warning: could not find build results for '"
+        << relative << "'.\n";
+      std::cerr << "warning: tried directory "
+        << bin_path.string() << "\n";
+      bin_path = "";
+    }
+  }
+  else
+  {
+    // Boost.Build v1 layout: try the historical candidate locations in turn.
+    bin_path = locate_root / "bin/boost" / relative;
+    if (!fs::exists(bin_path))
+    {
+      bin_path = locate_root / "bin" / relative / "bin";
+      if (!fs::exists(bin_path))
+      {
+        bin_path = fs::path( locate_root / relative / "bin" );
+        if (!fs::exists(bin_path))
+        {
+          bin_path = fs::path( locate_root / "bin/boost/libs" /
+            relative.substr( relative.find( '/' )+1 ) );
+        }
+      }
+    }
+    if (!fs::exists(bin_path))
+    {
+      std::cerr << "warning: could not find build results for '"
+        << relative << "'.\n";
+      bin_path = "";
+    }
+  }
+  return bin_path;
+}
+
+
+// generate_report ---------------------------------------------------------//
+
+ // return 0 if nothing generated, 1 otherwise, except 2 if compiler msgs
+  // Emit the links-file entry for one test result: compiler, linker and
+  // run output sections plus, for object-library failures, a cross
+  // reference and (once per library) the library's own failure report.
+  int generate_report( const xml::element & db,
+    const string & source_library_name,
+    const string & test_type,
+    const string & test_name, // possibly object library name
+    const string & toolset,
+    bool pass,
+    bool always_show_run_output = false )
+  {
+    // compile msgs sometimes modified, so make a local copy
+    string compile( ((pass && no_warn)
+      ? empty_string : element_content( db, "compile" )) );
+    const string & link( pass ? empty_string : element_content( db, "link" ) );
+    const string & run( (pass && !always_show_run_output)
+      ? empty_string : element_content( db, "run" ) );
+    string lib( (pass ? empty_string : element_content( db, "lib" )) );
+
+    // Strip an evaluation-license banner some compilers prepend.
+    string::size_type pos;
+    if ( (pos = compile.find("30 DAY EVALUATION LICENSE")) != string::npos )
+    {
+      compile.erase(pos, 25);
+      while ( compile[0] == '\n' || compile[0] == '\r' ) compile.erase(0,1);
+    }
+
+    // some compilers output the filename even if there are no errors or
+    // warnings; detect this if one line of output and it contains no space.
+    pos = compile.find( '\n', 1 );
+    if ( pos != string::npos && compile.size()-pos <= 2
+      && compile.find( ' ' ) == string::npos ) compile.clear();
+
+    if ( lib.empty()
+      && (compile.empty() || test_type == "compile_fail")
+      && link.empty() && run.empty() ) return 0;
+
+    int result = 1; // some kind of msg for sure
+
+    // limit compile message length
+    if ( compile.size() > max_compile_msg_size )
+    {
+      compile.erase( max_compile_msg_size );
+      compile += "...\n (remainder deleted because of excessive size)\n";
+    }
+
+    // Anchor is "<library>-<test>-<toolset>" so the status table can link in.
+    links_file << "<h2><a name=\""
+      << source_library_name << "-" << test_name << "-" << toolset << "\">"
+      << source_library_name << " - " << test_name << " - " << toolset << "</a></h2>\n";
+
+    if ( !compile.empty() )
+    {
+      ++result;
+      links_file << "<h3>Compiler output:</h3><pre>"
+        << compile << "</pre>\n";
+    }
+    if ( !link.empty() )
+      links_file << "<h3>Linker output:</h3><pre>" << link << "</pre>\n";
+    if ( !run.empty() )
+      links_file << "<h3>Run output:</h3><pre>" << run << "</pre>\n";
+
+    // for an object library failure, generate a reference to the object
+    // library failure message, and (once only) generate the object
+    // library failure message itself
+    static std::set< string > failed_lib_target_dirs; // only generate once
+    if ( !lib.empty() )
+    {
+      if ( lib[0] == '\n' ) lib.erase( 0, 1 );
+      string object_library_name( extract_object_library_name( lib ) );
+
+      // changing the target directory naming scheme breaks
+      // extract_object_library_name()
+      assert( !object_library_name.empty() );
+      if ( object_library_name.empty() )
+        std::cerr << "Failed to extract object library name from " << lib << "\n";
+
+      links_file << "<h3>Library build failure: </h3>\n"
+        "See <a href=\"#"
+        << source_library_name << "-"
+        << object_library_name << "-" << toolset << "\">"
+        << source_library_name << " - "
+        << object_library_name << " - " << toolset << "</a>";
+
+      if ( failed_lib_target_dirs.find( lib ) == failed_lib_target_dirs.end() )
+      {
+        failed_lib_target_dirs.insert( lib );
+        fs::path pth( locate_root / lib / "test_log.xml" );
+        fs::ifstream file( pth );
+        if ( file )
+        {
+          // Recurse to report the library's own build failure details.
+          xml::element_ptr db = xml::parse( file, pth.string() );
+          generate_report( *db, source_library_name, test_type, object_library_name, toolset, false );
+        }
+        else
+        {
+          links_file << "<h2><a name=\""
+            << object_library_name << "-" << toolset << "\">"
+            << object_library_name << " - " << toolset << "</a></h2>\n"
+            "test_log.xml not found\n";
+        }
+      }
+    }
+    return result;
+  }
+
+ // add_notes --------------------------------------------------------------//
+
+  // Append HTML superscript links to `target` for every note registered
+  // under `key` in the global notes map; `sep` is the separator to emit
+  // before each link and is set to "," after the first one.  A note whose
+  // text begins with '-' is shown regardless of pass/fail; other notes are
+  // shown only when `fail` is true.
+  void add_notes( const string & key, bool fail, string & sep, string & target )
+  {
+    notes_map::const_iterator itr = notes.lower_bound( key );
+    if ( itr != notes.end() && itr->first == key )
+    {
+      for ( ; itr != notes.end() && itr->first == key; ++itr )
+      {
+        // strip the leading '-' marker (if any) from the displayed text
+        string note_desc( itr->second[0] == '-'
+          ? itr->second.substr( 1 ) : itr->second );
+        if ( fail || itr->second[0] == '-' )
+        {
+          target += sep;
+          sep = ",";
+          target += "<a href=\"";
+          target += "#";
+          target += note_desc;
+          target += "\">";
+          target += note_desc;
+          target += "</a>";
+        }
+      }
+    }
+  }
+
+ // get_notes -------------------------------------------------------------//
+
+  // Build the "<sup>...</sup>" notes markup for one result cell, trying
+  // the four toolset/library/test key patterns from most to least specific
+  // ('*' is the wildcard position).  Returns an empty string if no note
+  // matched.
+  string get_notes( const string & toolset,
+    const string & library, const string & test, bool fail )
+  {
+    string sep;
+    string target( "<sup>" );
+    add_notes( toolset + "/" + library + "/" + test, fail, sep, target );
+    add_notes( "*/" + library + "/" + test, fail, sep, target );
+    add_notes( toolset + "/" + library + "/*", fail, sep, target );
+    add_notes( "*/" + library + "/*", fail, sep, target );
+    if ( target == "<sup>" ) target.clear();
+    else target += "</sup>";
+    return target;
+  }
+
+ // do_cell ---------------------------------------------------------------//
+
+  // Emit one "<td>...</td>" status cell for test `test_name` built with
+  // `toolset`: pass/warn/fail text, a link into the links file when detail
+  // was generated, note superscripts, and optional compile/run timings.
+  // Also increments error_count[compiler] on failure.
+  bool do_cell(
+    int compiler,
+    const string & lib_name,
+    const fs::path & test_dir,
+    const string & test_type,
+    const string & test_name,
+    const string & toolset,
+    string & target,
+    bool always_show_run_output )
+  // return true if any results except simple pass_msg
+  {
+    fs::path target_dir( target_directory( test_dir / toolset ) );
+    bool pass = false;
+
+    if ( !fs::exists( target_dir / "test_log.xml" ) )
+    {
+      std::cerr << "Missing test_log.xml in target:\n "
+        << target_dir.string() << "\n";
+      target += "<td>" + missing_residue_msg + "</td>";
+      return true;
+    }
+
+    int anything_generated = 0;
+    bool note = false;
+
+    fs::path pth( target_dir / "test_log.xml" );
+    fs::ifstream file( pth );
+    if ( !file )
+    {
+      std::cerr << "Can't open test_log.xml in target:\n "
+        << target_dir.string() << "\n";
+      target += "<td>" + missing_residue_msg + "</td>";
+      return false;
+    }
+
+    xml::element_ptr dbp = xml::parse( file, pth.string() );
+    const xml::element & db( *dbp );
+
+    // normalize the test type to the name of the result element to check:
+    // "run_pyd" -> "run", and "xxx_fail" -> "xxx"
+    std::string test_type_base( test_type );
+    if ( test_type_base == "run_pyd" ) test_type_base = "run";
+    else if ( test_type_base.size() > 5 )
+    {
+      const string::size_type trailer = test_type_base.size() - 5;
+      if ( test_type_base.substr( trailer ) == "_fail" )
+      {
+        test_type_base.erase( trailer );
+      }
+    }
+    const xml::element & test_type_element( find_element( db, test_type_base ) );
+
+    pass = !test_type_element.name.empty()
+      && attribute_value( test_type_element, "result" ) != "fail";
+
+    if ( !no_links )
+    {
+      note = attribute_value( test_type_element, "result" ) == "note";
+
+      // generate bookmarked report of results, and link to it
+      anything_generated
+        = generate_report( db, lib_name, test_type, test_name, toolset, pass,
+          always_show_run_output || note );
+    }
+
+    target += "<td>";
+
+    // generate the status table cell pass/warn/fail HTML
+    if ( anything_generated != 0 )
+    {
+      // link to the "library-test-toolset" bookmark generate_report wrote
+      target += "<a href=\"";
+      target += links_name;
+      target += "#";
+      target += lib_name;
+      target += "-";
+      target += test_name;
+      target += "-";
+      target += toolset;
+      target += "\">";
+      target += pass
+        ? (anything_generated < 2 ? pass_msg : warn_msg)
+        : fail_msg;
+      target += "</a>";
+      if ( pass && note ) target += note_msg;
+    }
+    else  target += pass ? pass_msg : fail_msg;
+
+    // if notes, generate the superscript HTML
+    if ( !notes.empty() )
+      target += get_notes( toolset, lib_name, test_name, !pass );
+
+    // generate compile-time if requested
+    if ( compile_time )
+    {
+      const xml::element & compile_element( find_element( db, "compile" ) );
+
+      if ( !compile_element.name.empty() )
+      {
+        string times = attribute_value( compile_element, "timings" );
+        if ( !times.empty() )
+        {
+          target += "<br>";
+          // first whitespace-separated field of the "timings" attribute
+          target += times.substr( 0, times.find( " " ) );
+        }
+      }
+    }
+
+    // generate run-time if requested
+    if ( run_time )
+    {
+      const xml::element & run_element( find_element( db, "run" ) );
+
+      if ( !run_element.name.empty() )
+      {
+        string times = attribute_value( run_element, "timings" );
+        if ( !times.empty() )
+        {
+          target += "<br>";
+          target += times.substr( 0, times.find( " " ) );
+        }
+      }
+    }
+
+    if ( !pass ) ++error_count[compiler];
+
+    target += "</td>";
+    return (anything_generated != 0) || !pass;
+  }
+
+// do_row ------------------------------------------------------------------//
+
+  // Build one complete "<tr>...</tr>" for a test: library / test-name /
+  // test-type cells followed by one do_cell() per toolset.  When
+  // --ignore-pass is set and nothing was worth reporting, the whole row
+  // is erased again.
+  void do_row(
+    const fs::path & test_dir, // locate_root / "status/bin/any_test.test"
+    const string & test_name, // "any_test"
+    string & target )
+  {
+    // get library name, test-type, test-program path, etc., from the .xml file
+    string lib_name;
+    string test_path( test_name ); // test_name is default if missing .test
+    string test_type( "unknown" );
+    bool always_show_run_output( false );
+    fs::path xml_file_path;
+    if ( find_file( test_dir, "test_log.xml", xml_file_path ) )
+    {
+      fs::ifstream file( xml_file_path );
+      if ( file )
+      {
+        xml::element_ptr dbp = xml::parse( file, xml_file_path.string() );
+        const xml::element & db( *dbp );
+        test_path = attribute_value( db, "test-program" );
+        lib_name = attribute_value( db, "library" );
+        test_type = attribute_value( db, "test-type" );
+        always_show_run_output
+          = attribute_value( db, "show-run-output" ) == "true";
+      }
+    }
+
+    // generate the library name, test name, and test type table data
+    string::size_type row_start_pos = target.size();  // for possible erase below
+    target += "<tr><td><a href=\"" + url_prefix_dir_view + "/libs/" + lib_name
+      + "\">" + lib_name + "</a></td>";
+    target += "<td><a href=\"" + url_prefix_checkout_view + "/" + test_path
+      + url_suffix_text_view + "\">" + test_name + "</a>";
+
+    if ( compile_time ) target += "<br> Compile time:";
+    if ( run_time ) target += "<br> Run time:";
+
+    target += "</td>";
+    target += "<td>" + test_type + "</td>";
+
+    bool no_warn_save = no_warn;
+    //if ( test_type.find( "fail" ) != string::npos ) no_warn = true;
+
+    // for each compiler, generate <td>...</td> html
+    bool anything_to_report = false;
+    int compiler = 0;
+    for ( std::vector<string>::const_iterator itr=toolsets.begin();
+      itr != toolsets.end(); ++itr, ++compiler )
+    {
+      anything_to_report |= do_cell( compiler, lib_name, test_dir, test_type, test_name, *itr, target,
+        always_show_run_output );
+    }
+
+    target += "</tr>";
+    if ( ignore_pass && !anything_to_report ) target.erase( row_start_pos );
+    no_warn = no_warn_save;
+  }
+
+// do_rows_for_sub_tree ----------------------------------------------------//
+
+  // Generate a result row for every "*.test" subdirectory directly under
+  // bin_dir, appending each row's HTML string to `results`.
+  void do_rows_for_sub_tree(
+    const fs::path & bin_dir, std::vector<string> & results )
+  {
+    for ( fs::directory_iterator itr( bin_dir ); itr != end_itr; ++itr )
+    {
+      // directories whose path ends in ".test" hold test results
+      if ( fs::is_directory( *itr )
+        && itr->path().string().find( ".test" ) == (itr->path().string().size()-5) )
+      {
+        results.push_back( std::string() );
+        // strip the trailing ".test" (5 chars) to recover the test name
+        do_row( *itr,
+          itr->path().filename().string().substr( 0,
+            itr->path().filename().string().size()-5 ),
+          results[results.size()-1] );
+      }
+    }
+  }
+
+// find_compilers ------------------------------------------------------------//
+
+  // Scan bin_dir's subdirectories for toolset names: fill the global
+  // `toolsets` vector, write one "<td>" column heading per compiler to
+  // `report`, and size `error_count`.  When --compiler was given, only
+  // that toolset is kept.
+  void find_compilers(const fs::path & bin_dir)
+  {
+    fs::directory_iterator compiler_itr( bin_dir );
+    if ( specific_compiler.empty() )
+      std::clog << "Using " << bin_dir.string() << " to determine compilers\n";
+    for (; compiler_itr != end_itr; ++compiler_itr )
+    {
+      if ( fs::is_directory( *compiler_itr )  // check just to be sure
+        && compiler_itr->path().filename() != "test" ) // avoid strange directory (Jamfile bug?)
+      {
+        if ( specific_compiler.size() != 0
+          && specific_compiler != compiler_itr->path().filename() ) continue;
+        toolsets.push_back( compiler_itr->path().filename().string() );
+        // prefer the human-readable description/version when available
+        string desc( compiler_desc( compiler_itr->path().filename().string() ) );
+        string vers( version_desc( compiler_itr->path().filename().string() ) );
+        report << "<td>"
+          << (desc.size() ? desc : compiler_itr->path().filename().string())
+          << (vers.size() ? (string( "<br>" ) + vers ) : string( "" ))
+          << "</td>\n";
+        error_count.push_back( 0 );
+      }
+    }
+  }
+
+// do_table_body -----------------------------------------------------------//
+
+  // Generate all result rows: first for bin_dir itself, then for every
+  // directory named by a "subinclude" (BBv1) / "build-project" (BBv2)
+  // or "run-tests ... ;" clause found in the Jamfile.  Rows are sorted
+  // before being written to `report`.
+  void do_table_body( const fs::path & bin_dir )
+  {
+    // rows are held in a vector so they can be sorted, if desired.
+    std::vector<string> results;
+
+    // do primary bin directory
+    do_rows_for_sub_tree( bin_dir, results );
+
+    // do subinclude bin directories
+    jamfile.clear();       // rewind: the Jamfile stream was read earlier
+    jamfile.seekg(0);
+    string line;
+    bool run_tests = false;
+
+    while( std::getline( jamfile, line ) )
+    {
+      bool v2(false);
+      string::size_type sub_pos( line.find( "subinclude" ) );
+      if ( sub_pos == string::npos ) {
+        sub_pos = line.find( "build-project" );
+        v2 = true;
+      }
+      // ignore matches that are inside a '#' comment
+      if ( sub_pos != string::npos
+        && line.find( '#' ) > sub_pos )
+      {
+        // skip past the keyword (13 chars "build-project", 10 "subinclude")
+        // plus any " \t./" filler to reach the directory name
+        if (v2)
+          sub_pos = line.find_first_not_of( " \t./", sub_pos+13 );
+        else
+          sub_pos = line.find_first_not_of( " \t./", sub_pos+10 );
+
+        if ( sub_pos == string::npos ) continue;
+        string subinclude_bin_dir(
+          line.substr( sub_pos, line.find_first_of( " \t", sub_pos )-sub_pos ) );
+
+        fs::path bin_path = find_bin_path(subinclude_bin_dir);
+        if (!bin_path.empty())
+          do_rows_for_sub_tree( bin_path, results );
+      }
+      if ( ! run_tests )
+      {
+        // a line starting with "run-tests" opens a multi-line list of
+        // library names, terminated by a line containing ';'
+        string::size_type run_pos = line.find("run-tests");
+        if ( run_pos != string::npos && line.find_first_not_of(" \t") == run_pos )
+          run_tests = true;
+      }
+      else
+      {
+        if ( line.find(";") != string::npos )
+          run_tests = false;
+        else
+        {
+          string::size_type pos = line.find_first_not_of( " \t" );
+          if ( pos != string::npos && line[pos] != '#' )
+          {
+            string::size_type end_pos = line.find_first_of(" \t#", pos);
+            string::iterator end = end_pos != string::npos ? line.begin() + end_pos : line.end();
+            string run_tests_bin_dir(line.begin() + pos, end);
+            fs::path bin_path = find_bin_path("libs/" + run_tests_bin_dir);
+            if (!bin_path.empty())
+              do_rows_for_sub_tree( bin_path, results );
+          }
+        }
+      }
+    }
+
+
+    std::sort( results.begin(), results.end() );
+
+    for ( std::vector<string>::iterator v(results.begin());
+      v != results.end(); ++v )
+      { report << *v << "\n"; }
+  }
+
+// do_table ----------------------------------------------------------------//
+
+  // Generate the complete results table: column headings (via
+  // find_compilers on the first *.test directory found), the body rows,
+  // and a final failure-count row.
+  void do_table()
+  {
+    // Find test result locations, trying:
+    // - Boost.Build V1 location with ALL_LOCATE_TARGET
+    // - Boost.Build V2 location with top-level "build-dir"
+    // - Boost.Build V1 location without ALL_LOCATE_TARGET
+    string relative( fs::initial_path().string() );
+
+#ifdef BOOST_WINDOWS_API
+    // normalize the drive letter so the prefix comparisons below match
+    if (relative.size() > 1 && relative[1] == ':') relative[0] = std::tolower(relative[0]);
+#endif
+
+    if ( relative.find(boost_root.string()) != string::npos )
+      relative.erase( 0, boost_root.string().size()+1 );
+    else if ( relative.find(locate_root.string()) != string::npos )
+      relative.erase( 0, locate_root.string().size()+1 );
+    fs::path bin_path = find_bin_path(relative);
+
+    report << "<table border=\"1\" cellspacing=\"0\" cellpadding=\"5\">\n";
+
+    // generate the column headings
+
+    report << "<tr><td>Library</td><td>Test Name</td>\n"
+      "<td><a href=\"compiler_status.html#test-type\">Test Type</a></td>\n";
+
+    if ( relative == "status" )
+    {
+      // status builds nest results; search recursively for a *.test dir
+      fs::recursive_directory_iterator ritr( bin_path );
+      fs::recursive_directory_iterator end_ritr;
+      while ( ritr != end_ritr
+        && ((ritr->path().string().find( ".test" ) != (ritr->path().string().size()-5))
+        || !fs::is_directory( *ritr )))
+        ++ritr; // bypass chaff
+      if ( ritr != end_ritr )
+      {
+        find_compilers( *ritr );
+      }
+    }
+    else
+    {
+      fs::directory_iterator itr( bin_path );
+      while ( itr != end_itr
+        && ((itr->path().string().find( ".test" ) != (itr->path().string().size()-5))
+        || !fs::is_directory( *itr )))
+        ++itr; // bypass chaff
+      if ( itr != end_itr )
+      {
+        find_compilers( *itr );
+      }
+    }
+
+    report << "</tr>\n";
+
+    // now the rest of the table body
+
+    do_table_body( bin_path );
+
+    // error total row
+
+    report << "<tr> <td> &nbsp;</td><td>Number of Failures</td><td> &nbsp;</td>\n";
+
+    // for each compiler, generate <td>...</td> html
+    int compiler = 0;
+    for ( std::vector<string>::const_iterator itr=toolsets.begin();
+      itr != toolsets.end(); ++itr, ++compiler )
+    {
+      report << "<td align=\"center\">" << error_count[compiler] << "</td>\n";
+    }
+
+    report << "</tr>\n</table>\n";
+  }
+
+} // unnamed namespace
+
+// main --------------------------------------------------------------------//
+
+#define BOOST_NO_CPP_MAIN_SUCCESS_MESSAGE
+#include <boost/test/included/prg_exec_monitor.hpp>
+
+// Program entry point (run under Boost's program execution monitor).
+// Parses command-line options, opens the Jamfile and output files, writes
+// the HTML page headers, generates the results table, and appends notes.
+// Returns 0 on success, 1 on usage or file-open errors.
+int cpp_main( int argc, char * argv[] ) // note name!
+{
+  fs::path comment_path;
+  // consume leading "--option [value]" pairs; argv/argc are shifted as
+  // each option (and its value, if any) is recognized
+  while ( argc > 1 && *argv[1] == '-' )
+  {
+    if ( argc > 2 && std::strcmp( argv[1], "--compiler" ) == 0 )
+      { specific_compiler = argv[2]; --argc; ++argv; }
+    else if ( argc > 2 && std::strcmp( argv[1], "--locate-root" ) == 0 )
+      { locate_root = fs::path( argv[2] ); --argc; ++argv; }
+    else if ( argc > 2 && std::strcmp( argv[1], "--comment" ) == 0 )
+      { comment_path = fs::path( argv[2] ); --argc; ++argv; }
+    else if ( argc > 2 && std::strcmp( argv[1], "--notes" ) == 0 )
+      { notes_path = fs::path( argv[2] ); --argc; ++argv; }
+    else if ( argc > 2 && std::strcmp( argv[1], "--notes-map" ) == 0 )
+      { notes_map_path = fs::path( argv[2] ); --argc; ++argv; }
+    else if ( std::strcmp( argv[1], "--ignore-pass" ) == 0 ) ignore_pass = true;
+    else if ( std::strcmp( argv[1], "--no-warn" ) == 0 ) no_warn = true;
+    else if ( std::strcmp( argv[1], "--v1" ) == 0 ) boost_build_v2 = false;
+    else if ( std::strcmp( argv[1], "--v2" ) == 0 ) boost_build_v2 = true;
+    else if ( argc > 2 && std::strcmp( argv[1], "--jamfile" ) == 0)
+      { jamfile_path = fs::path( argv[2] ); --argc; ++argv; }
+    else if ( std::strcmp( argv[1], "--compile-time" ) == 0 ) compile_time = true;
+    else if ( std::strcmp( argv[1], "--run-time" ) == 0 ) run_time = true;
+    else { std::cerr << "Unknown option: " << argv[1] << "\n"; argc = 1; }
+    --argc;
+    ++argv;
+  }
+
+  if ( argc != 3 && argc != 4 )
+  {
+    std::cerr <<
+      "Usage: compiler_status [options...] boost-root status-file [links-file]\n"
+      "  boost-root is the path to the boost tree root directory.\n"
+      "  status-file and links-file are paths to the output files.\n"
+      "  Must be run from directory containing Jamfile\n"
+      "  options: --compiler name     Run for named compiler only\n"
+      "           --locate-root path  Path to ALL_LOCATE_TARGET for bjam;\n"
+      "                               default boost-root.\n"
+      "           --comment path      Path to file containing HTML\n"
+      "                               to be copied into status-file.\n"
+      "           --notes path        Path to file containing HTML\n"
+      "                               to be copied into status-file.\n"
+      "           --notes-map path    Path to file of toolset/test,n lines, where\n"
+      "                               n is number of note bookmark in --notes file.\n"
+      "           --jamfile path      Path to Jamfile. By default \"Jamfile\".\n"
+      "           --v1                Assume Boost.Build version 1.\n"
+      "           --v2                Assume Boost.Build version 2. (default)\n"
+      "           --ignore-pass       Ignore passing tests.\n"
+      "           --no-warn           Do not report warnings.\n"
+      "           --compile-time      Show compile time.\n"
+      "           --run-time          Show run time.\n"
+      "Example: compiler_status --compiler gcc /boost-root cs.html cs-links.html\n"
+      "Note: Only the leaf of the links-file path and --notes file string are\n"
+      "used in status-file HTML links. Thus for browsing, status-file,\n"
+      "links-file, and --notes file must all be in the same directory.\n"
+      ;
+    return 1;
+  }
+
+  boost_root = fs::path( argv[1] );
+  if ( locate_root.empty() ) locate_root = boost_root;
+
+  // default Jamfile name depends on the Boost.Build version in use
+  if (jamfile_path.empty())
+    if (boost_build_v2)
+      jamfile_path = "Jamfile.v2";
+    else
+      jamfile_path = "Jamfile";
+  jamfile_path = fs::absolute( jamfile_path, fs::initial_path() );
+  jamfile.open( jamfile_path );
+  if ( !jamfile )
+  {
+    std::cerr << "Could not open Jamfile: " << jamfile_path.string() << std::endl;
+    return 1;
+  }
+
+  report.open( fs::path( argv[2] ) );
+  if ( !report )
+  {
+    std::cerr << "Could not open report output file: " << argv[2] << std::endl;
+    return 1;
+  }
+
+  // links output is optional; without it, detail generation is suppressed
+  if ( argc == 4 )
+  {
+    fs::path links_path( argv[3] );
+    links_name = links_path.filename().string();
+    links_file.open( links_path );
+    if ( !links_file )
+    {
+      std::cerr << "Could not open links output file: " << argv[3] << std::endl;
+      return 1;
+    }
+  }
+  else no_links = true;
+
+  build_notes_bookmarks();
+
+  // report timestamps are in UTC
+  char run_date[128];
+  std::time_t tod;
+  std::time( &tod );
+  std::strftime( run_date, sizeof(run_date),
+    "%X UTC, %A %d %B %Y", std::gmtime( &tod ) );
+
+  std::string rev = revision( boost_root );
+
+  report << "<html>\n"
+          "<head>\n"
+          "<title>Boost Test Results</title>\n"
+          "</head>\n"
+          "<body bgcolor=\"#ffffff\" text=\"#000000\">\n"
+          "<table border=\"0\">\n"
+          "<tr>\n"
+          "<td><img border=\"0\" src=\"http://www.boost.org/boost.png\" width=\"277\" "
+          "height=\"86\"></td>\n"
+          "<td>\n"
+          "<h1>Boost Test Results - " + platform_desc() + "</h1>\n"
+          "<b>Run</b> "
+          << run_date;
+  if ( !rev.empty() ) report << ", <b>Revision</b> " << rev;
+  report << "\n";
+
+
+  if ( compile_time )
+    report << "<p>Times reported are elapsed wall clock time in seconds.</p>\n";
+
+
+  // copy any user-supplied HTML comment verbatim into the report
+  if ( !comment_path.empty() )
+  {
+    fs::ifstream comment_file( comment_path );
+    if ( !comment_file )
+    {
+      std::cerr << "Could not open \"--comment\" input file: " << comment_path.string() << std::endl;
+      return 1;
+    }
+    char c;
+    while ( comment_file.get( c ) ) { report.put( c ); }
+  }
+
+  report << "</td>\n</table>\n<br>\n";
+
+  if ( !no_links )
+  {
+    links_file
+      << "<html>\n"
+      "<head>\n"
+      "<title>Boost Test Details</title>\n"
+      "</head>\n"
+      "<body bgcolor=\"#ffffff\" text=\"#000000\">\n"
+      "<table border=\"0\">\n"
+      "<tr>\n"
+      "<td><img border=\"0\" src=\"http://www.boost.org/boost.png\" width=\"277\" "
+      "height=\"86\"></td>\n"
+      "<td>\n"
+      "<h1>Boost Test Details - " + platform_desc() + "</h1>\n"
+      "<b>Run Date:</b> "
+      << run_date;
+    if ( !rev.empty() ) links_file << ", <b>Revision</b> " << rev;
+    links_file << "\n</td>\n</table>\n<br>\n";
+  }
+
+  do_table();
+
+  if ( load_notes_html() ) report << notes_html << "\n";
+
+  report << "</body>\n"
+          "</html>\n"
+          ;
+
+  if ( !no_links )
+  {
+    links_file
+      << "</body>\n"
+         "</html>\n"
+         ;
+  }
+  return 0;
+}
diff --git a/tools/regression/src/detail/tiny_xml.cpp b/tools/regression/src/detail/tiny_xml.cpp
new file mode 100644
index 0000000000..682c04ff7b
--- /dev/null
+++ b/tools/regression/src/detail/tiny_xml.cpp
@@ -0,0 +1,167 @@
+// tiny XML sub-set tools implementation -----------------------------------//
+
+// (C) Copyright Beman Dawes 2002. Distributed under the Boost
+// Software License, Version 1.0. (See accompanying file
+// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+#include "tiny_xml.hpp"
+#include <cassert>
+#include <cstring>
+
+namespace
+{
+
+  // Advance `c` (and the stream) past spaces, CR, LF, and tabs.
+  // On return `c` holds the first non-whitespace character.
+  void eat_whitespace( char & c, std::istream & in )
+  {
+    while ( c == ' ' || c == '\r' || c == '\n' || c == '\t' )
+      in.get( c );
+  }
+
+  // Read an XML name (letters, digits, '_', '-', '.') starting at `c`.
+  // Leaves `c` holding the first character past the name.
+  // Throws std::string on unexpected end-of-file.
+  std::string get_name( char & c, std::istream & in )
+  {
+    std::string result;
+    eat_whitespace( c, in );
+    while ( std::strchr(
+      "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_-.", c )
+      != 0 )
+    {
+      result += c;
+      if(!in.get( c ))
+        throw std::string("xml: unexpected eof");
+    }
+    return result;
+  }
+
+  // Require the next non-whitespace character to be `delim`, consuming it
+  // and loading the following character into `c`.  `msg` is appended to
+  // the thrown error string for context.
+  void eat_delim( char & c, std::istream & in,
+    char delim, const std::string & msg )
+  {
+    eat_whitespace( c, in );
+    if ( c != delim )
+      throw std::string("xml syntax error, expected ") + delim
+        + " (" + msg + ")";
+    in.get( c );
+  }
+
+  // Read an attribute value up to (but not including) the closing '"',
+  // then consume the quote, leaving `c` at the character after it.
+  std::string get_value( char & c, std::istream & in )
+  {
+    std::string result;
+    while ( c != '\"' )
+    {
+      result += c;
+      in.get( c );
+    }
+    in.get( c );
+    return result;
+  }
+
+}
+
+namespace boost
+{
+ namespace tiny_xml
+ {
+
+ // parse -----------------------------------------------------------------//
+
+    // Recursively parse one element (attributes, sub-elements, content,
+    // and matching end tag) from `in`.  `msg` is context text appended to
+    // error strings.  Throws std::string on syntax errors or premature eof.
+    element_ptr parse( std::istream & in, const std::string & msg )
+    {
+      char c = 0;  // current character
+      element_ptr e( new element );
+
+      if(!in.get( c ))
+        throw std::string("xml: unexpected eof");
+      // tolerate being positioned either at '<' or just past it
+      if ( c == '<' )
+        if(!in.get( c ))
+          throw std::string("xml: unexpected eof");
+
+      e->name = get_name( c, in );
+      eat_whitespace( c, in );
+
+      // attributes
+      while ( c != '>' )
+      {
+        attribute a;
+        a.name = get_name( c, in );
+
+        eat_delim( c, in, '=', msg );
+        eat_delim( c, in, '\"', msg );
+
+        a.value = get_value( c, in );
+
+        e->attributes.push_back( a );
+        eat_whitespace( c, in );
+      }
+      if(!in.get( c )) // next after '>'
+        throw std::string("xml: unexpected eof");
+
+      eat_whitespace( c, in );
+
+      // sub-elements
+      while ( c == '<' )
+      {
+        if ( in.peek() == '/' ) break;  // end tag, not a sub-element
+        e->elements.push_back( parse( in, msg ) );
+        in.get( c ); // next after '>'
+        eat_whitespace( c, in );
+      }
+
+      // content
+      if ( c != '<' )
+      {
+        e->content += '\n';
+        while ( c != '<' )
+        {
+          e->content += c;
+          if(!in.get( c ))
+            throw std::string("xml: unexpected eof");
+        }
+      }
+
+      assert( c == '<' );
+      if(!in.get( c )) // next after '<'
+        throw std::string("xml: unexpected eof");
+
+      // end tag name must match the start tag name
+      eat_delim( c, in, '/', msg );
+      std::string end_name( get_name( c, in ) );
+      if ( e->name != end_name )
+        throw std::string("xml syntax error: beginning name ")
+          + e->name + " did not match end name " + end_name
+          + " (" + msg + ")";
+
+      eat_delim( c, in, '>', msg );
+      return e;
+    }
+
+ // write ---------------------------------------------------------------//
+
+    // Serialize element `e` (attributes, sub-elements, content) to `out`
+    // in the same tiny-XML subset accepted by parse().
+    void write( const element & e, std::ostream & out )
+    {
+      out << "<" << e.name;
+      if ( !e.attributes.empty() )
+      {
+        for( attribute_list::const_iterator itr = e.attributes.begin();
+          itr != e.attributes.end(); ++itr )
+        {
+          out << " " << itr->name << "=\"" << itr->value << "\"";
+        }
+      }
+      out << ">";
+      if ( !e.elements.empty() )
+      {
+        out << "\n";
+        for( element_list::const_iterator itr = e.elements.begin();
+          itr != e.elements.end(); ++itr )
+        {
+          write( **itr, out );
+        }
+      }
+      if ( !e.content.empty() )
+      {
+        out << e.content;
+      }
+      out << "</" << e.name << ">\n";
+    }
+
+ } // namespace tiny_xml
+} // namespace boost
+
diff --git a/tools/regression/src/detail/tiny_xml.hpp b/tools/regression/src/detail/tiny_xml.hpp
new file mode 100644
index 0000000000..f9d91d2652
--- /dev/null
+++ b/tools/regression/src/detail/tiny_xml.hpp
@@ -0,0 +1,70 @@
+// tiny XML sub-set tools --------------------------------------------------//
+
+// (C) Copyright Beman Dawes 2002. Distributed under the Boost
+// Software License, Version 1.0. (See accompanying file
+// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+// Provides self-contained tools for this XML sub-set:
+//
+// element ::= { "<" name { name "=" "\"" value "\"" } ">"
+// {element} [contents] "</" name ">" }
+//
+// The point of "self-contained" is to minimize tool-chain dependencies.
+
+#ifndef BOOST_TINY_XML_H
+#define BOOST_TINY_XML_H
+
+#include "boost/smart_ptr.hpp" // for shared_ptr
+#include "boost/utility.hpp" // for noncopyable
+#include <list>
+#include <iostream>
+#include <string>
+
+namespace boost
+{
+  namespace tiny_xml
+  {
+    class element;
+
+    // A single name="value" pair on an element's start tag.
+    struct attribute
+    {
+      std::string name;
+      std::string value;
+
+      attribute(){}
+      attribute( const std::string & name, const std::string & value )
+        : name(name), value(value) {}
+    };
+    typedef boost::shared_ptr< element > element_ptr;
+    typedef std::list< element_ptr > element_list;
+    typedef std::list< attribute > attribute_list;
+
+    // One parsed XML element: its tag name, attributes, child elements,
+    // and textual content.
+    class element
+      : private boost::noncopyable  // because deep copy semantics would be required
+    {
+     public:
+      std::string       name;
+      attribute_list    attributes;
+      element_list      elements;
+      std::string       content;
+
+      element() {}
+      explicit element( const std::string & name ) : name(name) {}
+    };
+
+    element_ptr parse( std::istream & in, const std::string & msg );
+    //  Precondition: stream positioned at either the initial "<"
+    //  or the first character after the initial "<".
+    //  Postcondition: stream positioned at the first character after final
+    //  ">" (or eof).
+    //  Returns: an element_ptr to an element representing the parsed stream.
+    //  Throws: std::string on syntax error. msg appended to what() string.
+
+    void write( const element & e, std::ostream & out );
+
+  }
+}
+
+#endif // BOOST_TINY_XML_H
+
+
+
diff --git a/tools/regression/src/detail/tiny_xml_test.cpp b/tools/regression/src/detail/tiny_xml_test.cpp
new file mode 100644
index 0000000000..b5c0542ba4
--- /dev/null
+++ b/tools/regression/src/detail/tiny_xml_test.cpp
@@ -0,0 +1,17 @@
+// tiny XML test program ---------------------------------------------------//
+
+// Copyright Beman Dawes 2002. Distributed under the Boost
+// Software License, Version 1.0. (See accompanying file
+// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+#include "tiny_xml.hpp"
+
+#include <iostream>
+
+int main()
+{
+  // Parse an XML tree from stdin and echo it back to stdout.
+  // Fix: tiny_xml.hpp declares parse(std::istream&, const std::string&)
+  // with no default for the second (error-context) argument, so the
+  // original one-argument call parse(std::cin) did not compile.  Pass a
+  // diagnostic tag naming this test program.
+  boost::tiny_xml::element_ptr tree( boost::tiny_xml::parse( std::cin, "tiny_xml_test" ) );
+  boost::tiny_xml::write( *tree, std::cout );
+  return 0;
+}
+
diff --git a/tools/regression/src/detail/tiny_xml_test.txt b/tools/regression/src/detail/tiny_xml_test.txt
new file mode 100644
index 0000000000..b248cbf062
--- /dev/null
+++ b/tools/regression/src/detail/tiny_xml_test.txt
@@ -0,0 +1,17 @@
+<root>
+<frontmatter>
+// (C) Copyright Beman Dawes 2002. Distributed under the Boost
+// Software License, Version 1.0. (See accompanying file
+// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+</frontmatter>
+<element-1 at-1="abcd" at-2 = "defg" >
+<element-1a>
+It's Howdy Doody time!
+</element-1a>
+<element-1b>It's not Howdy Doody time!</element-1b>
+</element-1>
+<element-2>
+It's
+Eastern Standard time!
+</element-2>
+</root>
diff --git a/tools/regression/src/library_status.cpp b/tools/regression/src/library_status.cpp
new file mode 100644
index 0000000000..885dfdb176
--- /dev/null
+++ b/tools/regression/src/library_status.cpp
@@ -0,0 +1,901 @@
+// Generate Library Status HTML from jam regression test output -----------//
+
+// Copyright Robert Ramey 2012
+// Copyright Bryce Lelbach 2011
+// Copyright Beman Dawes 2002-2011.
+
+// Distributed under the Boost Software License, Version 1.0. (See accompanying
+// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+// See http://www.boost.org/tools/regression/ for documentation.
+
+//Note: This version of the original program builds a large table
+//which includes all build variations such as build/release, static/dynamic, etc.
+
+
+/*******************************************************************************
+
+This program was designed to work unchanged on all platforms and
+configurations. All output which is platform or configuration dependent
+is obtained from external sources such as the .xml file from
+process_jam_log execution, the tools/build/xxx-tools.jam files, or the
+output of the config_info tests.
+
+Please avoid adding platform or configuration dependencies during
+program maintenance.
+
+*******************************************************************************/
+
+#include <boost/filesystem/operations.hpp>
+#include <boost/filesystem/fstream.hpp>
+#include <boost/foreach.hpp>
+
+namespace fs = boost::filesystem;
+
+#include "detail/tiny_xml.hpp"
+namespace xml = boost::tiny_xml;
+
+#include <boost/iterator/transform_iterator.hpp>
+
+#include <cstdlib> // for abort, exit
+#include <string>
+#include <vector>
+#include <set>
+#include <utility> // for make_pair on STLPort
+#include <map>
+#include <algorithm> // max_element, find_if
+#include <iostream>
+#include <fstream>
+#include <ctime>
+#include <stdexcept>
+#include <cassert>
+#include <utility> // for pair
+
+using std::string;
+
+const string pass_msg( "Pass" );
+const string warn_msg( "<i>Warn</i>" );
+const string fail_msg( "<font color=\"#FF0000\"><i>Fail</i></font>" );
+const string missing_residue_msg( "<i>Missing</i>" );
+
+const std::size_t max_compile_msg_size = 10000;
+
+namespace
+{
+ fs::path locate_root; // locate-root (AKA ALL_LOCATE_TARGET) complete path
+ bool ignore_pass = false;
+ bool no_warn = false;
+ bool no_links = false;
+
+ // transform pathname to something html can accept
+ struct char_xlate {
+ typedef char result_type;
+ result_type operator()(char c) const{
+ if(c == '/' || c == '\\')
+ return '-';
+ return c;
+ }
+ };
+ typedef boost::transform_iterator<char_xlate, std::string::const_iterator> html_from_path;
+
+ template<class I1, class I2>
+ std::ostream & operator<<(
+ std::ostream &os,
+ std::pair<I1, I2> p
+ ){
+ while(p.first != p.second)
+ os << *p.first++;
+ return os;
+ }
+
+ struct col_node {
+ int rows, cols;
+ bool is_leaf;
+ typedef std::pair<const std::string, col_node> subcolumn;
+ typedef std::map<std::string, col_node> subcolumns_t;
+ subcolumns_t m_subcolumns;
+ bool operator<(const col_node &cn) const;
+ col_node() :
+ is_leaf(false)
+ {}
+ std::pair<int, int> get_spans();
+ };
+
+ std::pair<int, int> col_node::get_spans(){
+ rows = 1;
+ cols = 0;
+ if(is_leaf){
+ cols = 1;
+ }
+ if(! m_subcolumns.empty()){
+ BOOST_FOREACH(
+ subcolumn & s,
+ m_subcolumns
+ ){
+ std::pair<int, int> spans;
+ spans = s.second.get_spans();
+ rows = (std::max)(rows, spans.first);
+ cols += spans.second;
+ }
+ ++rows;
+ }
+ return std::make_pair(rows, cols);
+ }
+
+ void build_node_tree(const fs::path & dir_root, col_node & node){
+ bool has_directories = false;
+ bool has_files = false;
+ BOOST_FOREACH(
+ fs::directory_entry & d,
+ std::make_pair(
+ fs::directory_iterator(dir_root),
+ fs::directory_iterator()
+ )
+ ){
+ if(fs::is_directory(d)){
+ has_directories = true;
+ std::pair<col_node::subcolumns_t::iterator, bool> result
+ = node.m_subcolumns.insert(
+ std::make_pair(d.path().filename().string(), col_node())
+ );
+ build_node_tree(d, result.first->second);
+ }
+ else{
+ has_files = true;
+ }
+ }
+ if(has_directories && has_files)
+ throw std::string("invalid bin directory structure");
+ node.is_leaf = has_files;
+ }
+
+ fs::ofstream report;
+ fs::ofstream links_file;
+ string links_name;
+
+ string specific_compiler; // if running on one toolset only
+
+ const string empty_string;
+
+ // extract object library name from target directory string ----------------//
+
+ string extract_object_library_name( const string & s )
+ {
+ string t( s );
+ string::size_type pos = t.find( "/build/" );
+ if ( pos != string::npos ) pos += 7;
+ else if ( (pos = t.find( "/test/" )) != string::npos ) pos += 6;
+ else return "";
+ return t.substr( pos, t.find( "/", pos ) - pos );
+ }
+
+ // find_element ------------------------------------------------------------//
+
+ xml::element_list::const_iterator find_element(
+ const xml::element & root, const string & name
+ ){
+ struct element_equal {
+ const string & m_name;
+ element_equal(const string & name) :
+ m_name(name)
+ {}
+ bool operator()(const xml::element_ptr & xep) const {
+ return xep.get()->name == m_name;
+ }
+ };
+ return std::find_if(
+ root.elements.begin(),
+ root.elements.end(),
+ element_equal(name)
+ );
+ }
+
+ // element_content ---------------------------------------------------------//
+ const string & element_content(
+ const xml::element & root, const string & name
+ ){
+ xml::element_list::const_iterator itr;
+ itr = find_element(root, name);
+ if(root.elements.end() == itr)
+ return empty_string;
+ return (*itr)->content;
+ }
+
+ // attribute_value ----------------------------------------------------------//
+
+ const string & attribute_value(
+ const xml::element & element,
+ const string & attribute_name
+ ){
+ struct attribute_equal {
+ const string & m_name;
+ attribute_equal(const string & name) :
+ m_name(name)
+ {}
+ bool operator()(const xml::attribute & a) const {
+ return a.name == m_name;
+ }
+ };
+ xml::attribute_list::const_iterator itr;
+ itr = std::find_if(
+ element.attributes.begin(),
+ element.attributes.end(),
+ attribute_equal(attribute_name)
+ );
+ if(element.attributes.end() == itr){
+ static const string empty_string;
+ return empty_string;
+ }
+ return itr->value;
+ }
+
+ // generate_report ---------------------------------------------------------//
+
+ // return 0 if nothing generated, 1 otherwise, except 2 if compiler msgs
+ int generate_report(
+ const xml::element & db,
+ const std::string source_library_name,
+ const string & test_type,
+ const fs::path & target_dir,
+ bool pass,
+ bool always_show_run_output
+ )
+ {
+ // compile msgs sometimes modified, so make a local copy
+ string compile( ((pass && no_warn)
+ ? empty_string : element_content( db, "compile" )) );
+
+ const string & link( pass ? empty_string : element_content( db, "link" ) );
+ const string & run( (pass && !always_show_run_output)
+ ? empty_string : element_content( db, "run" ) );
+ string lib( (pass ? empty_string : element_content( db, "lib" )) );
+
+ // some compilers output the filename even if there are no errors or
+ // warnings; detect this if one line of output and it contains no space.
+ string::size_type pos = compile.find( '\n', 1 );
+ if ( pos != string::npos && compile.size()-pos <= 2
+ && compile.find( ' ' ) == string::npos ) compile.clear();
+
+ if ( lib.empty()
+ && (
+ compile.empty() || test_type == "compile_fail"
+ )
+ && link.empty()
+ && run.empty()
+ )
+ return 0;
+
+ int result = 1; // some kind of msg for sure
+
+ // limit compile message length
+ if ( compile.size() > max_compile_msg_size )
+ {
+ compile.erase( max_compile_msg_size );
+ compile += "...\n (remainder deleted because of excessive size)\n";
+ }
+
+ const string target_dir_string = target_dir.string();
+
+ links_file << "<h2><a name=\"";
+ links_file << std::make_pair(
+ html_from_path(target_dir_string.begin()),
+ html_from_path(target_dir_string.end())
+ )
+ << "\">"
+ << std::make_pair(
+ html_from_path(target_dir_string.begin()),
+ html_from_path(target_dir_string.end())
+ )
+ ;
+ links_file << "</a></h2>\n";;
+
+ if ( !compile.empty() )
+ {
+ ++result;
+ links_file << "<h3>Compiler output:</h3><pre>"
+ << compile << "</pre>\n";
+ }
+ if ( !link.empty() )
+ links_file << "<h3>Linker output:</h3><pre>" << link << "</pre>\n";
+ if ( !run.empty() )
+ links_file << "<h3>Run output:</h3><pre>" << run << "</pre>\n";
+
+ // for an object library failure, generate a reference to the object
+ // library failure message, and (once only) generate the object
+ // library failure message itself
+ static std::set< string > failed_lib_target_dirs; // only generate once
+ if ( !lib.empty() )
+ {
+ if ( lib[0] == '\n' ) lib.erase( 0, 1 );
+ string object_library_name( extract_object_library_name( lib ) );
+
+ // changing the target directory naming scheme breaks
+ // extract_object_library_name()
+ assert( !object_library_name.empty() );
+ if ( object_library_name.empty() )
+ std::cerr << "Failed to extract object library name from " << lib << "\n";
+
+ links_file << "<h3>Library build failure: </h3>\n"
+ "See <a href=\"#"
+ << source_library_name << "-"
+ << object_library_name << "-"
+ << std::make_pair(
+ html_from_path(target_dir_string.begin()),
+ html_from_path(target_dir_string.end())
+ )
+ << source_library_name << " - "
+ << object_library_name << " - "
+ << std::make_pair(
+ html_from_path(target_dir_string.begin()),
+ html_from_path(target_dir_string.end())
+ )
+ << "</a>";
+ if ( failed_lib_target_dirs.find( lib ) == failed_lib_target_dirs.end() )
+ {
+ failed_lib_target_dirs.insert( lib );
+ fs::path pth( locate_root / lib / "test_log.xml" );
+ fs::ifstream file( pth );
+ if ( file )
+ {
+ xml::element_ptr db = xml::parse( file, pth.string() );
+ generate_report(
+ *db,
+ source_library_name,
+ test_type,
+ target_dir,
+ false,
+ false
+ );
+ }
+ else
+ {
+ links_file << "<h2><a name=\""
+ << object_library_name << "-"
+ << std::make_pair(
+ html_from_path(target_dir_string.begin()),
+ html_from_path(target_dir_string.end())
+ )
+ << "\">"
+ << object_library_name << " - "
+ << std::make_pair(
+ html_from_path(target_dir_string.begin()),
+ html_from_path(target_dir_string.end())
+ )
+ << "</a></h2>\n"
+ << "test_log.xml not found\n";
+ }
+ }
+ }
+ return result;
+ }
+
+ // do_cell ---------------------------------------------------------------//
+ bool do_cell(
+ const fs::path & target_dir,
+ const string & lib_name,
+ const string & test_name,
+ string & target,
+ bool profile
+ ){
+ // return true if any results except pass_msg
+ bool pass = false;
+
+ fs::path xml_file_path( target_dir / "test_log.xml" );
+ if ( !fs::exists( xml_file_path ) )
+ {
+ fs::path test_path = target_dir / (test_name + ".test");
+ target += "<td align=\"right\">";
+ target += fs::exists( test_path) ? pass_msg : fail_msg;
+ target += "</td>";
+ return true;
+ }
+
+ int anything_generated = 0;
+
+
+ string test_type( "unknown" );
+ bool always_show_run_output( false );
+
+ fs::ifstream file( xml_file_path );
+ xml::element_ptr dbp = xml::parse( file, xml_file_path.string() );
+ const xml::element & db( *dbp );
+
+ always_show_run_output
+ = attribute_value( db, "show-run-output" ) == "true";
+
+ /*
+ test_type = attribute_value( db, "test-type" );
+ std::string test_type_base( test_type );
+ if ( test_type_base.size() > 5 )
+ {
+ const string::size_type trailer = test_type_base.size() - 5;
+ if ( test_type_base.substr( trailer ) == "_fail" )
+ {
+ test_type_base.erase( trailer );
+ }
+ }
+ if ( test_type_base.size() > 4 )
+ {
+ const string::size_type trailer = test_type_base.size() - 4;
+ if ( test_type_base.substr( trailer ) == "_pyd" )
+ {
+ test_type_base.erase( trailer );
+ }
+ }
+
+ xml::element_list::const_iterator itr;
+ itr = find_element( db, test_type_base );
+ if(db.elements.end() == itr)
+ return pass;
+ */
+ pass = (attribute_value( db, "result" ) != "fail");
+
+ if (!no_links){
+ anything_generated =
+ generate_report(
+ db,
+ lib_name,
+ test_type,
+ target_dir,
+ pass,
+ always_show_run_output
+ );
+ }
+
+ // generate the status table cell pass/warn/fail HTML
+ target += "<td align=\"right\">";
+ if ( anything_generated != 0 )
+ {
+ target += "<a href=\"";
+ target += links_name;
+ target += "#";
+ const string target_dir_string = target_dir.string();
+ std::copy(
+ html_from_path(target_dir_string.begin()),
+ html_from_path(target_dir_string.end()),
+ std::back_inserter(target)
+ );
+ target += "\">";
+ target += pass
+ ? (anything_generated < 2 ? pass_msg : warn_msg)
+ : fail_msg;
+ target += "</a>";
+ }
+ else target += pass ? pass_msg : fail_msg;
+
+ // if profiling
+ if(profile && pass){
+ // add link to profile
+ target += " <a href=\"";
+ target += (target_dir / "profile.txt").string();
+ target += "\"><i>Profile</i></a>";
+ }
+ target += "</td>";
+ return (anything_generated != 0) || !pass;
+ }
+
+ bool visit_node_tree(
+ const col_node & node,
+ fs::path dir_root,
+ const string & lib_name,
+ const string & test_name,
+ string & target,
+ bool profile
+ ){
+ bool retval = false;
+ if(node.is_leaf){
+ retval = do_cell(
+ dir_root,
+ lib_name,
+ test_name,
+ target,
+ profile
+ );
+ }
+ BOOST_FOREACH(
+ const col_node::subcolumn & s,
+ node.m_subcolumns
+ ){
+ fs::path subdir = dir_root / s.first;
+ retval |= visit_node_tree(
+ s.second,
+ subdir,
+ lib_name,
+ test_name,
+ target,
+ s.first == "profile"
+ );
+ }
+ return retval;
+ }
+
+ // emit results for each test
+ void do_row(
+ col_node test_node,
+ const fs::path & test_dir,
+ const string & lib_name,
+ const string & test_name,
+ string & target
+ ){
+ string::size_type row_start_pos = target.size();
+
+ target += "<tr>";
+
+ target += "<td>";
+ //target += "<a href=\"" + url_prefix_dir_view + "/libs/" + lib_name + "\">";
+ target += test_name;
+ //target += "</a>";
+ target += "</td>";
+
+ bool no_warn_save = no_warn;
+
+ // emit cells on this row
+ bool anything_to_report = visit_node_tree(
+ test_node,
+ test_dir,
+ lib_name,
+ test_name,
+ target,
+ false
+ );
+
+ target += "</tr>";
+
+ if ( ignore_pass
+ && ! anything_to_report )
+ target.erase( row_start_pos );
+
+ no_warn = no_warn_save;
+ }
+
+ // do_table_body -----------------------------------------------------------//
+
+ void do_table_body(
+ col_node root_node,
+ const string & lib_name,
+ const fs::path & test_lib_dir
+ ){
+ // rows are held in a vector so they can be sorted, if desired.
+ std::vector<string> results;
+
+ BOOST_FOREACH(
+ fs::directory_entry & d,
+ std::make_pair(
+ fs::directory_iterator(test_lib_dir),
+ fs::directory_iterator()
+ )
+ ){
+ if(! fs::is_directory(d))
+ continue;
+
+ // if the file name contains ".test"
+ if(d.path().extension() != ".test")
+ continue;
+
+ string test_name = d.path().stem().string();
+
+ results.push_back( std::string() );
+ do_row(
+ root_node, //*test_node_itr++,
+ d, // test dir
+ lib_name,
+ test_name,
+ results[results.size()-1]
+ );
+ }
+
+ std::sort( results.begin(), results.end() );
+
+ BOOST_FOREACH(string &s, results)
+ report << s << "\n";
+ }
+
+ // column header-----------------------------------------------------------//
+ int header_depth(const col_node & root){
+ int max_depth = 1;
+ BOOST_FOREACH(
+ const col_node::subcolumn &s,
+ root.m_subcolumns
+ ){
+ max_depth = (std::max)(max_depth, s.second.rows);
+ }
+ return max_depth;
+ }
+
+ void header_cell(int rows, int cols, const std::string & name){
+ // add row cells
+ report << "<td align=\"center\" " ;
+ if(1 < cols)
+ report << "colspan=\"" << cols << "\" " ;
+ if(1 < rows)
+ // span rows to the end of the header
+ report << "rowspan=\"" << rows << "\" " ;
+ report << ">" ;
+ report << name;
+ report << "</td>\n";
+ }
+
+ void emit_column_headers(
+ const col_node & node,
+ int display_row,
+ int current_row,
+ int row_count
+ ){
+ if(current_row < display_row){
+ if(! node.m_subcolumns.empty()){
+ BOOST_FOREACH(
+ const col_node::subcolumn &s,
+ node.m_subcolumns
+ ){
+ emit_column_headers(
+ s.second,
+ display_row,
+ current_row + 1,
+ row_count
+ );
+ }
+ }
+ return;
+ }
+ /*
+ if(node.is_leaf && ! node.m_subcolumns.empty()){
+ header_cell(row_count - current_row, 1, std::string(""));
+ }
+ */
+ BOOST_FOREACH(col_node::subcolumn s, node.m_subcolumns){
+ if(1 == s.second.rows)
+ header_cell(row_count - current_row, s.second.cols, s.first);
+ else
+ header_cell(1, s.second.cols, s.first);
+ }
+ }
+
+ fs::path find_lib_test_dir(fs::path const& initial_path){
+ // walk up from the path where we started until we find
+ // bin or bin.v2
+
+ fs::path test_lib_dir = initial_path;
+ do{
+ if(fs::is_directory( test_lib_dir / "bin.v2")){
+ test_lib_dir /= "bin.v2";
+ break;
+ }
+ if(fs::is_directory( test_lib_dir / "bin")){
+ // v1 includes the word boost
+ test_lib_dir /= "bin";
+ if(fs::is_directory( test_lib_dir / "boost")){
+ test_lib_dir /= "boost";
+ }
+ break;
+ }
+ }while(! test_lib_dir.empty());
+
+ if(test_lib_dir.empty())
+ throw std::string("binary path not found");
+
+ return test_lib_dir;
+ }
+
+ string find_lib_name(fs::path lib_test_dir){
+ // search the path backwards for the magic name "libs"
+ fs::path::iterator e_itr = lib_test_dir.end();
+ while(lib_test_dir.begin() != e_itr){
+ if(*--e_itr == "libs")
+ break;
+ }
+
+ // if it's found
+ if(lib_test_dir.begin() != e_itr){
+ // use the whole path since the "libs"
+ ++e_itr;
+ }
+ // otherwise, just use the last two components
+ else{
+ e_itr = lib_test_dir.end();
+ if(e_itr != lib_test_dir.begin()){
+ if(--e_itr != lib_test_dir.begin()){
+ --e_itr;
+ }
+ }
+ }
+
+ fs::path library_name;
+ while(lib_test_dir.end() != e_itr){
+ library_name /= *e_itr++;
+ }
+ return library_name.string();
+ }
+
+ fs::path find_boost_root(fs::path initial_path){
+ fs::path boost_root = initial_path;
+ for(;;){
+ if(fs::is_directory( boost_root / "boost")){
+ break;
+ }
+ if(boost_root.empty())
+ throw std::string("boost root not found");
+ boost_root.remove_filename();
+ }
+
+ return boost_root;
+ }
+
+ // do_table ----------------------------------------------------------------//
+ void do_table(const fs::path & lib_test_dir, const string & lib_name)
+ {
+ col_node root_node;
+
+ BOOST_FOREACH(
+ fs::directory_entry & d,
+ std::make_pair(
+ fs::directory_iterator(lib_test_dir),
+ fs::directory_iterator()
+ )
+ ){
+ if(! fs::is_directory(d))
+ continue;
+ fs::path p = d.path();
+ if(p.extension() != ".test")
+ continue;
+ build_node_tree(d, root_node);
+ }
+
+ // visit directory nodes and record nodetree
+ report << "<table border=\"1\" cellspacing=\"0\" cellpadding=\"5\">\n";
+
+ // emit
+ root_node.get_spans();
+ int row_count = header_depth(root_node);
+ report << "<tr>\n";
+ report << "<td rowspan=\"" << row_count << "\">Test Name</td>\n";
+
+ // emit column headers
+ int row_index = 0;
+ for(;;){
+ emit_column_headers(root_node, row_index, 0, row_count);
+ report << "</tr>" ;
+ if(++row_index == row_count)
+ break;
+ report << "<tr>\n";
+ }
+
+ // now the rest of the table body
+ do_table_body(root_node, lib_name, lib_test_dir);
+
+ report << "</table>\n";
+ }
+}// unnamed namespace
+
+// main --------------------------------------------------------------------//
+
+#define BOOST_NO_CPP_MAIN_SUCCESS_MESSAGE
+#include <boost/test/included/prg_exec_monitor.hpp>
+
+int cpp_main( int argc, char * argv[] ) // note name!
+{
+ fs::path initial_path = fs::initial_path();
+
+ while ( argc > 1 && *argv[1] == '-' )
+ {
+ if ( argc > 2 && std::strcmp( argv[1], "--compiler" ) == 0 )
+ { specific_compiler = argv[2]; --argc; ++argv; }
+ else if ( argc > 2 && std::strcmp( argv[1], "--locate-root" ) == 0 )
+ { locate_root = fs::path( argv[2] ); --argc; ++argv; }
+ else if ( std::strcmp( argv[1], "--ignore-pass" ) == 0 ) ignore_pass = true;
+ else if ( std::strcmp( argv[1], "--no-warn" ) == 0 ) no_warn = true;
+ else if ( std::strcmp( argv[1], "--v2" ) == 0 )
+ {--argc; ++argv ;} // skip
+ else if ( argc > 2 && std::strcmp( argv[1], "--jamfile" ) == 0)
+ {--argc; ++argv;} // skip
+ else { std::cerr << "Unknown option: " << argv[1] << "\n"; argc = 1; }
+ --argc;
+ ++argv;
+ }
+
+ if ( argc != 2 && argc != 3 )
+ {
+ std::cerr <<
+ "Usage: library_status [options...] status-file [links-file]\n"
+ " boost-root is the path to the boost tree root directory.\n"
+ " status-file and links-file are paths to the output files.\n"
+ " options: --compiler name Run for named compiler only\n"
+ " --ignore-pass Do not report tests which pass all compilers\n"
+ " --no-warn Warnings not reported if test passes\n"
+ " --locate-root path Path to ALL_LOCATE_TARGET for bjam;\n"
+ " default boost-root.\n"
+ "Example: library_status --compiler gcc /boost-root cs.html cs-links.html\n"
+ "Note: Only the leaf of the links-file path is\n"
+ "used in status-file HTML links. Thus for browsing, status-file,\n"
+ "links-file must be in the same directory.\n"
+ ;
+ return 1;
+ }
+
+ if(locate_root.empty())
+ if(! fs::exists("bin") && ! fs::exists("bin.v2"))
+ locate_root = find_boost_root(initial_path);
+
+ report.open( fs::path( argv[1] ) );
+ if ( !report )
+ {
+ std::cerr << "Could not open report output file: " << argv[2] << std::endl;
+ return 1;
+ }
+
+ if ( argc == 3 )
+ {
+ fs::path links_path( argv[2] );
+ links_name = links_path.filename().string();
+ links_file.open( links_path );
+ if ( !links_file )
+ {
+ std::cerr << "Could not open links output file: " << argv[3] << std::endl;
+ return 1;
+ }
+ }
+ else no_links = true;
+
+ const string library_name = find_lib_name(initial_path);
+
+ char run_date[128];
+ std::time_t tod;
+ std::time( &tod );
+ std::strftime( run_date, sizeof(run_date),
+ "%X UTC, %A %d %B %Y", std::gmtime( &tod ) );
+
+ report
+ << "<html>\n"
+ << "<head>\n"
+ << "<title>Boost Library Status Automatic Test</title>\n"
+ << "</head>\n"
+ << "<body bgcolor=\"#ffffff\" text=\"#000000\">\n"
+ << "<table border=\"0\">\n"
+ << "<h1>Library Status: " + library_name + "</h1>\n"
+ << "<b>Run Date:</b> "
+ << run_date
+ << "\n<br>"
+ ;
+
+ report << "</td>\n</table>\n<br>\n";
+
+ if ( !no_links )
+ {
+ links_file
+ << "<html>\n"
+ << "<head>\n"
+ << "<title>Boost Library Status Error Log</title>\n"
+ << "</head>\n"
+ << "<body bgcolor=\"#ffffff\" text=\"#000000\">\n"
+ << "<table border=\"0\">\n"
+ << "<h1>Library Status: " + library_name + "</h1>\n"
+ << "<b>Run Date:</b> "
+ << run_date
+ << "\n<br></table>\n<br>\n"
+ ;
+ }
+
+ // detect whether in a directory which looks like
+ // bin/<library name>/test
+ // or just
+ // bin
+ fs::path library_test_directory = find_lib_test_dir(locate_root);
+ // if libs exists, drop down a couple of levels
+ if(fs::is_directory( library_test_directory / "libs")){
+ library_test_directory /= "libs";
+ library_test_directory /= library_name;
+ }
+
+ do_table(library_test_directory, library_name);
+
+ report << "</body>\n"
+ "</html>\n"
+ ;
+
+ if ( !no_links )
+ {
+ links_file
+ << "</body>\n"
+ "</html>\n"
+ ;
+ }
+ return 0;
+}
diff --git a/tools/regression/src/library_test.bat b/tools/regression/src/library_test.bat
new file mode 100644
index 0000000000..0f33840c34
--- /dev/null
+++ b/tools/regression/src/library_test.bat
@@ -0,0 +1,21 @@
+@echo off
+
+rem Copyright Robert Ramey 2007
+
+rem Distributed under the Boost Software License, Version 1.0.
+rem See http://www.boost.org/LICENSE_1_0.txt
+
+if not "%1" == "" goto bjam
+ echo Usage: %0 "<bjam arguments>"
+ echo where typical bjam arguements are:
+ echo toolset=msvc-7.1,gcc
+ echo variant=debug,release,profile
+ echo link=static,shared
+ echo threading=single,multi
+ echo -sBOOST_ARCHIVE_LIST="<archive name>"
+ goto end
+:bjam
+ bjam --dump-tests %* >bjam.log 2>&1
+ process_jam_log --v2 <bjam.log
+ library_status library_status.html links.html
+:end
diff --git a/tools/regression/src/library_test.sh b/tools/regression/src/library_test.sh
new file mode 100755
index 0000000000..0164d0d1aa
--- /dev/null
+++ b/tools/regression/src/library_test.sh
@@ -0,0 +1,19 @@
+# Copyright Robert Ramey 2007
+
+# Distributed under the Boost Software License, Version 1.0.
+# See http://www.boost.org/LICENSE_1_0.txt
+
+if test $# -eq 0
+then
+ echo "Usage: $0 <bjam arguments>"
+ echo "Typical bjam arguements are:"
+ echo " toolset=msvc-7.1,gcc"
+ echo " variant=debug,release,profile"
+ echo " link=static,shared"
+ echo " threading=single,multi"
+ echo " -sBOOST_ARCHIVE_LIST=<archive name>"
+else
+ bjam --dump-tests $@ >bjam.log 2>&1
+ process_jam_log --v2 <bjam.log
+ library_status library_status.html links.html
+fi
diff --git a/tools/regression/src/library_test_all.sh b/tools/regression/src/library_test_all.sh
new file mode 100755
index 0000000000..ed5177d412
--- /dev/null
+++ b/tools/regression/src/library_test_all.sh
@@ -0,0 +1,85 @@
+if test $# -eq 0
+then
+ echo "Usage: $0 <bjam arguments>"
+ echo "Typical bjam arguments are:"
+ echo " toolset=msvc-7.1,gcc"
+ echo " variant=debug,release,profile"
+ echo " link=static,shared"
+ echo " threading=single,multi"
+ echo
+ echo "note: make sure this script is run from boost root directory !!!"
+ exit 1
+fi
+
+if ! test -e libs
+then
+ echo No libs directory found. Run from boost root directory !!!
+ exit 1
+fi
+
+#html header
+cat <<end >status/library_status_contents.html
+<!doctype HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+<html>
+<!--
+(C) Copyright 2007 Robert Ramey - http://www.rrsd.com .
+Use, modification and distribution is subject to the Boost Software
+License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+http://www.boost.org/LICENSE_1_0.txt)
+-->
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<link rel="stylesheet" type="text/css" href="../boost.css">
+<title>Library Status Contents</title>
+<body>
+end
+
+cd >nul libs
+
+# runtests, create library pages, and body of summary page
+for lib_name in *
+do
+ if test -d $lib_name
+ then
+ cd >nul $lib_name
+
+ if test -e "test/Jamfile.v2"
+ then
+ cd >nul test
+ echo $lib_name
+ echo >>../../../status/library_status_contents.html "<a target=\"detail\" href=\"../libs/$lib_name/test/library_status.html\">$lib_name</a><br>"
+ ../../../tools/regression/src/library_test.sh $@
+ cd >nul ..
+ fi
+
+ for sublib_name in *
+ do
+ if test -d $sublib_name
+ then
+ cd >nul $sublib_name
+ if test -e "test/Jamfile.v2"
+ then
+ cd >nul test
+ echo $lib_name/$sublib_name
+ echo >>../../../../status/library_status_contents.html "<a target=\"detail\" href=\"../libs/$lib_name/$sublib_name/test/library_status.html\">$lib_name/$sublib_name</a><br>"
+ ../../../../tools/regression/src/library_test.sh $@
+ cd >nul ..
+ fi
+ cd >nul ..
+ fi
+ done
+
+ cd >nul ..
+ fi
+done
+
+
+cd >nul ..
+
+#html trailer
+cat <<end >>status/library_status_contents.html
+</body>
+</html>
+end
+
+
diff --git a/tools/regression/src/process_jam_log.cpp b/tools/regression/src/process_jam_log.cpp
new file mode 100644
index 0000000000..8f1b19b06a
--- /dev/null
+++ b/tools/regression/src/process_jam_log.cpp
@@ -0,0 +1,893 @@
+// process jam regression test output into XML -----------------------------//
+
+// Copyright Beman Dawes 2002. Distributed under the Boost
+// Software License, Version 1.0. (See accompanying file
+// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+// See http://www.boost.org/tools/regression for documentation.
+
+#define BOOST_FILESYSTEM_VERSION 3
+
+#include <boost/config/warning_disable.hpp>
+
+#include "detail/tiny_xml.hpp"
+#include "boost/filesystem/operations.hpp"
+#include "boost/filesystem/fstream.hpp"
+#include "boost/filesystem/exception.hpp"
+#include "boost/filesystem/convenience.hpp"
+
+#include <cassert>  // for assert (test_log, message_manager)
+#include <cctype>   // for tolower
+#include <cstdlib>  // for exit
+#include <cstring>
+#include <ctime>
+#include <iostream>
+#include <map>
+#include <string>
+#include <utility>  // for make_pair
+#include <vector>   // for std::vector (split, main)
+
+using std::string;
+namespace xml = boost::tiny_xml;
+namespace fs = boost::filesystem;
+
+// options
+
+static bool echo = false;
+static bool create_dirs = false;
+static bool boost_build_v2 = true;
+
+namespace
+{
+ struct test_info
+ {
+ string file_path; // relative boost-root
+ string type;
+ bool always_show_run_output;
+ };
+ typedef std::map< string, test_info > test2info_map; // key is test-name
+ test2info_map test2info;
+
+ fs::path boost_root;
+ fs::path locate_root; // ALL_LOCATE_TARGET (or boost_root if none)
+
+ // set_boost_root --------------------------------------------------------//
+
+  // Discover boost_root heuristically: walk up from the current working
+  // directory until a directory containing "libs" is found, and make that
+  // directory boost_root.  Aborts the program if the filesystem root is
+  // reached first.  Side effect: temporarily changes the process working
+  // directory, restoring it before returning or exiting.
+  void set_boost_root()
+  {
+
+    boost_root = fs::initial_path();
+
+    for(;;)
+    {
+      if ( fs::exists( boost_root / "libs" ) )
+      {
+        fs::current_path( fs::initial_path() ); // restore initial path
+        return;
+      }
+      fs::current_path( ".." ); // move one level up and try again
+      if ( boost_root == fs::current_path() )
+      {
+        // current_path( ".." ) didn't move us: we are at the filesystem root
+        fs::current_path( fs::initial_path() ); // restore initial path
+        std::cout <<
+          "Abort: process_jam_log must be run from within a boost directory tree\n";
+        std::exit(1);
+      }
+      boost_root = fs::current_path();
+    }
+  }
+
+ // append_html -------------------------------------------------------------//
+
+  // Append src to target, escaping the HTML metacharacters '<', '>' and '&'.
+  // A handful of known noise lines from the build output are dropped entirely.
+  void append_html( const string & src, string & target )
+  {
+    // lines we deliberately ignore
+    static const char * const noise[3] =
+    {
+      "th target...",
+      "cc1plus.exe: warning: changing search order for system directory",
+      "cc1plus.exe: warning: as it has already been specified as a non-system directory"
+    };
+    for ( int i = 0; i != 3; ++i )
+      if ( src.find( noise[i] ) != string::npos ) return;
+
+    // pre-grow the buffer; on some platforms (e.g. tru64cxx) this is a real
+    // performance boost
+    target.reserve( src.size() * 2 + target.size() );
+
+    for ( string::const_iterator it = src.begin(); it != src.end(); ++it )
+    {
+      switch ( *it )
+      {
+        case '<': target += "&lt;"; break;
+        case '>': target += "&gt;"; break;
+        case '&': target += "&amp;"; break;
+        default:  target += *it;
+      }
+    }
+  }
+
+ // timestamp ---------------------------------------------------------------//
+
+  // Current UTC time formatted as "YYYY-MM-DD HH:MM:SS UTC".
+  string timestamp()
+  {
+    std::time_t now = std::time( 0 );
+    char buf[128];
+    std::strftime( buf, sizeof(buf),
+      "%Y-%m-%d %X UTC", std::gmtime( &now ) );
+    return string( buf );
+  }
+
+// convert path separators to forward slashes ------------------------------//
+
+  // Normalize path separators in place: both '\' (Windows) and '!' (old jam
+  // grist separator) become '/'.
+  void convert_path_separators( string & s )
+  {
+    for ( string::size_type i = 0; i < s.size(); ++i )
+    {
+      if ( s[i] == '\\' || s[i] == '!' )
+        s[i] = '/';
+    }
+  }
+
+// trim_left ----------------------------------------------------------------//
+
+  // Return s with leading spaces removed (only ' ', not all whitespace).
+  std::string trim_left( std::string const& s )
+  {
+    std::string::size_type const first( s.find_first_not_of( ' ' ) );
+    if ( first == std::string::npos )
+      return "";  // all spaces (or empty)
+    return s.substr( first );
+  }
+
+
+// split --------------------------------------------------------------------//
+
+  // Split s on spaces: runs of spaces act as one separator, but a string
+  // ending in spaces still yields a final "" element, and splitting ""
+  // yields the single element "" (both matching the original recursive
+  // implementation).
+  //
+  // Rewritten iteratively: the original recursed once per token and merged
+  // vectors at every level, giving linear stack depth (input lines may be
+  // up to 8192 chars) and quadratic copying.
+  std::vector<std::string> split( std::string const& s )
+  {
+    std::vector<std::string> result;
+    std::string rest( s );
+    for (;;)
+    {
+      std::string::size_type const sep( rest.find( ' ' ) );
+      result.push_back( rest.substr( 0, sep ) );
+      if ( sep == std::string::npos )
+        break;
+      // skip the separator together with any adjacent spaces
+      std::string::size_type const next( rest.find_first_not_of( ' ', sep ) );
+      rest = next == std::string::npos ? std::string() : rest.substr( next );
+    }
+    return result;
+  }
+
+
+// extract a target directory path from a jam target string ----------------//
+// s may be relative to the initial_path:
+// ..\..\..\libs\foo\build\bin\libfoo.lib\vc7\debug\runtime-link-dynamic\boo.obj
+// s may be absolute:
+// d:\myboost\libs\foo\build\bin\libfoo.lib\vc7\debug\runtime-link-dynamic\boo.obj
+// return path is always relative to the boost directory tree:
+// libs/foo/build/bin/libfs.lib/vc7/debug/runtime-link-dynamic
+
+  // Extract the target directory, relative to the boost tree, from a jam
+  // target line (see format examples in the comment block above).
+  string target_directory( const string & s )
+  {
+    string temp( s );
+    convert_path_separators( temp );
+    temp.erase( temp.find_last_of( "/" ) ); // remove leaf
+    temp = split( trim_left( temp ) ).back(); // last whitespace-separated token
+    // relative paths: strip the leading "./" or "../" run;
+    // absolute paths: strip the locate_root prefix and its trailing separator
+    if ( temp[0] == '.' ) temp.erase( 0, temp.find_first_not_of( "./" ) );
+    else temp.erase( 0, locate_root.string().size()+1 );
+    if ( echo )
+      std::cout << "\ttarget_directory( \"" << s << "\") -> \"" << temp << "\"" << std::endl;
+    return temp;
+  }
+
+  // Position of the first recognized target-name suffix in s, or npos if
+  // none is present.  Earlier entries in the table take priority over later
+  // ones, regardless of where in s they occur.
+  string::size_type target_name_end( const string & s )
+  {
+    static const char * const suffixes[6] =
+      { ".test/", ".dll/", ".so/", ".lib/", ".pyd/", ".a/" };
+    for ( int i = 0; i != 6; ++i )
+    {
+      string::size_type const pos = s.find( suffixes[i] );
+      if ( pos != string::npos )
+        return pos;
+    }
+    return string::npos;
+  }
+
+  // Toolset name: the path element immediately following the target name
+  // (or, failing that, "build/") in a jam target path; "" if neither
+  // marker is found.
+  string toolset( const string & s )
+  {
+    string::size_type marker = target_name_end( s );
+    if ( marker == string::npos )
+      marker = s.find( "build/" );
+    if ( marker == string::npos )
+      return "";
+    string::size_type const begin = s.find( "/", marker ) + 1;
+    string::size_type const end = s.find( "/", begin );
+    return s.substr( begin, end - begin );
+  }
+
+  // Test name: the path element ending at the target-name suffix found by
+  // target_name_end(); "" if no suffix is present.  For ".test/" targets
+  // the suffix itself is excluded; for the other suffix kinds the full
+  // element up to the next '/' is taken.
+  string test_name( const string & s )
+  {
+    string::size_type pos = target_name_end( s );
+    if ( pos == string::npos ) return "";
+    string::size_type pos_start = s.rfind( '/', pos ) + 1;
+    return s.substr( pos_start,
+      (s.find( ".test/" ) != string::npos
+        ? pos : s.find( "/", pos )) - pos_start );
+  }
+
+ // Take a path to a target directory of test, and
+ // returns library name corresponding to that path.
+  string test_path_to_library_name( string const& path )
+  {
+    std::string result;
+    // the library portion starts after "libs/" (or "tools/" for tool tests)
+    string::size_type start_pos( path.find( "libs/" ) );
+    if ( start_pos == string::npos ) {
+      start_pos = path.find( "tools/" );
+    }
+
+    if ( start_pos != string::npos )
+    {
+      // The path format is ...libs/functional/hash/test/something.test/....
+      // So, the part between "libs" and "test/something.test" can be considered
+      // as library name. But, for some libraries tests are located too deep,
+      // say numeric/ublas/test/test1 directory, and some libraries have tests
+      // in several subdirectories (regex/example and regex/test). So, nested
+      // directory may belong to several libraries.
+
+      // To disambiguate, it's possible to place a 'sublibs' file in
+      // a directory. It means that child directories are separate libraries.
+      // It's still possible to have tests in the directory that has 'sublibs'
+      // file.
+
+      std::string interesting;
+      start_pos = path.find( '/', start_pos ) + 1;
+      string::size_type end_pos( path.find( ".test/", start_pos ) );
+      end_pos = path.rfind('/', end_pos);
+      // drop a trailing "/test" element -- it is a convention, not a library
+      if (path.substr(end_pos - 5, 5) == "/test")
+        interesting = path.substr( start_pos, end_pos - 5 - start_pos );
+      else
+        interesting = path.substr( start_pos, end_pos - start_pos );
+
+      // Take slash-separated elements until we have corresponding 'sublibs'.
+      end_pos = 0;
+      for(;;)
+      {
+        end_pos = interesting.find('/', end_pos);
+        if (end_pos == string::npos) {
+          result = interesting;
+          break;
+        }
+        result = interesting.substr(0, end_pos);
+
+        if ( fs::exists( ( boost_root / "libs" ) / result / "sublibs" ) )
+        {
+          // this prefix is a container of sub-libraries; take one more element
+          end_pos = end_pos + 1;
+        }
+        else
+          break;
+      }
+    }
+
+    return result;
+  }
+
+ // Tries to find target name in the string 'msg', starting from
+ // position start.
+ // If found, extract the directory name from the target name and
+ // stores it in 'dir', and return the position after the target name.
+ // Otherwise, returns string::npos.
+  string::size_type parse_skipped_msg_aux(const string& msg,
+                                          string::size_type start,
+                                          string& dir)
+  {
+    dir.clear();
+    // target names appear in the message as <gristed-target>; find the next one
+    string::size_type start_pos = msg.find( '<', start );
+    if ( start_pos == string::npos ) return string::npos;
+    ++start_pos;
+    string::size_type end_pos = msg.find( '>', start_pos );
+    dir += msg.substr( start_pos, end_pos - start_pos );
+    if ( boost_build_v2 )
+    {
+      // The first letter is a magic value indicating
+      // the type of grist.
+      convert_path_separators( dir );
+      dir.erase( 0, 1 );
+      // We need path from root, not from 'status' dir.
+      if (dir.find("../") == 0)
+        dir.erase(0,3);
+      else // dir is always relative to the boost directory tree
+        dir.erase( 0, locate_root.string().size()+1 );
+    }
+    else
+    {
+      if ( dir[0] == '@' )
+      {
+        // new style build path, rooted build tree
+        convert_path_separators( dir );
+        dir.replace( 0, 1, "bin/" );
+      }
+      else
+      {
+        // old style build path, integrated build tree
+        start_pos = dir.rfind( '!' );
+        convert_path_separators( dir );
+        string::size_type path_sep_pos = dir.find( '/', start_pos + 1 );
+        if ( path_sep_pos != string::npos )
+          dir.insert( path_sep_pos, "/bin" );
+        else
+        {
+          // see http://article.gmane.org/gmane.comp.lib.boost.devel/146688;
+          // the following code assumes that: a) 'dir' is not empty,
+          // b) 'end_pos != string::npos' and c) 'msg' always ends with '...'
+          if ( dir[dir.size() - 1] == '@' )
+            dir += "/" + msg.substr( end_pos + 1, msg.size() - end_pos - 1 - 3 );
+        }
+      }
+    }
+    return end_pos;
+  }
+
+ // the format of paths is really kinky, so convert to normal form
+ // first path is missing the leading "..\".
+ // first path is missing "\bin" after "status".
+ // second path is missing the leading "..\".
+ // second path is missing "\bin" after "build".
+ // second path uses "!" for some separators.
+  void parse_skipped_msg( const string & msg,
+    string & first_dir, string & second_dir )
+  {
+    // first target directory named in the "...skipped" message
+    string::size_type pos = parse_skipped_msg_aux(msg, 0, first_dir);
+    if (pos == string::npos)
+      return; // no target found; outputs left untouched
+    // second directory (the dependency that caused the skip), if any
+    parse_skipped_msg_aux(msg, pos, second_dir);
+  }
+
+// test_log hides database details -----------------------------------------//
+
+  // Wrapper around one test_log.xml file in the locate_root tree.  The XML
+  // document is loaded (or freshly created) in the constructor and written
+  // back to disk by the destructor, so each instance behaves as a short
+  // read-modify-write transaction on one test's log.
+  class test_log
+    : boost::noncopyable
+  {
+    const string & m_target_directory; // directory holding test_log.xml
+    xml::element_ptr m_root;           // root <test-log> element
+  public:
+    // Load the existing test_log.xml for target_directory unless
+    // force_new_file is set or parsing fails; in those cases build a fresh
+    // <test-log> element with attributes derived from the --dump-tests
+    // information previously captured in test2info.
+    test_log( const string & target_directory,
+              const string & test_name,
+              const string & toolset,
+              bool force_new_file )
+      : m_target_directory( target_directory )
+    {
+      if ( !force_new_file )
+      {
+        fs::path pth( locate_root / target_directory / "test_log.xml" );
+        fs::ifstream file( pth );
+        if ( file ) // existing file
+        {
+          try
+          {
+            m_root = xml::parse( file, pth.string() );
+            return;
+          }
+          catch(...)
+          {
+            // unable to parse existing XML file, fall through
+          }
+        }
+      }
+
+      string library_name( test_path_to_library_name( target_directory ) );
+
+      test_info info;
+      test2info_map::iterator itr( test2info.find( library_name + "/" + test_name ) );
+      if ( itr != test2info.end() )
+        info = itr->second;
+
+      if ( !info.file_path.empty() )
+        library_name = test_path_to_library_name( info.file_path );
+
+      // no --dump-tests info: infer the test type from the path itself
+      if ( info.type.empty() )
+      {
+        if ( target_directory.find( ".lib/" ) != string::npos
+          || target_directory.find( ".dll/" ) != string::npos
+          || target_directory.find( ".so/" ) != string::npos
+          || target_directory.find( ".dylib/" ) != string::npos
+          || target_directory.find( "/build/" ) != string::npos
+          )
+        {
+          info.type = "lib";
+        }
+        else if ( target_directory.find( ".pyd/" ) != string::npos )
+          info.type = "pyd";
+      }
+
+      m_root.reset( new xml::element( "test-log" ) );
+      m_root->attributes.push_back(
+        xml::attribute( "library", library_name ) );
+      m_root->attributes.push_back(
+        xml::attribute( "test-name", test_name ) );
+      m_root->attributes.push_back(
+        xml::attribute( "test-type", info.type ) );
+      m_root->attributes.push_back(
+        xml::attribute( "test-program", info.file_path ) );
+      m_root->attributes.push_back(
+        xml::attribute( "target-directory", target_directory ) );
+      m_root->attributes.push_back(
+        xml::attribute( "toolset", toolset ) );
+      m_root->attributes.push_back(
+        xml::attribute( "show-run-output",
+          info.always_show_run_output ? "true" : "false" ) );
+    }
+
+    // Persist the (possibly modified) document back to test_log.xml.
+    ~test_log()
+    {
+      fs::path pth( locate_root / m_target_directory / "test_log.xml" );
+      if ( create_dirs && !fs::exists( pth.branch_path() ) )
+        fs::create_directories( pth.branch_path() );
+      fs::ofstream file( pth );
+      if ( !file )
+      {
+        std::cout << "*****Warning - can't open output file: "
+          << pth.string() << "\n";
+      }
+      else xml::write( *m_root, file );
+    }
+
+    const string & target_directory() const { return m_target_directory; }
+
+    void remove_action( const string & action_name )
+    // no effect if action_name not found
+    {
+      xml::element_list::iterator itr;
+      for ( itr = m_root->elements.begin();
+        itr != m_root->elements.end() && (*itr)->name != action_name;
+        ++itr ) {}
+      if ( itr != m_root->elements.end() ) m_root->elements.erase( itr );
+    }
+
+    // Append an action element (replacing any prior one of the same name).
+    void add_action( const string & action_name,
+      const string & result,
+      const string & timestamp,
+      const string & content )
+    {
+      remove_action( action_name );
+      xml::element_ptr action( new xml::element(action_name) );
+      m_root->elements.push_back( action );
+      action->attributes.push_back( xml::attribute( "result", result ) );
+      action->attributes.push_back( xml::attribute( "timestamp", timestamp ) );
+      action->content = content;
+    }
+  };
+
+// message_manager maps input messages into test_log actions ---------------//
+
+  // Maps the stream of jam log lines into test_log actions.  A start_message
+  // opens a pending action; the matching stop_message records it in the
+  // per-test test_log.xml.
+  class message_manager
+    : boost::noncopyable
+  {
+    string m_action_name; // !empty() implies action pending
+                          // IOW, a start_message awaits stop_message
+    string m_target_directory;
+    string m_test_name;
+    string m_toolset;
+
+    bool m_note; // if true, run result set to "note"
+                 // set false by start_message()
+
+    // data needed to stop further compile action after a compile failure
+    // detected in the same target directory
+    string m_previous_target_directory;
+    bool m_compile_failed;
+
+  public:
+    // Fix: m_compile_failed is now initialized; stop_message() reads it,
+    // and it was previously indeterminate until the first start_message()
+    // happened to reset it.
+    message_manager() : m_note(false), m_compile_failed(false) {}
+    ~message_manager() { /*assert( m_action_name.empty() );*/ }
+
+    bool note() const { return m_note; }
+    void note( bool value ) { m_note = value; }
+
+    // Begin a new pending action; any still-pending action is first closed
+    // as a success carrying prior_content.
+    void start_message( const string & action_name,
+      const string & target_directory,
+      const string & test_name,
+      const string & toolset,
+      const string & prior_content )
+    {
+      assert( !target_directory.empty() );
+
+      if ( !m_action_name.empty() ) stop_message( prior_content );
+      m_action_name = action_name;
+      m_target_directory = target_directory;
+      m_test_name = test_name;
+      m_toolset = toolset;
+      m_note = false;
+
+      // a new target directory resets the sticky compile-failed flag
+      if ( m_previous_target_directory != target_directory )
+      {
+        m_previous_target_directory = target_directory;
+        m_compile_failed = false;
+      }
+    }
+
+    // Close the pending action (if any) as a success.
+    void stop_message( const string & content )
+    {
+      if ( m_action_name.empty() ) return;
+      stop_message( m_action_name, m_target_directory,
+        "succeed", timestamp(), content );
+    }
+
+    void stop_message( const string & action_name,
+      const string & target_directory,
+      const string & result,
+      const string & timestamp,
+      const string & content )
+    // the only valid action_names are "compile", "link", "run", "lib"
+    {
+      // My understanding of the jam output is that there should never be
+      // a stop_message that was not preceded by a matching start_message.
+      // That understanding is built into message_manager code.
+      assert( m_action_name == action_name );
+      assert( m_target_directory == target_directory );
+      assert( result == "succeed" || result == "fail" );
+
+      // skip further compile records once a compile in the same target
+      // directory has already failed
+      if ( !m_compile_failed
+        || action_name != "compile"
+        || m_previous_target_directory != target_directory )
+      {
+        test_log tl( target_directory,
+          m_test_name, m_toolset, action_name == "compile" );
+        tl.remove_action( "lib" ); // always clear out lib residue
+
+        // dependency removal: a fresh result invalidates downstream actions
+        if ( action_name == "lib" )
+        {
+          tl.remove_action( "compile" );
+          tl.remove_action( "link" );
+          tl.remove_action( "run" );
+        }
+        else if ( action_name == "compile" )
+        {
+          tl.remove_action( "link" );
+          tl.remove_action( "run" );
+          // remember the failure so later compile messages for this target
+          // directory are suppressed (the original set this twice; once
+          // suffices -- nothing between the two reads the flag)
+          if ( result == "fail" ) m_compile_failed = true;
+        }
+        else if ( action_name == "link" )
+        {
+          tl.remove_action( "run" );
+        }
+
+        // dependency removal won't work right with random names, so assert
+        else { assert( action_name == "run" ); }
+
+        // record the action; a successful run flagged <note> becomes "note"
+        tl.add_action( action_name,
+          result == "succeed" && note() ? std::string("note") : result,
+          timestamp, content );
+      }
+
+      m_action_name = ""; // signal no pending action
+      m_previous_target_directory = target_directory;
+    }
+  };
+}
+
+
+// main --------------------------------------------------------------------//
+
+
+int main( int argc, char ** argv )
+{
+  // Turn off synchronization with corresponding C standard library files. This
+  // gives a significant speed improvement on platforms where the standard C++
+  // streams are implemented using standard C files.
+  std::ios::sync_with_stdio(false);
+
+  fs::initial_path();
+  std::istream* input = 0;
+
+  if ( argc <= 1 )
+  {
+    std::cout << "process_jam_log [--echo] [--create-directories] [--v1|--v2]\n"
+      "         [--boost-root boost_root] [--locate-root locate_root]\n"
+      "         [--input-file input_file]\n"
+      "         [locate-root]\n"
+      "--echo - verbose diagnostic output.\n"
+      "--create-directories - if the directory for xml file doesn't exists - creates it.\n"
+      "         usually used for processing logfile on different machine\n"
+      "--v2 - bjam version 2 used (default).\n"
+      "--v1 - bjam version 1 used.\n"
+      "--boost-root - the root of the boost installation being used. If not defined\n"
+      "         assume to run from within it and discover it heuristically.\n"
+      "--locate-root - the same as the bjam ALL_LOCATE_TARGET\n"
+      "         parameter, if any. Default is boost-root.\n"
+      "--input-file - the output of a bjam --dump-tests run. Default is std input.\n"
+      ;
+    return 1;
+  }
+
+  // parse command line options, consuming arguments left to right
+  while ( argc > 1 )
+  {
+    if ( std::strcmp( argv[1], "--echo" ) == 0 )
+    {
+      echo = true;
+      --argc; ++argv;
+    }
+    else if ( std::strcmp( argv[1], "--create-directories" ) == 0 )
+    {
+      create_dirs = true;
+      --argc; ++argv;
+    }
+    else if ( std::strcmp( argv[1], "--v2" ) == 0 )
+    {
+      boost_build_v2 = true;
+      --argc; ++argv;
+    }
+    else if ( std::strcmp( argv[1], "--v1" ) == 0 )
+    {
+      boost_build_v2 = false;
+      --argc; ++argv;
+    }
+    else if ( std::strcmp( argv[1], "--boost-root" ) == 0 )
+    {
+      --argc; ++argv;
+      if ( argc == 1 )
+      {
+        std::cout << "Abort: option --boost-root requires a directory argument\n";
+        std::exit(1);
+      }
+      boost_root = fs::path( argv[1] );
+      if ( !boost_root.is_complete() )
+        boost_root = ( fs::initial_path() / boost_root ).normalize();
+
+      --argc; ++argv;
+    }
+    else if ( std::strcmp( argv[1], "--locate-root" ) == 0 )
+    {
+      --argc; ++argv;
+      if ( argc == 1 )
+      {
+        std::cout << "Abort: option --locate-root requires a directory argument\n";
+        std::exit(1);
+      }
+      locate_root = fs::path( argv[1] );
+      --argc; ++argv;
+    }
+    else if ( std::strcmp( argv[1], "--input-file" ) == 0 )
+    {
+      --argc; ++argv;
+      if ( argc == 1 )
+      {
+        std::cout << "Abort: option --input-file requires a filename argument\n";
+        std::exit(1);
+      }
+      input = new std::ifstream(argv[1]);
+      --argc; ++argv;
+    }
+    else if ( *argv[1] == '-' )
+    {
+      std::cout << "Abort: unknown option; invoke with no arguments to see list of valid options\n";
+      return 1;
+    }
+    else
+    {
+      // bare argument: treated as locate-root
+      locate_root = fs::path( argv[1] );
+      --argc; ++argv;
+    }
+  }
+
+  // no --boost-root given: discover it by walking up from the current dir
+  if ( boost_root.empty() )
+  {
+    set_boost_root();
+    boost_root.normalize();
+  }
+
+
+  if ( locate_root.empty() )
+  {
+    locate_root = boost_root;
+  }
+  else if ( !locate_root.is_complete() )
+  {
+    locate_root = ( fs::initial_path() / locate_root ).normalize();
+  }
+
+  if ( input == 0 )
+  {
+    input = &std::cin;
+  }
+
+  std::cout << "boost_root: " << boost_root.string() << '\n'
+    << "locate_root: " << locate_root.string() << '\n';
+
+  message_manager mgr;
+
+  string line;
+  string content;
+  bool capture_lines = false;
+
+  // This loop looks at lines for certain signatures, and accordingly:
+  //   * Calls start_message() to start capturing lines. (start_message() will
+  //     automatically call stop_message() if needed.)
+  //   * Calls stop_message() to stop capturing lines.
+  //   * Capture lines if line capture on.
+
+  static const int max_line_length = 8192;
+  int line_num = 0;
+  while ( std::getline( *input, line ) )
+  {
+    // defend against pathological lines: truncate before any parsing
+    if (max_line_length < line.size()) line = line.substr(0, max_line_length);
+
+    ++line_num;
+
+    // "...failed updating ..." is recognized by its first two tokens
+    std::vector<std::string> const line_parts( split( line ) );
+    std::string const line_start( line_parts[0] != "...failed"
+      ? line_parts[0]
+      : line_parts[0] + " " + line_parts[1]
+      );
+
+    if ( echo )
+    {
+      std::cout
+        << "line " << line_num << ": " << line << "\n"
+        << "\tline_start: " << line_start << "\n";
+    }
+
+    // create map of test-name to test-info
+    if ( line_start.find( "boost-test(" ) == 0 )
+    {
+      string::size_type pos = line.find( '"' );
+      string test_name( line.substr( pos+1, line.find( '"', pos+1)-pos-1 ) );
+      test_info info;
+      info.always_show_run_output
+        = line.find( "\"always_show_run_output\"" ) != string::npos;
+      info.type = line.substr( 11, line.find( ')' )-11 );
+      for (unsigned int i = 0; i!=info.type.size(); ++i )
+        { info.type[i] = std::tolower( info.type[i] ); }
+      pos = line.find( ':' );
+      // the rest of line is missing if bjam didn't know how to make target
+      if ( pos + 1 != line.size() )
+      {
+        info.file_path = line.substr( pos+3,
+          line.find( "\"", pos+3 )-pos-3 );
+        convert_path_separators( info.file_path );
+        if ( info.file_path.find( "libs/libs/" ) == 0 ) info.file_path.erase( 0, 5 );
+        if ( test_name.find( "/" ) == string::npos )
+          test_name = "/" + test_name;
+        test2info.insert( std::make_pair( test_name, info ) );
+        // std::cout << test_name << ", " << info.type << ", " << info.file_path << "\n";
+      }
+      else
+      {
+        std::cout << "*****Warning - missing test path: " << line << "\n"
+          << "  (Usually occurs when bjam doesn't know how to make a target)\n";
+      }
+      continue;
+    }
+
+    // these actions represent both the start of a new action
+    // and the end of a failed action
+    else if ( line_start.find( "C++-action" ) != string::npos
+      || line_start.find( "vc-C++" ) != string::npos
+      || line_start.find( "C-action" ) != string::npos
+      || line_start.find( "Cc-action" ) != string::npos
+      || line_start.find( "vc-Cc" ) != string::npos
+      || line_start.find( ".compile.") != string::npos
+      || line_start.find( "compile-") != string::npos
+      || line_start.find( "-compile") != string::npos
+      || line_start.find( "Link-action" ) != string::npos
+      || line_start.find( "vc-Link" ) != string::npos
+      || line_start.find( "Archive-action" ) != string::npos
+      || line_start.find( ".archive") != string::npos
+      || ( line_start.find( ".link") != string::npos &&
+           // .linkonce is present in gcc linker messages about
+           // unresolved symbols. We don't have to parse those
+           line_start.find( ".linkonce" ) == string::npos )
+      )
+    {
+      //~ if ( !test2info.size() )
+      //~ {
+      //~ std::cout << "*****Error - No \"boost-test\" lines encountered.\n"
+      //~ " (Usually occurs when bjam was envoked without the --dump-tests option\n"
+      //~ " or bjam was envoked in the wrong directory)\n";
+      //~ return 1;
+      //~ }
+
+      string action( ( line_start.find( "Link-action" ) != string::npos
+        || line_start.find( "vc-Link" ) != string::npos
+        || line_start.find( "Archive-action" ) != string::npos
+        || line_start.find( ".archive") != string::npos
+        || line_start.find( ".link") != string::npos
+        )
+        ? "link" : "compile"
+        );
+
+      if ( line_start.find( "...failed " ) != string::npos )
+      {
+        mgr.stop_message( action, target_directory( line ),
+          "fail", timestamp(), content );
+      }
+      else
+      {
+        string target_dir( target_directory( line ) );
+        mgr.start_message( action, target_dir,
+          test_name( target_dir ), toolset( target_dir ), content );
+      }
+      content = "\n";
+      capture_lines = true;
+    }
+
+    // these actions are only used to stop the previous action
+    else if ( line_start.find( "-Archive" ) != string::npos
+      || line_start.find( "MkDir" ) == 0
+      || line_start.find( "common.mkdir" ) == 0 )
+    {
+      mgr.stop_message( content );
+      content.clear();
+      capture_lines = false;
+    }
+
+    else if ( line_start.find( "execute-test" ) != string::npos
+      || line_start.find( "capture-output" ) != string::npos )
+    {
+      if ( line_start.find( "...failed " ) != string::npos )
+      {
+        mgr.stop_message( "run", target_directory( line ),
+          "fail", timestamp(), content );
+        content = "\n";
+        capture_lines = true;
+      }
+      else
+      {
+        string target_dir( target_directory( line ) );
+        mgr.start_message( "run", target_dir,
+          test_name( target_dir ), toolset( target_dir ), content );
+
+        // contents of .output file for content
+        capture_lines = false;
+        content = "\n";
+        fs::ifstream file( locate_root / target_dir
+          / (test_name(target_dir) + ".output") );
+        if ( file )
+        {
+          string ln;
+          while ( std::getline( file, ln ) )
+          {
+            // a <note> marker anywhere in the output downgrades the result
+            if ( ln.find( "<note>" ) != string::npos ) mgr.note( true );
+            append_html( ln, content );
+            content += "\n";
+          }
+        }
+      }
+    }
+
+    // bjam indicates some prior dependency failed by a "...skipped" message
+    else if ( line_start.find( "...skipped" ) != string::npos
+      && line.find( "<directory-grist>" ) == string::npos
+      )
+    {
+      mgr.stop_message( content );
+      content.clear();
+      capture_lines = false;
+
+      if ( line.find( " for lack of " ) != string::npos )
+      {
+        capture_lines = ( line.find( ".run for lack of " ) == string::npos );
+
+        string target_dir;
+        string lib_dir;
+
+        parse_skipped_msg( line, target_dir, lib_dir );
+
+        if ( target_dir != lib_dir ) // it's a lib problem
+        {
+          mgr.start_message( "lib", target_dir,
+            test_name( target_dir ), toolset( target_dir ), content );
+          content = lib_dir;
+          mgr.stop_message( "lib", target_dir, "fail", timestamp(), content );
+          content = "\n";
+        }
+      }
+
+    }
+
+    else if ( line_start.find( "**passed**" ) != string::npos
+      || line_start.find( "failed-test-file" ) != string::npos
+      || line_start.find( "command-file-dump" ) != string::npos )
+    {
+      mgr.stop_message( content );
+      content = "\n";
+      capture_lines = true;
+    }
+
+    else if ( capture_lines ) // hang onto lines for possible later use
+    {
+      append_html( line, content );;
+      content += "\n";
+    }
+  }
+
+  // flush any action still pending at end of input
+  mgr.stop_message( content );
+  if (input != &std::cin)
+    delete input;
+  return 0;
+}
diff --git a/tools/regression/src/process_jam_log.py b/tools/regression/src/process_jam_log.py
new file mode 100755
index 0000000000..a7722b9d7a
--- /dev/null
+++ b/tools/regression/src/process_jam_log.py
@@ -0,0 +1,468 @@
+#!/usr/bin/python
+# Copyright 2008 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import re
+import optparse
+import time
+import xml.dom.minidom
+import xml.dom.pulldom
+from xml.sax.saxutils import unescape, escape
+import os.path
+
+#~ Process a bjam XML log into the XML log format for Boost result processing.
+class BJamLog2Results:
+
    def __init__(self,args=None):
        """Parse command line options and drive the conversion.

        The option values are stored directly on ``self`` (optparse's
        ``parse_args(args, self)`` uses this object as the values target),
        so the attribute assignments below are the option defaults.
        Construction immediately performs the whole conversion: it parses
        the bjam XML log given as the positional argument and builds the
        Boost test-results document in ``self.results``.
        """
        opt = optparse.OptionParser(
            usage="%prog [options] input")
        opt.add_option( '--output',
            help="output file" )
        opt.add_option( '--runner',
            help="runner ID (e.g. 'Metacomm')" )
        opt.add_option( '--comment',
            help="an HTML comment file to be inserted in the reports" )
        opt.add_option( '--tag',
            help="the tag for the results" )
        opt.add_option( '--incremental',
            help="do incremental run (do not remove previous binaries)",
            action='store_true' )
        opt.add_option( '--platform' )
        opt.add_option( '--source' )
        opt.add_option( '--revision' )
        # Defaults; overwritten by parse_args when the option is given.
        self.output = None
        self.runner = None
        self.comment='comment.html'
        self.tag='trunk'
        self.incremental=False
        self.platform=''
        self.source='SVN'
        self.revision=None
        self.input = []
        # NOTE(review): positional args land in self.input; add_log indexes
        # self.input[0] and [1], so running with no input raises IndexError.
        ( _opt_, self.input ) = opt.parse_args(args,self)
        if self.incremental:
            run_type = 'incremental'
        else:
            run_type = 'full'
        # Skeleton result document; the timestamp attribute is filled in
        # later by x_build_timestamp.
        self.results = xml.dom.minidom.parseString('''<?xml version="1.0" encoding="UTF-8"?>
<test-run
    source="%(source)s"
    runner="%(runner)s"
    timestamp=""
    platform="%(platform)s"
    tag="%(tag)s"
    run-type="%(run-type)s"
    revision="%(revision)s">
</test-run>
''' % {
            'source' : self.source,
            'runner' : self.runner,
            'platform' : self.platform,
            'tag' : self.tag,
            'run-type' : run_type,
            'revision' : self.revision,
            } )

        # Lookup tables populated while walking the bjam log:
        self.test = {}            # test name -> test description dict
        self.target_to_test = {}  # build target -> test name
        self.target = {}          # jam target -> {name, path}
        self.parent = {}          # jam target -> parent jam target (DAG)
        self.log = {}             # target directory -> <test-log> element

        self.add_log()
        self.gen_output()

        #~ print self.test
        #~ print self.target
+
    def add_log(self):
        """Stream-parse the bjam XML log and translate its elements.

        Uses pulldom so only the elements we care about are expanded into
        DOM trees.  For each opened element, x_name_ looks up a translator
        method (x_build_test, x_build_action, ...) keyed on the element
        context; translated items are appended to the <test-run> root.
        """
        # NOTE(review): input[0] falsy falls back to input[1]; presumably
        # an empty-string first argument means "take the next one" — confirm
        # against the callers in regression.py.
        if self.input[0]:
            bjam_xml = self.input[0]
        else:
            bjam_xml = self.input[1]
        events = xml.dom.pulldom.parse(bjam_xml)
        context = []
        test_run = self.results.documentElement
        for (event,node) in events:
            if event == xml.dom.pulldom.START_ELEMENT:
                context.append(node)
                if node.nodeType == xml.dom.Node.ELEMENT_NODE:
                    x_f = self.x_name_(*context)
                    if x_f:
                        events.expandNode(node)
                        # expanding eats the end element, hence walking us out one level
                        context.pop()
                        # call the translator, and add returned items to the result
                        items = (x_f[1])(node)
                        if items:
                            for item in items:
                                if item:
                                    test_run.appendChild(self.results.createTextNode("\n"))
                                    test_run.appendChild(item)
            elif event == xml.dom.pulldom.END_ELEMENT:
                context.pop()
        #~ Add the log items now that we've collected all of them.
        items = self.log.values()
        if items:
            for item in items:
                if item:
                    test_run.appendChild(self.results.createTextNode("\n"))
                    test_run.appendChild(item)
+
+ def gen_output(self):
+ if self.output:
+ out = open(self.output,'w')
+ else:
+ out = sys.stdout
+ if out:
+ self.results.writexml(out,encoding='utf-8')
+
+ def tostring(self):
+ return self.results.toxml('utf-8')
+
+ def x_name_(self, *context, **kwargs):
+ node = None
+ names = [ ]
+ for c in context:
+ if c:
+ if not isinstance(c,xml.dom.Node):
+ suffix = '_'+c.replace('-','_').replace('#','_')
+ else:
+ suffix = '_'+c.nodeName.replace('-','_').replace('#','_')
+ node = c
+ names.append('x')
+ names = map(lambda x: x+suffix,names)
+ if node:
+ for name in names:
+ if hasattr(self,name):
+ return (name,getattr(self,name))
+ return None
+
+ def x(self, *context, **kwargs):
+ node = None
+ names = [ ]
+ for c in context:
+ if c:
+ if not isinstance(c,xml.dom.Node):
+ suffix = '_'+c.replace('-','_').replace('#','_')
+ else:
+ suffix = '_'+c.nodeName.replace('-','_').replace('#','_')
+ node = c
+ names.append('x')
+ names = map(lambda x: x+suffix,names)
+ if node:
+ for name in names:
+ if hasattr(self,name):
+ return getattr(self,name)(node,**kwargs)
+ else:
+ assert False, 'Unknown node type %s'%(name)
+ return None
+
+ #~ The timestamp goes to the corresponding attribute in the result.
+ def x_build_timestamp( self, node ):
+ test_run = self.results.documentElement
+ test_run.setAttribute('timestamp',self.get_data(node).strip())
+ return None
+
+ #~ Comment file becomes a comment node.
+ def x_build_comment( self, node ):
+ comment = None
+ if self.comment:
+ comment_f = open(self.comment)
+ if comment_f:
+ comment = comment_f.read()
+ comment_f.close()
+ if not comment:
+ comment = ''
+ return [self.new_text('comment',comment)]
+
+ #~ Tests are remembered for future reference.
+ def x_build_test( self, node ):
+ test_run = self.results.documentElement
+ test_node = node
+ test_name = test_node.getAttribute('name')
+ self.test[test_name] = {
+ 'library' : '/'.join(test_name.split('/')[0:-1]),
+ 'test-name' : test_name.split('/')[-1],
+ 'test-type' : test_node.getAttribute('type').lower(),
+ 'test-program' : self.get_child_data(test_node,tag='source',strip=True),
+ 'target' : self.get_child_data(test_node,tag='target',strip=True),
+ 'info' : self.get_child_data(test_node,tag='info',strip=True)
+ }
+ #~ Add a lookup for the test given the test target.
+ self.target_to_test[self.test[test_name]['target']] = test_name
+ #~ print "--- %s\n => %s" %(self.test[test_name]['target'],test_name)
+ return None
+
+ #~ Process the target dependency DAG into an ancestry tree so we can look up
+ #~ which top-level library and test targets specific build actions correspond to.
    def x_build_targets_target( self, node ):
        """Record one node of the target dependency DAG.

        Maps the jam target to its name/path and records each dependency's
        parent link in ``self.parent`` so get_test can later walk any build
        action up to its top-level library or test target.
        """
        test_run = self.results.documentElement
        target_node = node
        name = self.get_child_data(target_node,tag='name',strip=True)
        path = self.get_child_data(target_node,tag='path',strip=True)
        jam_target = self.get_child_data(target_node,tag='jam-target',strip=True)
        #~ print "--- target :: %s" %(name)
        #~ Map for jam targets to virtual targets.
        self.target[jam_target] = {
            'name' : name,
            'path' : path
            }
        #~ Create the ancestry.
        dep_node = self.get_child(self.get_child(target_node,tag='dependencies'),tag='dependency')
        while dep_node:
            child = self.get_data(dep_node,strip=True)
            # Dependencies are written as "<project>//<target>"; rebuild the
            # grist form "<p{path}>{target}" used as a jam-target key.
            child_jam_target = '<p%s>%s' % (path,child.split('//',1)[1])
            self.parent[child_jam_target] = jam_target
            #~ print "--- %s\n ^ %s" %(jam_target,child_jam_target)
            dep_node = self.get_sibling(dep_node.nextSibling,tag='dependency')
        return None
+
+ #~ Given a build action log, process into the corresponding test log and
+ #~ specific test log sub-part.
    def x_build_action( self, node ):
        """Fold one build action into the test-log of its owning test.

        Classifies the action (compile/link/run/result) from its jam rule
        name, finds the corresponding test via the target ancestry, and
        appends the action's command/output/info into the matching sub-node
        of that test's <test-log>, tracking pass/fail status.
        """
        test_run = self.results.documentElement
        action_node = node
        name = self.get_child(action_node,tag='name')
        if name:
            name = self.get_data(name)
            #~ Based on the action, we decide what sub-section the log
            #~ should go into.
            action_type = None
            if re.match('[^%]+%[^.]+[.](compile)',name):
                action_type = 'compile'
            elif re.match('[^%]+%[^.]+[.](link|archive)',name):
                action_type = 'link'
            elif re.match('[^%]+%testing[.](capture-output)',name):
                action_type = 'run'
            elif re.match('[^%]+%testing[.](expect-failure|expect-success)',name):
                action_type = 'result'
            #~ print "+ [%s] %s %s :: %s" %(action_type,name,'','')
            if action_type:
                #~ Get the corresponding test.
                (target,test) = self.get_test(action_node,type=action_type)
                #~ Skip action that have no corresponding test as they are
                #~ regular build actions and don't need to show up in the
                #~ regression results.
                if not test:
                    return None
                #~ And the log node, which we will add the results to.
                log = self.get_log(action_node,test)
                #~ print "--- [%s] %s %s :: %s" %(action_type,name,target,test)
                #~ Collect some basic info about the action.
                result_data = "%(info)s\n\n%(command)s\n%(output)s\n" % {
                    'command' : self.get_action_command(action_node,action_type),
                    'output' : self.get_action_output(action_node,action_type),
                    'info' : self.get_action_info(action_node,action_type)
                    }
                #~ For the test result status we find the appropriate node
                #~ based on the type of test. Then adjust the result status
                #~ accordingly. This makes the result status reflect the
                #~ expectation as the result pages post processing does not
                #~ account for this inversion.
                action_tag = action_type
                if action_type == 'result':
                    # A 'result' action is filed under the sub-node matching
                    # the kind of test (compile/link/run).
                    if re.match(r'^compile',test['test-type']):
                        action_tag = 'compile'
                    elif re.match(r'^link',test['test-type']):
                        action_tag = 'link'
                    elif re.match(r'^run',test['test-type']):
                        action_tag = 'run'
                #~ The result sub-part we will add this result to.
                result_node = self.get_child(log,tag=action_tag)
                # bjam reports exit status as a string; '0' means success.
                if action_node.getAttribute('status') == '0':
                    action_result = 'succeed'
                else:
                    action_result = 'fail'
                if not result_node:
                    #~ If we don't have one already, create it and add the result.
                    result_node = self.new_text(action_tag,result_data,
                        result = action_result,
                        timestamp = action_node.getAttribute('start'))
                    log.appendChild(self.results.createTextNode("\n"))
                    log.appendChild(result_node)
                else:
                    #~ For an existing result node we set the status to fail
                    #~ when any of the individual actions fail, except for result
                    #~ status.
                    if action_type != 'result':
                        result = result_node.getAttribute('result')
                        if action_node.getAttribute('status') != '0':
                            result = 'fail'
                    else:
                        # 'result' actions are authoritative and overwrite.
                        result = action_result
                    result_node.setAttribute('result',result)
                    result_node.appendChild(self.results.createTextNode("\n"))
                    result_node.appendChild(self.results.createTextNode(result_data))
        return None
+
+ #~ The command executed for the action. For run actions we omit the command
+ #~ as it's just noise.
+ def get_action_command( self, action_node, action_type ):
+ if action_type != 'run':
+ return self.get_child_data(action_node,tag='command')
+ else:
+ return ''
+
+ #~ The command output.
+ def get_action_output( self, action_node, action_type ):
+ return self.get_child_data(action_node,tag='output',default='')
+
+ #~ Some basic info about the action.
    def get_action_info( self, action_node, action_type ):
        """Build the human-readable header for an action's log entry:
        rule name and target path, timing, and (for compiles) the defines
        that may otherwise be hidden inside response files."""
        info = ""
        #~ The jam action and target.
        info += "%s %s\n" %(self.get_child_data(action_node,tag='name'),
            self.get_child_data(action_node,tag='path'))
        #~ The timing of the action.
        info += "Time: (start) %s -- (end) %s -- (user) %s -- (system) %s\n" %(
            action_node.getAttribute('start'), action_node.getAttribute('end'),
            action_node.getAttribute('user'), action_node.getAttribute('system'))
        #~ And for compiles some context that may be hidden if using response files.
        if action_type == 'compile':
            define = self.get_child(self.get_child(action_node,tag='properties'),name='define')
            while define:
                info += "Define: %s\n" %(self.get_data(define,strip=True))
                define = self.get_sibling(define.nextSibling,name='define')
        return info
+
+ #~ Find the test corresponding to an action. For testing targets these
+ #~ are the ones pre-declared in the --dump-test option. For libraries
+ #~ we create a dummy test as needed.
+ def get_test( self, node, type = None ):
+ jam_target = self.get_child_data(node,tag='jam-target')
+ base = self.target[jam_target]['name']
+ target = jam_target
+ while target in self.parent:
+ target = self.parent[target]
+ #~ print "--- TEST: %s ==> %s" %(jam_target,target)
+ #~ main-target-type is a precise indicator of what the build target is
+ #~ proginally meant to be.
+ main_type = self.get_child_data(self.get_child(node,tag='properties'),
+ name='main-target-type',strip=True)
+ if main_type == 'LIB' and type:
+ lib = self.target[target]['name']
+ if not lib in self.test:
+ self.test[lib] = {
+ 'library' : re.search(r'libs/([^/]+)',lib).group(1),
+ 'test-name' : os.path.basename(lib),
+ 'test-type' : 'lib',
+ 'test-program' : os.path.basename(lib),
+ 'target' : lib
+ }
+ test = self.test[lib]
+ else:
+ target_name_ = self.target[target]['name']
+ if self.target_to_test.has_key(target_name_):
+ test = self.test[self.target_to_test[target_name_]]
+ else:
+ test = None
+ return (base,test)
+
+ #~ Find, or create, the test-log node to add results to.
    def get_log( self, node, test ):
        """Find, or create, the <test-log> element for an action's test.

        The log is keyed by the target directory, normalized to the path
        relative to bin.v2 with forward slashes.
        """
        target_directory = os.path.dirname(self.get_child_data(
            node,tag='path',strip=True))
        # Strip everything up to and including ".../bin.v2/".
        target_directory = re.sub(r'.*[/\\]bin[.]v2[/\\]','',target_directory)
        # Normalize Windows path separators.
        target_directory = re.sub(r'[\\]','/',target_directory)
        if not target_directory in self.log:
            if 'info' in test and test['info'] == 'always_show_run_output':
                show_run_output = 'true'
            else:
                show_run_output = 'false'
            self.log[target_directory] = self.new_node('test-log',
                library=test['library'],
                test_name=test['test-name'],
                test_type=test['test-type'],
                test_program=test['test-program'],
                toolset=self.get_toolset(node),
                target_directory=target_directory,
                show_run_output=show_run_output)
        return self.log[target_directory]
+
+ #~ The precise toolset from the build properties.
+ def get_toolset( self, node ):
+ toolset = self.get_child_data(self.get_child(node,tag='properties'),
+ name='toolset',strip=True)
+ toolset_version = self.get_child_data(self.get_child(node,tag='properties'),
+ name='toolset-%s:version'%toolset,strip=True)
+ return '%s-%s' %(toolset,toolset_version)
+
+ #~ XML utilities...
+
+ def get_sibling( self, sibling, tag = None, id = None, name = None, type = None ):
+ n = sibling
+ while n:
+ found = True
+ if type and found:
+ found = found and type == n.nodeType
+ if tag and found:
+ found = found and tag == n.nodeName
+ if (id or name) and found:
+ found = found and n.nodeType == xml.dom.Node.ELEMENT_NODE
+ if id and found:
+ if n.hasAttribute('id'):
+ found = found and n.getAttribute('id') == id
+ else:
+ found = found and n.hasAttribute('id') and n.getAttribute('id') == id
+ if name and found:
+ found = found and n.hasAttribute('name') and n.getAttribute('name') == name
+ if found:
+ return n
+ n = n.nextSibling
+ return None
+
+ def get_child( self, root, tag = None, id = None, name = None, type = None ):
+ return self.get_sibling(root.firstChild,tag=tag,id=id,name=name,type=type)
+
    def get_data( self, node, strip = False, default = None ):
        """Return the concatenated text content of ``node``.

        Collects the first run of consecutive #text / #cdata-section
        children; stops at the first child of any other kind.  Empty or
        missing text yields ``default``; ``strip`` trims whitespace.
        """
        data = None
        if node:
            data_node = None
            if not data_node:
                data_node = self.get_child(node,tag='#text')
            if not data_node:
                data_node = self.get_child(node,tag='#cdata-section')
            data = ""
            while data_node:
                data += data_node.data
                data_node = data_node.nextSibling
                # Stop as soon as a non-text sibling interrupts the run.
                if data_node:
                    if data_node.nodeName != '#text' \
                        and data_node.nodeName != '#cdata-section':
                        data_node = None
            if not data:
                data = default
            else:
                if strip:
                    data = data.strip()
        return data
+
+ def get_child_data( self, root, tag = None, id = None, name = None, strip = False, default = None ):
+ return self.get_data(self.get_child(root,tag=tag,id=id,name=name),strip=strip,default=default)
+
+ def new_node( self, tag, *child, **kwargs ):
+ result = self.results.createElement(tag)
+ for k in kwargs.keys():
+ if kwargs[k] != '':
+ if k == 'id':
+ result.setAttribute('id',kwargs[k])
+ elif k == 'klass':
+ result.setAttribute('class',kwargs[k])
+ else:
+ result.setAttribute(k.replace('_','-'),kwargs[k])
+ for c in child:
+ if c:
+ result.appendChild(c)
+ return result
+
+ def new_text( self, tag, data, **kwargs ):
+ result = self.new_node(tag,**kwargs)
+ data = data.strip()
+ if len(data) > 0:
+ result.appendChild(self.results.createTextNode(data))
+ return result
+
+
+if __name__ == '__main__': BJamLog2Results()
diff --git a/tools/regression/src/regression-logs.pl b/tools/regression/src/regression-logs.pl
new file mode 100644
index 0000000000..97cd4e9acd
--- /dev/null
+++ b/tools/regression/src/regression-logs.pl
@@ -0,0 +1,197 @@
+#!/usr/bin/perl
+
+#~ Copyright 2003, Rene Rivera.
+#~ Use, modification and distribution are subject to the Boost Software
+#~ License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+#~ http://www.boost.org/LICENSE_1_0.txt)
+
+use FileHandle;
+use Time::Local;
+
+# Get the whole percent value
+#
# Convert count/total into a whole percent, clamped so any non-zero count
# shows at least 1% and any incomplete total shows at most 99%.
sub percent_value
{
    my ($count,$total) = @_;
    my $percent = int (($count/$total)*100+0.5);
    if ($count > 0 && $percent == 0) { $percent = 1; }
    if ($count < $total && $percent == 100) { $percent = 99; }
    return $percent;
}
+
+# Generate item html for the pass column.
+#
# Generate item html for the pass column: the pass percentage (100 minus
# the fail+missing percentage) plus the warning count.
sub result_info_pass
{
    my ($color,$pass,$warn,$fail,$missing) = @_;
    my $percent = 100-percent_value($fail+$missing,$pass+$warn+$fail+$missing);
    return "<font color=\"$color\"><font size=\"+1\">$percent%</font><br>($warn&nbsp;warnings)</font>";
}
+
+# Generate item html for the fail column.
+#
# Generate item html for the fail column: the fail+missing percentage plus
# the raw failure count.
sub result_info_fail
{
    my ($color,$pass,$warn,$fail,$missing) = @_;
    my $percent = percent_value($fail+$missing,$pass+$warn+$fail+$missing);
    return "<font color=\"$color\"><font size=\"+1\">$percent%</font><br>($fail)</font>";
}
+
+# Generate an age highlighted run date string.
+# Use as: data_info(run-date-html)
+#
# Generate an age-highlighted run date string: plain up to 2 days old,
# orange up to 14 days, red beyond.
# Use as: date_info(run-date-html)
# NOTE(review): the split indices assume dates shaped like
# "HH:MM:SS <weekday> <day> <Month> <year>" -- confirm against the report
# generator's date format.
sub date_info
{
    my %m = ('January',0,'February',1,'March',2,'April',3,'May',4,'June',5,
        'July',6,'August',7,'September',8,'October',9,'November',10,'December',11);
    my @d = split(/ |:/,$_[0]);
    my ($hour,$min,$sec,$day,$month,$year) = ($d[0],$d[1],$d[2],$d[4],$m{$d[5]},$d[6]);
    #print "<!-- $hour.$min.$sec.$day.$month.$year -->\n";
    my $test_t = timegm($sec,$min,$hour,$day,$month,$year);
    my $age = time-$test_t;
    my $age_days = $age/(60*60*24);
    #print "<!-- $age_days days old -->\n";
    my $age_html = "<font>";
    if ($age_days <= 2) { }
    elsif ($age_days <= 14) { $age_html = "<font color=\"#FF9900\">"; }
    else { $age_html = "<font color=\"#FF0000\">"; }
    return $age_html.$_[0]."</font>";
}
+
+# Generate an age string based on the run date.
+# Use as: age_info(run-date-html)
+#
# Generate a human readable age string ("today", "N days", "N weeks",
# "N months") based on the run date, colored like date_info.
# Use as: age_info(run-date-html)
sub age_info
{
    my %m = ('January',0,'February',1,'March',2,'April',3,'May',4,'June',5,
        'July',6,'August',7,'September',8,'October',9,'November',10,'December',11);
    my @d = split(/ |:/,$_[0]);
    my ($hour,$min,$sec,$day,$month,$year) = ($d[0],$d[1],$d[2],$d[4],$m{$d[5]},$d[6]);
    #print "<!-- $hour.$min.$sec.$day.$month.$year -->\n";
    my $test_t = timegm($sec,$min,$hour,$day,$month,$year);
    my $age = time-$test_t;
    my $age_days = $age/(60*60*24);
    #print "<!-- $age_days days old -->\n";
    my $age_html = "<font>";
    if ($age_days <= 2) { }
    elsif ($age_days <= 14) { $age_html = "<font color=\"#FF9900\">"; }
    else { $age_html = "<font color=\"#FF0000\">"; }
    if ($age_days <= 1) { $age_html = $age_html."today"; }
    elsif ($age_days <= 2) { $age_html = $age_html."yesterday"; }
    elsif ($age_days < 14) { my $days = int $age_days; $age_html = $age_html.$days." days"; }
    elsif ($age_days < 7*8) { my $weeks = int $age_days/7; $age_html = $age_html.$weeks." weeks"; }
    else { my $months = int $age_days/28; $age_html = $age_html.$months." months"; }
    return $age_html."</font>";
}
+
#~ foreach my $k (sort keys %ENV)
#~ {
    #~ print "<!-- $k = $ENV{$k} -->\n";
#~ }
# Scan the current directory for per-platform compiler-status pages
# ("*links*.html") and print an HTML summary table with one row group per
# platform: run date, age, and per-compiler pass/fail percentages scraped
# out of each page's markup.
my $logdir = "$ENV{PWD}";
#~ my $logdir = "C:\\CVSROOTs\\Boost\\boost\\status";
opendir LOGS, "$logdir";
my @logs = grep /.*links[^.]*\.html$/, readdir LOGS;
closedir LOGS;
my @bgcolor = ( "bgcolor=\"#EEEEFF\"", "" );
my $row = 0;
print "<table>\n";
print "<tr>\n",
    "<th align=\"left\" bgcolor=\"#DDDDDD\">Platform</th>\n",
    "<th align=\"left\" bgcolor=\"#DDDDDD\">Run Date</th>\n",
    "<th align=\"left\" bgcolor=\"#DDDDDD\">Age</th>\n",
    "<th align=\"left\" bgcolor=\"#DDDDDD\">Compilers</th>\n",
    "<th align=\"left\" bgcolor=\"#DDDDDD\">Pass</th>\n",
    "<th align=\"left\" bgcolor=\"#DDDDDD\">Fail</th>\n",
    "</tr>\n";
foreach $l (sort { lc($a) cmp lc($b) } @logs)
{
    # The "-links" page name maps to the main status page name.
    my $log = $l;
    $log =~ s/-links//s;
    my ($spec) = ($log =~ /cs-([^\.]+)/);
    my $fh = new FileHandle;
    if ($fh->open("<$logdir/$log"))
    {
        my $content = join('',$fh->getlines());
        $fh->close;
        # Pull the status header, platform name, and run date out of the
        # page markup.
        my ($status) = ($content =~ /(<h1>Compiler(.(?!<\/td>))+.)/si);
        my ($platform) = ($status =~ /Status: ([^<]+)/si);
        my ($run_date) = ($status =~ /Date:<\/b> ([^<]+)/si);
        $run_date =~ s/, /<br>/g;
        # The compiler names live in the header row after "Test Type".
        my ($compilers) = ($content =~ /Test Type<\/a><\/t[dh]>((.(?!<\/tr>))+.)/si);
        if ($compilers eq "") { next; }
        $compilers =~ s/-<br>//g;
        $compilers =~ s/<\/td>//g;
        my @compiler = ($compilers =~ /<td>(.*)$/gim);
        my $count = @compiler;
        # Every result cell in the page, in row-major order; each table row
        # holds one result per compiler.
        my @results = ($content =~ /(>Pass<|>Warn<|>Fail<|>Missing<)/gi);
        my $test_count = (scalar @results)/$count;
        my @pass = map { 0 } (1..$count);
        my @warn = map { 0 } (1..$count);
        my @fail = map { 0 } (1..$count);
        my @missing = map { 0 } (1..$count);
        my @total = map { 0 } (1..$count);
        #~ print "<!-- ",
            #~ "pass = ",join(',',@pass)," ",
            #~ "warn = ",join(',',@warn)," ",
            #~ "fail = ",join(',',@fail)," ",
            #~ "missing = ",join(',',@missing)," ",
            #~ "total = ",join(',',@total)," ",
            #~ " -->\n";
        # Tally per-compiler counts across all test rows.
        for my $t (1..$test_count)
        {
            my $r0 = (($t-1)*$count);
            my $r1 = (($t-1)*$count+$count-1);
            my @r = @results[(($t-1)*$count)..(($t-1)*$count+$count-1)];
            #~ print "<!-- ",
                #~ "result = ",join(',',@r)," ",
                #~ "range = ",$r0,"..",$r1," (",(scalar @results),")",
                #~ " -->\n";
            for my $c (1..$count)
            {
                if ($r[$c-1] =~ /Pass/i) { ++$pass[$c-1]; }
                elsif ($r[$c-1] =~ /Warn/i) { ++$warn[$c-1]; }
                elsif ($r[$c-1] =~ /Fail/i) { ++$fail[$c-1]; }
                elsif ($r[$c-1] =~ /Missing/i) { ++$missing[$c-1]; }
                ++$total[$c-1];
            }
        }
        #~ print "<!-- ",
            #~ "pass = ",join(',',@pass)," ",
            #~ "warn = ",join(',',@warn)," ",
            #~ "fail = ",join(',',@fail)," ",
            #~ "missing = ",join(',',@missing)," ",
            #~ "total = ",join(',',@total)," ",
            #~ " -->\n";
        # Collapse multi-line compiler labels to "first lines ... last line".
        for my $comp (1..(scalar @compiler))
        {
            my @lines = split(/<br>/,$compiler[$comp-1]);
            if (@lines > 2) { $compiler[$comp-1] = join(' ',@lines[0..(scalar @lines)-2])."<br>".$lines[(scalar @lines)-1]; }
        }
        # First row carries the platform/date/age cells spanning all
        # compiler rows; subsequent rows list the remaining compilers.
        print
            "<tr>\n",
            "<td rowspan=\"$count\" valign=\"top\"><font size=\"+1\">$platform</font><br>(<a href=\"./$log\">$spec</a>)</td>\n",
            "<td rowspan=\"$count\" valign=\"top\">",$run_date,"</td>\n",
            "<td rowspan=\"$count\" valign=\"top\">",age_info($run_date),"</td>\n",
            "<td valign=\"top\" ",$bgcolor[$row],">",$compiler[0],"</td>\n",
            "<td valign=\"top\" ",$bgcolor[$row],">",result_info_pass("#000000",$pass[0],$warn[0],$fail[0],$missing[0]),"</td>\n",
            "<td valign=\"top\" ",$bgcolor[$row],">",result_info_fail("#FF0000",$pass[0],$warn[0],$fail[0],$missing[0]),"</td>\n",
            "</tr>\n";
        $row = ($row+1)%2;
        foreach my $c (1..($count-1))
        {
            print
                "<tr>\n",
                "<td valign=\"top\" ",$bgcolor[$row],">",$compiler[$c],"</td>\n",
                "<td valign=\"top\" ",$bgcolor[$row],">",result_info_pass("#000000",$pass[$c],$warn[$c],$fail[$c],$missing[$c]),"</td>\n",
                "<td valign=\"top\" ",$bgcolor[$row],">",result_info_fail("#FF0000",$pass[$c],$warn[$c],$fail[$c],$missing[$c]),"</td>\n",
                "</tr>\n";
            $row = ($row+1)%2;
        }
        print
            "<tr>\n",
            "<td colspan=\"7\"><hr size=\"1\" noshade></td>\n",
            "</tr>\n";
    }
}
print "</table>\n";
diff --git a/tools/regression/src/regression.py b/tools/regression/src/regression.py
new file mode 100644
index 0000000000..0fa9603ac4
--- /dev/null
+++ b/tools/regression/src/regression.py
@@ -0,0 +1,908 @@
+#!/usr/bin/python
+
+# Copyright MetaCommunications, Inc. 2003-2007
+# Copyright Redshift Software, Inc. 2007
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import glob
+import optparse
+import os
+import os.path
+import platform
+import sys
+import time
+
+#~ Place holder for xsl_reports/util module
+utils = None
+
+repo_root = {
+ 'anon' : 'http://svn.boost.org/svn/boost/',
+ 'user' : 'https://svn.boost.org/svn/boost/'
+ }
+repo_path = {
+ 'trunk' : 'trunk',
+ 'release' : 'branches/release',
+ 'build' : 'trunk/tools/build/v2',
+ 'jam' : 'trunk/tools/build/v2/engine',
+ 'regression' : 'trunk/tools/regression',
+ 'boost-build.jam'
+ : 'trunk/boost-build.jam'
+ }
+
+class runner:
+
    def __init__(self,root):
        """Parse options, derive the run layout under ``root``, and run.

        Option values are stored directly on ``self`` (optparse's
        ``parse_args(None, self)`` uses this object as the values target),
        so the attribute assignments below are the defaults.  Remaining
        positional arguments are the commands to execute (command_* methods
        with underscores mapped to dashes); with none given, 'regression'
        is run.  Construction ends by calling self.main().
        """
        # Build the "commands: ..." help text from the command_* methods.
        # NOTE(review): map() followed by .sort() is Python 2 only; on
        # Python 3 map returns an iterator without a sort method.
        commands = map(
            lambda m: m[8:].replace('_','-'),
            filter(
                lambda m: m.startswith('command_'),
                runner.__dict__.keys())
            )
        commands.sort()
        commands = "commands: %s" % ', '.join(commands)

        opt = optparse.OptionParser(
            usage="%prog [options] [commands]",
            description=commands)

        #~ Base Options:
        opt.add_option( '--runner',
            help="runner ID (e.g. 'Metacomm')" )
        opt.add_option( '--comment',
            help="an HTML comment file to be inserted in the reports" )
        opt.add_option( '--tag',
            help="the tag for the results" )
        opt.add_option( '--toolsets',
            help="comma-separated list of toolsets to test with" )
        opt.add_option( '--libraries',
            help="comma separated list of libraries to test")
        opt.add_option( '--incremental',
            help="do incremental run (do not remove previous binaries)",
            action='store_true' )
        opt.add_option( '--timeout',
            help="specifies the timeout, in minutes, for a single test run/compilation",
            type='int' )
        opt.add_option( '--bjam-options',
            help="options to pass to the regression test" )
        opt.add_option( '--bjam-toolset',
            help="bootstrap toolset for 'bjam' executable" )
        opt.add_option( '--pjl-toolset',
            help="bootstrap toolset for 'process_jam_log' executable" )
        opt.add_option( '--platform' )

        #~ Source Options:
        opt.add_option( '--user',
            help="Boost SVN user ID" )
        opt.add_option( '--local',
            help="the name of the boost tarball" )
        opt.add_option( '--force-update',
            help="do an SVN update (if applicable) instead of a clean checkout, even when performing a full run",
            action='store_true' )
        opt.add_option( '--have-source',
            help="do neither a tarball download nor an SVN update; used primarily for testing script changes",
            action='store_true' )

        #~ Connection Options:
        opt.add_option( '--ftp',
            help="FTP URL to upload results to." )
        opt.add_option( '--proxy',
            help="HTTP proxy server address and port (e.g.'http://www.someproxy.com:3128')" )
        opt.add_option( '--ftp-proxy',
            help="FTP proxy server (e.g. 'ftpproxy')" )
        opt.add_option( '--dart-server',
            help="the dart server to send results to" )

        #~ Debug Options:
        opt.add_option( '--debug-level',
            help="debugging level; controls the amount of debugging output printed",
            type='int' )
        opt.add_option( '--send-bjam-log',
            help="send full bjam log of the regression run",
            action='store_true' )
        opt.add_option( '--mail',
            help="email address to send run notification to" )
        opt.add_option( '--smtp-login',
            help="STMP server address/login information, in the following form: <user>:<password>@<host>[:<port>]" )
        opt.add_option( '--skip-tests',
            help="do not run bjam; used for testing script changes",
            action='store_true' )

        #~ Defaults
        self.runner = None
        self.comment='comment.html'
        self.tag='trunk'
        self.toolsets=None
        self.libraries=None
        self.incremental=False
        self.timeout=5
        self.bjam_options=''
        self.bjam_toolset=''
        self.pjl_toolset=''
        # platform_name is defined elsewhere in this class (not visible in
        # this chunk).
        self.platform=self.platform_name()
        self.user='anonymous'
        self.local=None
        self.force_update=False
        self.have_source=False
        self.ftp=None
        self.proxy=None
        self.ftp_proxy=None
        self.dart_server=None
        self.debug_level=0
        self.send_bjam_log=False
        self.mail=None
        self.smtp_login=None
        self.skip_tests=False
        ( _opt_, self.actions ) = opt.parse_args(None,self)
        if not self.actions or self.actions == []:
            self.actions = [ 'regression' ]

        #~ Initialize option dependent values.
        self.regression_root = root
        self.boost_root = os.path.join( self.regression_root, 'boost' )
        self.regression_results = os.path.join( self.regression_root, 'results' )
        # The python log processor consumes XML; the C++ one a plain log.
        if self.pjl_toolset != 'python':
            self.regression_log = os.path.join( self.regression_results, 'bjam.log' )
        else:
            self.regression_log = os.path.join( self.regression_results, 'bjam.xml' )
        self.tools_bb_root = os.path.join( self.regression_root,'tools_bb' )
        self.tools_bjam_root = os.path.join( self.regression_root,'tools_bjam' )
        self.tools_regression_root = os.path.join( self.regression_root,'tools_regression' )
        self.xsl_reports_dir = os.path.join( self.tools_regression_root, 'xsl_reports' )
        self.timestamp_path = os.path.join( self.regression_root, 'timestamp' )
        # Platform specific executable and patch-script names.
        if sys.platform == 'win32':
            self.patch_boost = 'patch_boost.bat'
            self.bjam = { 'name' : 'bjam.exe' }
            self.process_jam_log = { 'name' : 'process_jam_log.exe' }
        elif sys.platform == 'cygwin':
            self.patch_boost = 'patch_boost'
            self.bjam = { 'name' : 'bjam.exe' }
            self.process_jam_log = { 'name' : 'process_jam_log.exe' }
        else:
            self.patch_boost = 'patch_boost'
            self.bjam = { 'name' : 'bjam' }
            self.process_jam_log = { 'name' : 'process_jam_log' }
        # Build descriptors for the two bootstrap tools.
        self.bjam = {
            'name' : self.bjam['name'],
            'build_cmd' : self.bjam_build_cmd,
            'path' : os.path.join(self.regression_root,self.bjam['name']),
            'source_dir' : self.tools_bjam_root,
            'build_dir' : self.tools_bjam_root,
            'build_args' : ''
            }
        self.process_jam_log = {
            'name' : self.process_jam_log['name'],
            'build_cmd' : self.bjam_cmd,
            'path' : os.path.join(self.regression_root,self.process_jam_log['name']),
            'source_dir' : os.path.join(self.tools_regression_root,'build'),
            'build_dir' : os.path.join(self.tools_regression_root,'build'),
            'build_args' : 'process_jam_log -d2'
            }

        if self.debug_level > 0:
            self.log('Regression root =     %s'%self.regression_root)
            self.log('Boost root =          %s'%self.boost_root)
            self.log('Regression results =  %s'%self.regression_results)
            self.log('Regression log =      %s'%self.regression_log)
            self.log('BB root =             %s'%self.tools_bb_root)
            self.log('Bjam root =           %s'%self.tools_bjam_root)
            self.log('Tools root =          %s'%self.tools_regression_root)
            self.log('XSL reports dir =     %s'%self.xsl_reports_dir)
            self.log('Timestamp =           %s'%self.timestamp_path)
            self.log('Patch Boost script =  %s'%self.patch_boost)

        if self.libraries is not None:
            self.libraries = self.libraries.split(",")
            # Boost.Build depends on any having run
            if "build" in self.libraries and "any" not in self.libraries:
                self.libraries += ["any"]

            self.bjam_options += ' "--limit-tests=' + \
                "|".join(lib for lib in self.libraries if lib != "build") + '"'

        self.main()
+
+ #~ The various commands that make up the testing sequence...
+
    def command_cleanup(self,*args):
        """Remove previous run state.

        ``args`` selects what to clean: 'source' removes the boost tree,
        'bin' removes the build output directories; with no args both are
        cleaned.  The results directory is always removed.
        """
        if not args or args == None or args == []: args = [ 'source', 'bin' ]

        if 'source' in args:
            self.log( 'Cleaning up "%s" directory ...' % self.boost_root )
            self.rmtree( self.boost_root )

        if 'bin' in args:
            boost_bin_dir = os.path.join( self.boost_root, 'bin' )
            self.log( 'Cleaning up "%s" directory ...' % boost_bin_dir )
            self.rmtree( boost_bin_dir )

            boost_binv2_dir = os.path.join( self.boost_root, 'bin.v2' )
            self.log( 'Cleaning up "%s" directory ...' % boost_binv2_dir )
            self.rmtree( boost_binv2_dir )

        self.log( 'Cleaning up "%s" directory ...' % self.regression_results )
        self.rmtree( self.regression_results )
+
+ def command_get_tools(self):
+ #~ Get Boost.Build v2...
+ self.log( 'Getting Boost.Build v2...' )
+ if self.user and self.user != '':
+ os.chdir( os.path.dirname(self.tools_bb_root) )
+ self.svn_command( 'co %s %s' % (
+ self.svn_repository_url(repo_path['build']),
+ os.path.basename(self.tools_bb_root) ) )
+ else:
+ self.retry( lambda: self.download_tarball(
+ os.path.basename(self.tools_bb_root)+".tar.bz2",
+ self.tarball_url(repo_path['build']) ) )
+ self.unpack_tarball(
+ self.tools_bb_root+".tar.bz2",
+ os.path.basename(self.tools_bb_root) )
+ #~ Get Boost.Jam...
+ self.log( 'Getting Boost.Jam...' )
+ if self.user and self.user != '':
+ os.chdir( os.path.dirname(self.tools_bjam_root) )
+ self.svn_command( 'co %s %s' % (
+ self.svn_repository_url(repo_path['jam']),
+ os.path.basename(self.tools_bjam_root) ) )
+ else:
+ self.retry( lambda: self.download_tarball(
+ os.path.basename(self.tools_bjam_root)+".tar.bz2",
+ self.tarball_url(repo_path['jam']) ) )
+ self.unpack_tarball(
+ self.tools_bjam_root+".tar.bz2",
+ os.path.basename(self.tools_bjam_root) )
+ #~ Get the regression tools and utilities...
+ self.log( 'Getting regression tools an utilities...' )
+ if self.user and self.user != '':
+ os.chdir( os.path.dirname(self.tools_regression_root) )
+ self.svn_command( 'co %s %s' % (
+ self.svn_repository_url(repo_path['regression']),
+ os.path.basename(self.tools_regression_root) ) )
+ else:
+ self.retry( lambda: self.download_tarball(
+ os.path.basename(self.tools_regression_root)+".tar.bz2",
+ self.tarball_url(repo_path['regression']) ) )
+ self.unpack_tarball(
+ self.tools_regression_root+".tar.bz2",
+ os.path.basename(self.tools_regression_root) )
+
+ #~ We get a boost-build.jam to make the tool build work even if there's
+ #~ and existing boost-build.jam above the testing root.
+ self.log( 'Getting boost-build.jam...' )
+ self.http_get(
+ self.svn_repository_url(repo_path['boost-build.jam']),
+ os.path.join( self.regression_root, 'boost-build.jam' ) )
+
+    def command_get_source(self):
+        '''Fetch the boost sources, via SVN checkout for authenticated
+        users or a snapshot tarball otherwise, refreshing the run
+        timestamp first.'''
+        self.refresh_timestamp()
+        self.log( 'Getting sources (%s)...' % self.timestamp() )
+
+        if self.user and self.user != '':
+            self.retry( self.svn_checkout )
+        else:
+            self.retry( self.get_tarball )
+        pass
+
+    def command_update_source(self):
+        '''Update an existing SVN working copy, or fall back to a full
+        source fetch when there is none.'''
+        # NOTE(review): the condition parses as
+        # (self.user and self.user != '') or exists(.svn) — presumably
+        # intentional (credentialed users always update); confirm.
+        if self.user and self.user != '' \
+            or os.path.exists( os.path.join( self.boost_root, '.svn' ) ):
+            # Touch the timestamp file so the run records the update time.
+            open( self.timestamp_path, 'w' ).close()
+            self.log( 'Updating sources from SVN (%s)...' % self.timestamp() )
+            self.retry( self.svn_update )
+        else:
+            self.command_get_source( )
+        pass
+
+    def command_patch(self):
+        '''Execute the optional user-supplied patch script (self.patch_boost,
+        relative to the regression root) if it exists.'''
+        self.import_utils()
+        patch_boost_path = os.path.join( self.regression_root, self.patch_boost )
+        if os.path.exists( patch_boost_path ):
+            self.log( 'Found patch file "%s". Executing it.' % patch_boost_path )
+            os.chdir( self.regression_root )
+            utils.system( [ patch_boost_path ] )
+        pass
+
+    def command_setup(self):
+        '''Prepare for testing: apply patches, then build bjam and (when
+        not using the python log processor) the process_jam_log helper.'''
+        self.command_patch()
+        self.build_if_needed(self.bjam,self.bjam_toolset)
+        if self.pjl_toolset != 'python':
+            self.build_if_needed(self.process_jam_log,self.pjl_toolset)
+
+ def command_test(self, *args):
+ if not args or args == None or args == []: args = [ "test", "process" ]
+ self.import_utils()
+
+ self.log( 'Making "%s" directory...' % self.regression_results )
+ utils.makedirs( self.regression_results )
+
+ results_libs = os.path.join( self.regression_results, 'libs' )
+ results_status = os.path.join( self.regression_results, 'status' )
+
+ if "clean" in args:
+ self.command_test_clean()
+
+ if "test" in args:
+ self.command_test_run()
+ self.command_test_boost_build()
+
+ if "process" in args:
+ if self.pjl_toolset != 'python':
+ self.command_test_process()
+
+    def command_test_clean(self):
+        '''Delete the per-library and status result trees from the
+        regression results directory.'''
+        results_libs = os.path.join( self.regression_results, 'libs' )
+        results_status = os.path.join( self.regression_results, 'status' )
+        self.rmtree( results_libs )
+        self.rmtree( results_status )
+
+ def command_test_run(self):
+ self.import_utils()
+ if self.pjl_toolset != 'python':
+ test_cmd = '%s -d2 preserve-test-targets=off --dump-tests %s "--build-dir=%s" >>"%s" 2>&1' % (
+ self.bjam_cmd( self.toolsets ),
+ self.bjam_options,
+ self.regression_results,
+ self.regression_log )
+ else:
+ test_cmd = '%s -d1 preserve-test-targets=off --dump-tests --verbose-test %s "--build-dir=%s" "--out-xml=%s"' % (
+ self.bjam_cmd( self.toolsets ),
+ self.bjam_options,
+ self.regression_results,
+ self.regression_log )
+ self.log( 'Starting tests (%s)...' % test_cmd )
+ cd = os.getcwd()
+ os.chdir( os.path.join( self.boost_root, 'status' ) )
+ utils.system( [ test_cmd ] )
+ os.chdir( cd )
+
+    def command_test_boost_build(self):
+        '''Run the Boost.Build test suite once per toolset, writing each
+        toolset's results to boost-build-<toolset>/test_log.xml. Skipped
+        when a library list was given that excludes "build".'''
+        if self.libraries is not None and "build" not in self.libraries:
+            return
+
+        self.import_utils()
+        self.log( 'Running Boost.Build tests' )
+        # Find the true names of the toolsets used for testing
+        # NOTE(review): relies on libs/any/test having been built for
+        # every tested toolset — confirm this holds for partial runs.
+        toolsets = os.listdir(os.path.join(self.regression_results,
+            "boost/bin.v2/libs/any/test/any_test.test"));
+        for t in toolsets:
+            d = os.path.join(self.regression_results, ("boost-build-%s" % (t)))
+            utils.makedirs (d)
+            fn = os.path.join(d, "test_log.xml")
+            cd = os.getcwd()
+            try:
+                os.chdir (os.path.join (self.boost_root, 'tools/build/v2/test'));
+                bjam_path = os.path.dirname (self.tool_path( self.bjam ))
+                self.log( "Using bjam binary in '%s'" % (bjam_path))
+                # Make the freshly-built bjam the first one found on PATH.
+                os.putenv('PATH', bjam_path + os.pathsep + os.environ['PATH'])
+                utils.system ( [ '"%s" test_all.py --default-bjam --xml %s > %s' % (sys.executable, t, fn) ] )
+            finally:
+                os.chdir( cd )
+
+ def command_test_process(self):
+ self.import_utils()
+ self.log( 'Getting test case results out of "%s"...' % self.regression_log )
+ cd = os.getcwd()
+ os.chdir( os.path.join( self.boost_root, 'status' ) )
+ utils.checked_system( [
+ '"%s" "%s" <"%s"' % (
+ self.tool_path(self.process_jam_log),
+ self.regression_results,
+ self.regression_log )
+ ] )
+ os.chdir( cd )
+
+    def command_collect_logs(self):
+        '''Gather the run's test results into an uploadable archive:
+        ensure a comment file exists, determine the source kind (SVN or
+        tarball) and revision, then either collect the processed logs
+        (classic mode) or convert the bjam XML log and zip it (python
+        mode).'''
+        self.import_utils()
+        comment_path = os.path.join( self.regression_root, self.comment )
+        if not os.path.exists( comment_path ):
+            self.log( 'Comment file "%s" not found; creating default comment.' % comment_path )
+            f = open( comment_path, 'w' )
+            f.write( '<p>Tests are run on %s platform.</p>' % self.platform_name() )
+            f.close()
+
+        source = 'tarball'
+        revision = ''
+        svn_root_file = os.path.join( self.boost_root, '.svn' )
+        svn_info_file = os.path.join( self.boost_root, 'svn_info.txt' )
+        if os.path.exists( svn_root_file ):
+            source = 'SVN'
+            self.svn_command( 'info --xml "%s" >"%s"' % (self.boost_root,svn_info_file) )
+
+        if os.path.exists( svn_info_file ):
+            f = open( svn_info_file, 'r' )
+            svn_info = f.read()
+            f.close()
+            i = svn_info.find( 'Revision:' )
+            if i < 0: i = svn_info.find( 'revision=' ) # --xml format
+            if i >= 0:
+                # Skip past the marker ('Revision: ' or 'revision="') to
+                # the first digit, then copy the digit run.
+                i += 10
+                while svn_info[i] >= '0' and svn_info[i] <= '9':
+                    revision += svn_info[i]
+                    i += 1
+
+        if self.pjl_toolset != 'python':
+            from collect_and_upload_logs import collect_logs
+            if self.incremental:
+                run_type = 'incremental'
+            else:
+                run_type = 'full'
+            collect_logs(
+                self.regression_results,
+                self.runner, self.tag, self.platform, comment_path,
+                self.timestamp_path,
+                self.user,
+                source, run_type,
+                self.dart_server, self.proxy,
+                revision )
+        else:
+            from process_jam_log import BJamLog2Results
+            if self.incremental:
+                run_type = '--incremental'
+            else:
+                run_type = ''
+            BJamLog2Results([
+                '--output='+os.path.join(self.regression_results,self.runner+'.xml'),
+                '--runner='+self.runner,
+                '--comment='+comment_path,
+                '--tag='+self.tag,
+                '--platform='+self.platform,
+                '--source='+source,
+                '--revision='+revision,
+                run_type,
+                self.regression_log
+                ])
+            self.compress_file(
+                os.path.join(self.regression_results,self.runner+'.xml'),
+                os.path.join(self.regression_results,self.runner+'.zip')
+                )
+
+    def command_upload_logs(self):
+        '''Upload the collected results, retrying on failure. Uses the
+        explicit FTP URL when self.ftp is set, otherwise the uploader's
+        default destination.'''
+        self.import_utils()
+        from collect_and_upload_logs import upload_logs
+        if self.ftp:
+            self.retry(
+                lambda:
+                    upload_logs(
+                        self.regression_results,
+                        self.runner, self.tag,
+                        self.user,
+                        self.ftp_proxy,
+                        self.debug_level, self.send_bjam_log,
+                        self.timestamp_path,
+                        self.dart_server,
+                        ftp_url = self.ftp )
+                )
+        else:
+            self.retry(
+                lambda:
+                    upload_logs(
+                        self.regression_results,
+                        self.runner, self.tag,
+                        self.user,
+                        self.ftp_proxy,
+                        self.debug_level, self.send_bjam_log,
+                        self.timestamp_path,
+                        self.dart_server )
+                )
+
+    def command_regression(self):
+        '''Run the complete regression sequence: fetch tools and sources,
+        set up, test, collect and upload logs — sending start/success/
+        failure e-mail notifications when self.mail is configured. Any
+        exception is reported by mail and then re-raised.'''
+        import socket
+        import string
+        try:
+            mail_subject = 'Boost regression for %s on %s' % ( self.tag,
+                string.split(socket.gethostname(), '.')[0] )
+            start_time = time.localtime()
+            if self.mail:
+                self.log( 'Sending start notification to "%s"' % self.mail )
+                self.send_mail(
+                    '%s started at %s.' % ( mail_subject, format_time( start_time ) )
+                    )
+
+            self.command_get_tools()
+
+            if self.local is not None:
+                # A locally supplied tarball overrides any download/update.
+                self.log( 'Using local file "%s"' % self.local )
+                b = os.path.basename( self.local )
+                tag = b[ 0: b.find( '.' ) ]
+                self.log( 'Tag: "%s"' % tag )
+                self.unpack_tarball( self.local, self.boost_root )
+
+            elif self.have_source:
+                if not self.incremental: self.command_cleanup( 'bin' )
+
+            else:
+                if self.incremental or self.force_update:
+                    if not self.incremental: self.command_cleanup( 'bin' )
+                else:
+                    self.command_cleanup()
+                self.command_get_source()
+
+            self.command_setup()
+
+            # Not specifying --toolset in command line is not enough
+            # that would mean to use Boost.Build default ones
+            # We can skip test only we were explictly
+            # told to have no toolsets in command line "--toolset="
+            if self.toolsets != '': # --toolset=,
+                if not self.skip_tests:
+                    self.command_test()
+                self.command_collect_logs()
+                self.command_upload_logs()
+
+            if self.mail:
+                self.log( 'Sending report to "%s"' % self.mail )
+                end_time = time.localtime()
+                self.send_mail(
+                    '%s completed successfully at %s.' % ( mail_subject, format_time( end_time ) )
+                    )
+        except:
+            if self.mail:
+                self.log( 'Sending report to "%s"' % self.mail )
+                traceback_ = '\n'.join( apply( traceback.format_exception, sys.exc_info() ) )
+                end_time = time.localtime()
+                self.send_mail(
+                    '%s failed at %s.' % ( mail_subject, format_time( end_time ) ),
+                    traceback_ )
+            raise
+
+    def command_show_revision(self):
+        '''Print this script's own SVN revision and last-modified date,
+        extracted from the SVN keyword strings below (expanded at
+        checkout time).'''
+        modified = '$Date$'
+        revision = '$Revision$'
+
+        import re
+        re_keyword_value = re.compile( r'^\$\w+:\s+(.*)\s+\$$' )
+        print '\n\tRevision: %s' % re_keyword_value.match( revision ).group( 1 )
+        print '\tLast modified on: %s\n' % re_keyword_value.match( modified ).group( 1 )
+
+ #~ Utilities...
+
+ def main(self):
+ for action in self.actions:
+ action_m = "command_"+action.replace('-','_')
+ if hasattr(self,action_m):
+ getattr(self,action_m)()
+
+ def platform_name(self):
+ # See http://article.gmane.org/gmane.comp.lib.boost.testing/933
+ if sys.platform == 'win32':
+ return 'Windows'
+ elif sys.platform == 'cygwin':
+ return 'Windows/Cygwin'
+ return platform.system()
+
+ def log(self,message):
+ sys.stdout.flush()
+ sys.stderr.flush()
+ sys.stderr.write( '# %s\n' % message )
+ sys.stderr.flush()
+
+    def rmtree(self,path):
+        '''Recursively delete *path* if it exists, using platform tools
+        to cope with read-only files and long paths.'''
+        if os.path.exists( path ):
+            import shutil
+            #~ shutil.rmtree( unicode( path ) )
+            if sys.platform == 'win32':
+                # 'del' first clears read-only files that shutil.rmtree
+                # would choke on; rmtree then removes the directory tree.
+                os.system( 'del /f /s /q "%s" >nul 2>&1' % path )
+                shutil.rmtree( unicode( path ) )
+            else:
+                os.system( 'rm -f -r "%s"' % path )
+
+    def refresh_timestamp( self ):
+        '''Recreate the timestamp file so its mtime records the current
+        run's start time.'''
+        if os.path.exists( self.timestamp_path ):
+            os.unlink( self.timestamp_path )
+        open( self.timestamp_path, 'w' ).close()
+
+    def timestamp( self ):
+        '''Return the timestamp file's mtime as an ISO-8601 UTC string.'''
+        return time.strftime(
+            '%Y-%m-%dT%H:%M:%SZ',
+            time.gmtime( os.stat( self.timestamp_path ).st_mtime ) )
+
+    def retry( self, f, max_attempts=5, sleep_secs=10 ):
+        '''Call *f* until it succeeds, retrying up to *max_attempts* more
+        times with *sleep_secs* seconds between attempts; re-raises the
+        last exception when every attempt fails.'''
+        for attempts in range( max_attempts, -1, -1 ):
+            try:
+                return f()
+            except Exception, msg:
+                self.log( '%s failed with message "%s"' % ( f.__name__, msg ) )
+                if attempts == 0:
+                    self.log( 'Giving up.' )
+                    raise
+
+                self.log( 'Retrying (%d more attempts).' % attempts )
+                time.sleep( sleep_secs )
+
+ def http_get( self, source_url, destination_file ):
+ import urllib
+
+ proxies = None
+ if hasattr(self,'proxy') and self.proxy is not None:
+ proxies = { 'http' : self.proxy }
+
+ src = urllib.urlopen( source_url, proxies = proxies )
+
+ f = open( destination_file, 'wb' )
+ while True:
+ data = src.read( 16*1024 )
+ if len( data ) == 0: break
+ f.write( data )
+
+ f.close()
+ src.close()
+
+    def import_utils(self):
+        '''Lazily import the xsl_reports "utils" module into the module
+        global `utils`, extending sys.path on first use.'''
+        global utils
+        if utils is None:
+            sys.path.append( self.xsl_reports_dir )
+            import utils as utils_module
+            utils = utils_module
+
+ def build_if_needed( self, tool, toolset ):
+ self.import_utils()
+ if os.path.exists( tool[ 'path' ] ):
+ self.log( 'Found preinstalled "%s"; will use it.' % tool[ 'path' ] )
+ return
+
+ self.log( 'Preinstalled "%s" is not found; building one...' % tool[ 'path' ] )
+
+ if toolset is None:
+ if self.toolsets is not None:
+ toolset = string.split( self.toolsets, ',' )[0]
+ else:
+ toolset = tool[ 'default_toolset' ]
+ self.log( 'Warning: No bootstrap toolset for "%s" was specified.' % tool[ 'name' ] )
+ self.log( ' Using default toolset for the platform (%s).' % toolset )
+
+ if os.path.exists( tool[ 'source_dir' ] ):
+ self.log( 'Found "%s" source directory "%s"' % ( tool[ 'name' ], tool[ 'source_dir' ] ) )
+ build_cmd = tool[ 'build_cmd' ]( toolset, tool['build_args'] )
+ self.log( 'Building "%s" (%s)...' % ( tool[ 'name'], build_cmd ) )
+ utils.system( [ 'cd "%s"' % tool[ 'source_dir' ], build_cmd ] )
+ else:
+ raise 'Could not find "%s" source directory "%s"' % ( tool[ 'name' ], tool[ 'source_dir' ] )
+
+ if not tool.has_key( 'build_path' ):
+ tool[ 'build_path' ] = self.tool_path( tool )
+
+ if not os.path.exists( tool[ 'build_path' ] ):
+ raise 'Failed to find "%s" after build.' % tool[ 'build_path' ]
+
+ self.log( '%s succesfully built in "%s" location' % ( tool[ 'name' ], tool[ 'build_path' ] ) )
+
+    def tool_path( self, name_or_spec ):
+        '''Resolve a tool to an executable path. A plain string is taken
+        relative to the regression root; a spec dict is resolved via its
+        'path', then 'build_path', then by searching 'build_dir' for a
+        file named spec['name']. Raises when nothing is found.'''
+        if isinstance( name_or_spec, basestring ):
+            return os.path.join( self.regression_root, name_or_spec )
+
+        if os.path.exists( name_or_spec[ 'path' ] ):
+            return name_or_spec[ 'path' ]
+
+        if name_or_spec.has_key( 'build_path' ):
+            return name_or_spec[ 'build_path' ]
+
+        build_dir = name_or_spec[ 'build_dir' ]
+        self.log( 'Searching for "%s" in "%s"...' % ( name_or_spec[ 'name' ], build_dir ) )
+        for root, dirs, files in os.walk( build_dir ):
+            if name_or_spec[ 'name' ] in files:
+                return os.path.join( root, name_or_spec[ 'name' ] )
+
+        raise Exception( 'Cannot find "%s" in any of the following locations:\n%s' % (
+              name_or_spec[ 'name' ]
+            , '\n'.join( [ name_or_spec[ 'path' ], build_dir ] )
+            ) )
+
+ def bjam_build_cmd( self, *rest ):
+ if sys.platform == 'win32':
+ cmd = 'build.bat %s' % self.bjam_toolset
+ else:
+ cmd = './build.sh %s' % self.bjam_toolset
+ env_setup_key = 'BJAM_ENVIRONMENT_SETUP'
+ if os.environ.has_key( env_setup_key ):
+ return '%s & %s' % ( os.environ[env_setup_key], cmd )
+ return cmd
+
+    def bjam_cmd( self, toolsets, args = '', *rest ):
+        '''Build the full bjam invocation string: binary path, build-path
+        and boost-root settings, a per-target timeout (-l, from
+        self.timeout in minutes), extra *args*, and the comma-separated
+        *toolsets* expanded into separate arguments.'''
+        build_path = self.regression_root
+        # A trailing backslash would escape the closing quote on Windows.
+        if build_path[-1] == '\\': build_path += '\\'
+
+        if self.timeout > 0:
+            args += ' -l%s' % (self.timeout*60)
+
+        cmd = '"%(bjam)s"' +\
+            ' "-sBOOST_BUILD_PATH=%(bbpath)s"' +\
+            ' "-sBOOST_ROOT=%(boost)s"' +\
+            ' "--boost=%(boost)s"' +\
+            ' "--boost-build=%(bb)s"' +\
+            ' "--debug-configuration"' +\
+            ' %(arg)s'
+        cmd %= {
+            'bjam' : self.tool_path( self.bjam ),
+            'bbpath' : os.pathsep.join([build_path,self.tools_bb_root]),
+            'bb' : self.tools_bb_root,
+            'boost' : self.boost_root,
+            'arg' : args }
+
+        if toolsets:
+            import string
+            cmd += ' ' + string.join(string.split( toolsets, ',' ), ' ' )
+
+        return cmd
+
+ def send_mail( self, subject, msg = '' ):
+ import smtplib
+ if not self.smtp_login:
+ server_name = 'mail.%s' % mail.split( '@' )[-1]
+ user_name = None
+ password = None
+ else:
+ server_name = self.smtp_login.split( '@' )[-1]
+ ( user_name, password ) = string.split( self.smtp_login.split( '@' )[0], ':' )
+
+ log( ' Sending mail through "%s"...' % server_name )
+ smtp_server = smtplib.SMTP( server_name )
+ smtp_server.set_debuglevel( self.debug_level )
+ if user_name:
+ smtp_server.login( user_name, password )
+
+ smtp_server.sendmail( self.mail, [ self.mail ],
+ 'Subject: %s\nTo: %s\n\n%s' % ( subject, self.mail, msg ) )
+
+ def compress_file( self, file_path, archive_path ):
+ self.import_utils()
+ utils.log( 'Compressing "%s"...' % file_path )
+
+ try:
+ import zipfile
+ z = zipfile.ZipFile( archive_path, 'w', zipfile.ZIP_DEFLATED )
+ z.write( file_path, os.path.basename( file_path ) )
+ z.close()
+ utils.log( 'Done writing "%s".'% archive_path )
+ except Exception, msg:
+ utils.log( 'Warning: Compressing falied (%s)' % msg )
+ utils.log( ' Trying to compress using a platform-specific tool...' )
+ try:
+ import zip_cmd
+ except ImportError:
+ script_dir = os.path.dirname( os.path.abspath( sys.argv[0] ) )
+ utils.log( 'Could not find \'zip_cmd\' module in the script directory (%s).' % script_dir )
+ raise Exception( 'Compressing failed!' )
+ else:
+ if os.path.exists( archive_path ):
+ os.unlink( archive_path )
+ utils.log( 'Removing stale "%s".' % archive_path )
+
+ zip_cmd.main( file_path, archive_path )
+ utils.log( 'Done compressing "%s".' % archive_path )
+
+ #~ Dowloading source, from SVN...
+
+    def svn_checkout( self ):
+        '''Check out the boost sources for self.tag into the "boost"
+        directory under the regression root.'''
+        os.chdir( self.regression_root )
+        self.svn_command( 'co %s %s' % (self.svn_repository_url(self.tag),'boost') )
+
+    def svn_update( self ):
+        '''Run "svn update" inside the existing boost working copy.'''
+        os.chdir( self.boost_root )
+        self.svn_command( 'update' )
+
+    def svn_command( self, command ):
+        '''Run an svn subcommand non-interactively, adding --username for
+        authenticated (non-anonymous) users. Raises on a non-zero exit
+        code.'''
+        svn_anonymous_command_line  = 'svn --non-interactive %(command)s'
+        svn_command_line            = 'svn --non-interactive --username=%(user)s %(command)s'
+
+        if not hasattr(self,'user') or self.user is None or self.user == 'anonymous':
+            cmd = svn_anonymous_command_line % { 'command': command }
+        else:
+            cmd = svn_command_line % { 'user': self.user, 'command': command }
+
+        self.log( 'Executing SVN command "%s"' % cmd )
+        rc = os.system( cmd )
+        if rc != 0:
+            raise Exception( 'SVN command "%s" failed with code %d' % ( cmd, rc ) )
+
+ def svn_repository_url( self, path ):
+ if self.user != 'anonymous' and self.user != '':
+ return '%s%s' % (repo_root['user'],path)
+ else:
+ return '%s%s' % (repo_root['anon'],path)
+
+ #~ Downloading and extracting source archives, from tarballs or zipballs...
+
+    def get_tarball( self, *args ):
+        '''Fetch and/or unpack the boost source tarball. *args* selects
+        the phases ('download', 'unpack'); defaults to both. A local
+        tarball (self.local) short-circuits the download.'''
+        if not args or args == []:
+            args = [ 'download', 'unpack' ]
+
+        tarball_path = None
+
+        if hasattr(self,'local') and self.local is not None:
+            tarball_path = self.local
+        elif 'download' in args:
+            tarball_path = self.download_tarball(self.boost_tarball_name(),self.boost_tarball_url())
+        if not tarball_path:
+            # NOTE(review): joining the regression root with the tarball
+            # *URL* looks suspicious — presumably the tarball *name* was
+            # intended; confirm before relying on this fallback.
+            tarball_path = os.path.join( self.regression_root, self.boost_tarball_url() )
+
+        if 'unpack' in args:
+            self.unpack_tarball( tarball_path, self.boost_root )
+        pass
+
+    def download_tarball( self, tarball_name, tarball_url ):
+        '''Download *tarball_url* into the regression root as
+        *tarball_name*, replacing any stale copy; returns the local path.'''
+        tarball_path = os.path.join( self.regression_root, tarball_name )
+
+        self.log( 'Downloading "%s" to "%s"...'  % ( tarball_url, os.path.dirname( tarball_path ) ) )
+
+        if os.path.exists( tarball_path ):
+            os.unlink( tarball_path )
+        self.http_get( tarball_url, tarball_path )
+
+        return tarball_path
+
+    def tarball_url( self, path ):
+        '''Return the boost snapshot-download URL for repository *path*.'''
+        return 'http://beta.boost.org/development/snapshot.php/%s' % path
+
+ def boost_tarball_name( self ):
+ return 'boost-%s.tar.bz2' % self.tag.split( '/' )[-1]
+
+    def boost_tarball_url( self ):
+        '''Return the snapshot-download URL for this run's tag.'''
+        return self.tarball_url( self.tag )
+
+ def unpack_tarball( self, tarball_path, target_path ):
+ self.log( 'Looking for old unpacked archives...' )
+ old_boost_dirs = self.find_boost_dirs( )
+
+ for old_boost_dir in old_boost_dirs:
+ if old_boost_dir != tarball_path:
+ self.log( 'Deleting old directory %s.' % old_boost_dir )
+ self.rmtree( old_boost_dir )
+
+ self.log( 'Unpacking boost tarball ("%s")...' % tarball_path )
+
+ tarball_name = os.path.basename( tarball_path )
+ extension = tarball_name[ tarball_name.find( '.' ) : ]
+
+ if extension in ( ".tar.gz", ".tar.bz2" ):
+ import tarfile
+ import stat
+
+ mode = os.path.splitext( extension )[1][1:]
+ tar = tarfile.open( tarball_path, 'r:%s' % mode )
+ for tarinfo in tar:
+ tar.extract( tarinfo, self.regression_root )
+ if sys.platform == 'win32' and not tarinfo.isdir():
+ # workaround what appears to be a Win32-specific bug in 'tarfile'
+ # (modification times for extracted files are not set properly)
+ f = os.path.join( self.regression_root, tarinfo.name )
+ os.chmod( f, stat.S_IWRITE )
+ os.utime( f, ( tarinfo.mtime, tarinfo.mtime ) )
+ tar.close()
+ elif extension in ( ".zip" ):
+ import zipfile
+
+ z = zipfile.ZipFile( tarball_path, 'r', zipfile.ZIP_DEFLATED )
+ for f in z.infolist():
+ destination_file_path = os.path.join( self.regression_root, f.filename )
+ if destination_file_path[-1] == "/": # directory
+ if not os.path.exists( destination_file_path ):
+ os.makedirs( destination_file_path )
+ else: # file
+ result = open( destination_file_path, 'wb' )
+ result.write( z.read( f.filename ) )
+ result.close()
+ z.close()
+ else:
+ raise 'Do not know how to unpack archives with extension \"%s\"' % extension
+
+ boost_dir = self.find_boost_dirs()[0]
+ self.log( ' Unpacked into directory "%s"' % boost_dir )
+
+ if os.path.exists( target_path ):
+ self.log( 'Deleting "%s" directory...' % target_path )
+ self.rmtree( target_path )
+
+ self.log( 'Renaming "%s" into "%s"' % ( boost_dir, target_path ) )
+ os.rename( boost_dir, target_path )
+
+ def find_boost_dirs( self ):
+ return [
+ x for x in
+ glob.glob( os.path.join( self.regression_root, 'boost[-_]*' ) )
+ if os.path.isdir( x )
+ ]
+
+
diff --git a/tools/regression/src/run.py b/tools/regression/src/run.py
new file mode 100644
index 0000000000..5e8e0c7d01
--- /dev/null
+++ b/tools/regression/src/run.py
@@ -0,0 +1,60 @@
+#!/usr/bin/python
+
+# Copyright Redshift Software, Inc. 2007
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import os
+import os.path
+import shutil
+import sys
+import urllib
+
+#~ Using --skip-script-download is useful to avoid repeated downloading of
+#~ the regression scripts when doing the regression commands individually.
+no_update_argument = "--skip-script-download"
+no_update = no_update_argument in sys.argv
+if no_update:
+ del sys.argv[sys.argv.index(no_update_argument)]
+
+#~ The directory this file is in.
+root = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
+print '# Running regressions in %s...' % root
+
+script_sources = [ 'collect_and_upload_logs.py', 'process_jam_log.py', 'regression.py' ]
+script_local = os.path.join(root,'tools','regression','src')
+script_remote = 'http://svn.boost.org/svn/boost/trunk/tools/regression/src'
+script_dir = os.path.join(root,'tools_regression_src')
+
+if not no_update:
+ #~ Bootstrap.
+ #~ * Clear out any old versions of the scripts
+ print '# Creating regression scripts at %s...' % script_dir
+ if os.path.exists(script_dir):
+ shutil.rmtree(script_dir)
+ os.mkdir(script_dir)
+ #~ * Get new scripts, either from local working copy, or from svn
+ if os.path.exists(script_local):
+ print '# Copying regression scripts from %s...' % script_local
+ for src in script_sources:
+ shutil.copyfile( os.path.join(script_local,src), os.path.join(script_dir,src) )
+ else:
+ print '# Dowloading regression scripts from %s...' % script_remote
+ proxy = None
+ for a in sys.argv[1:]:
+ if a.startswith('--proxy='):
+ proxy = {'http' : a.split('=')[1] }
+ print '--- %s' %(proxy['http'])
+ break
+ for src in script_sources:
+ urllib.FancyURLopener(proxy).retrieve(
+ '%s/%s' % (script_remote,src), os.path.join(script_dir,src) )
+
+#~ * Make the scripts available to Python
+sys.path.insert(0,os.path.join(root,'tools_regression_src'))
+
+#~ Launch runner.
+from regression import runner
+runner(root)
diff --git a/tools/regression/src/run_tests.sh b/tools/regression/src/run_tests.sh
new file mode 100644
index 0000000000..483cecfeae
--- /dev/null
+++ b/tools/regression/src/run_tests.sh
@@ -0,0 +1,197 @@
+#!/bin/sh
+#
+# Copyright John Maddock
+# Copyright Rene Rivera
+#
+# Distributed under the Boost Software License, Version 1.0.
+# See http://www.boost.org/LICENSE_1_0.txt
+#
+# shell script for running the boost regression test suite and generating
+# a html table of results.
+
+# Set the following variables to configure the operation. Variables you
+# should set, i.e. usually required are listed first. Optional variables
+# have reasonable defaults for most situations.
+
+
+### THESE SHOULD BE CHANGED!
+
+#
+# "boost_root" points to the root of your boost installation:
+# This can be either a non-existent directory or an already complete Boost
+# source tree.
+#
+boost_root="$HOME/CVSROOTs/Boost/boost_regression"
+
+#
+# Whether to fetch the most current Boost code from CVS (yes/no):
+# There are two contexts to use this script in: on an active Boost CVS
+# tree, and on a fresh Boost CVS tree. If "yes" is specified here an attempt
+# to fetch the latest CVS Boost files is made. For an active Boost CVS
+# the CVS connection information is used. If an empty tree is detected
+# the code is fetched with the anonymous read only information.
+#
+cvs_update=no
+
+#
+# "test_tools" are the Boost.Build toolsets to use for building and running the
+# regression tests. Specify a space separated list, of the Boost.Build toolsets.
+# Each will be built and tested in sequence.
+#
+test_tools=gcc
+
+#
+# "toolset" is the Boost.Build toolset to use for building the helper programs.
+# This is usually different than the toolsets one is testing. And this is
+# normally a toolset that corresponds to the compiler built into your platform.
+#
+toolset=gcc
+
+#
+# "comment_path" is the path to an html-file describing the test environment.
+# The content of this file will be embedded in the status pages being produced.
+#
+comment_path="$boost_root/../regression_comment.html"
+#
+# "test_dir" is the relative path to the directory to run the tests in,
+# defaults to "status" and runs all the tests, but could be a sub-directory
+# for example "libs/regex/test" to run the regex tests alone.
+#
+test_dir="status"
+
+
+### DEFAULTS ARE OK FOR THESE.
+
+#
+# "exe_suffix" the suffix used by executable files:
+# In case your platform requires use of a special suffix for executables specify
+# it here, including the "." if needed. This should not be needed even in Windows
+# like platforms as they will execute without the suffix anyway.
+#
+exe_suffix=
+
+#
+# "bjam" points to your built bjam executable:
+# The location of the binary for running bjam. The default should work
+# under most circumstances.
+#
+bjam="$boost_root/tools/build/v2/engine/bin/bjam$exe_suffix"
+
+#
+# "process_jam_log", and "compiler_status" paths to built helper programs:
+# The location of the executables of the regression help programs. These
+# are built locally so the default should work in most situations.
+#
+process_jam_log="$boost_root/dist/bin/process_jam_log$exe_suffix"
+compiler_status="$boost_root/dist/bin/compiler_status$exe_suffix"
+
+#
+# "boost_build_path" can point to additional locations to find toolset files.
+#
+boost_build_path="$HOME/.boost-build"
+
+
+### NO MORE CONFIGURABLE PARTS.
+
+#
+# Some setup.
+# Prepend our boost_build_path to any existing BOOST_BUILD_PATH.
+#
+boost_dir=`basename "$boost_root"`
+if test -n "${BOOST_BUILD_PATH}" ; then
+    BOOST_BUILD_PATH="$boost_build_path:$BOOST_BUILD_PATH"
+else
+    BOOST_BUILD_PATH="$boost_build_path"
+fi
+export BOOST_BUILD_PATH
+
+#
+# STEP 0:
+#
+# Get the source code:
+#
+if test ! -d "$boost_root" ; then
+    mkdir -p "$boost_root"
+    if test $? -ne 0 ; then
+        echo "creation of $boost_root directory failed."
+        exit 256
+    fi
+fi
+if test $cvs_update = yes ; then
+    echo fetching Boost:
+    echo "/1 :pserver:anonymous@cvs.sourceforge.net:2401/cvsroot/boost A" >> "$HOME/.cvspass"
+    # Sort into a temporary file first: `cat f | ... > f` truncates the
+    # file before it is read, which destroyed all existing entries.
+    sort "$HOME/.cvspass" | uniq > "$HOME/.cvspass.tmp" && mv "$HOME/.cvspass.tmp" "$HOME/.cvspass"
+    cd `dirname "$boost_root"`
+    if test -f boost/CVS/Root ; then
+        cvs -z3 -d `cat "$boost_dir/CVS/Root"` co -d "$boost_dir" boost
+    else
+        cvs -z3 -d :pserver:anonymous@cvs.sourceforge.net:2401/cvsroot/boost co -d "$boost_dir" boost
+    fi
+fi
+
+#
+# STEP 1:
+# rebuild bjam if required:
+#
+echo building bjam:
+cd "$boost_root/tools/build/v2/engine" && \
+LOCATE_TARGET=bin sh ./build.sh
+if test $? != 0 ; then
+    echo "bjam build failed."
+    exit 256
+fi
+
+#
+# STEP 2:
+# rebuild the regression test helper programs if required:
+#
+echo building regression test helper programs:
+cd "$boost_root/tools/regression/build" && \
+"$bjam" $toolset release
+if test $? != 0 ; then
+    echo "helper program build failed."
+    exit 256
+fi
+
+#
+# STEP 5:
+# repeat steps 3 and 4 for each additional toolset:
+# (the loop body below *is* steps 3 and 4, run once per toolset)
+#
+for tool in $test_tools ; do
+
+#
+# STEP 3:
+# run the regression tests:
+#
+echo running the $tool regression tests:
+cd "$boost_root/$test_dir"
+"$bjam" $tool --dump-tests 2>&1 | tee regress.log
+
+#
+# STEP 4:
+# post process the results:
+#
+echo processing the regression test results for $tool:
+cat regress.log | "$process_jam_log" --v2
+if test $? != 0 ; then
+    echo "Failed regression log post processing."
+    exit 256
+fi
+
+done
+
+#
+# STEP 6:
+# create the html table:
+#
+uname=`uname`
+echo generating html tables:
+"$compiler_status" --v2 --comment "$comment_path" "$boost_root" cs-$uname.html cs-$uname-links.html
+if test $? != 0 ; then
+    echo "Failed HTML result table generation."
+    exit 256
+fi
+
+echo "done!"
+
+
diff --git a/tools/regression/src/smoke.py b/tools/regression/src/smoke.py
new file mode 100755
index 0000000000..1b17cfaaee
--- /dev/null
+++ b/tools/regression/src/smoke.py
@@ -0,0 +1,197 @@
+# smoke test - every so many minutes, check svn revision, and if changed:
+# update working copy, run tests, upload results
+
+# Copyright Beman Dawes 2007
+
+# Distributed under the Boost Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+# ---------------------------------------------------------------------------- #
+
+import os
+import sys
+import platform
+import time
+import ftplib
+
# invoke the system command line processor
def cmd(command):
    """Echo *command*, run it via the shell, and return os.system's status.

    The original discarded os.system's return value, so callers could not
    detect failures; returning it is backward compatible (callers that
    ignore the result are unaffected).  The single-string print form emits
    the same bytes as the old print statement under both Python 2 and 3.
    """
    print("command: %s" % command)
    return os.system(command)
+
# update SVN working copy
def update_working_copy(boost_path):
    """Run 'svn update' from inside the working copy at *boost_path*."""
    os.chdir(boost_path)
    cmd("svn update")
+
# get repository url
def repository_url(path, results_path):
    """Return the svn.boost.org URL for *path* (scheme-less, '//...' form).

    Runs 'svn info --xml', dumps it to results_path/svn_info.xml, and scans
    the dump for the boost repository host.  Returns "" when not found.
    """
    svn_info_file = results_path + "/svn_info.xml"
    cmd("svn info --xml " + path + " >" + svn_info_file)
    f = open(svn_info_file, 'r')
    svn_info = f.read()
    f.close()
    start = svn_info.find('//svn.boost.org')
    if start < 0:
        return ""
    # slice up to the closing </url> tag of the info output
    return svn_info[start:svn_info.find("</url>")]
+
# get revision number of a path, which may be a filesystem path or URL
def revision(path, results_path, test_name):
    """Return the revision number reported by 'svn info --xml' for *path*.

    The XML dump is written to results_path/<test_name>-svn_info.xml and the
    digits following the first 'revision=' attribute are parsed out.
    Returns 0 when no revision attribute is present.
    """
    rev = 0
    svn_info_file = results_path + "/" + test_name + "-svn_info.xml"
    cmd("svn info --xml " + path + " >" + svn_info_file)
    f = open(svn_info_file, 'r')
    svn_info = f.read()
    f.close()
    pos = svn_info.find('revision=')
    if pos >= 0:
        pos += 10  # skip 'revision=' (9 chars) plus the opening quote
        while svn_info[pos].isdigit():
            rev = rev * 10 + int(svn_info[pos])
            pos += 1
    return rev
+
# run bjam in current directory
def bjam(boost_path, args, output_path, test_name):
    """Run the bjam test sweep, capturing all output to <test_name>-bjam.log."""

    # bjam seems to need BOOST_BUILD_PATH
    #os.environ["BOOST_BUILD_PATH"]=boost_path + "/tools/build/v2"

    print("Begin bjam...")
    log_file = output_path + "/" + test_name + "-bjam.log"
    command = "bjam --v2 --dump-tests -l180"
    if args != "":
        command = command + " " + args
    # both stdout and stderr go to the log
    cmd(command + " >" + log_file + " 2>&1")
+
# run process_jam_log in current directory
def process_jam_log(boost_path, output_path, test_name):
    """Feed the captured bjam log for *test_name* to the process_jam_log tool."""
    print("Begin log processing...")
    log_file = output_path + "/" + test_name + "-bjam.log"
    cmd("process_jam_log " + boost_path + " <" + log_file)
+
# run compiler_status in current directory
def compiler_status(boost_path, output_path, test_name):
    """Generate the <test_name>-results.html and -details.html status tables."""
    print("Begin compiler status html creation... ")
    prefix = output_path + "/" + test_name
    cmd("compiler_status --v2 --ignore-pass --no-warn --locate-root " + boost_path
        + " " + boost_path
        + " " + prefix + "-results.html "
        + prefix + "-details.html ")
+
# upload results via ftp
def upload_to_ftp(results_path, test_name, ftp_url, user, psw, debug_level):
    """Upload <test_name>-results.html and -details.html to the FTP server.

    To minimize the time the web pages are unavailable, the files are
    uploaded under temporary names and then renamed to the permanent names.
    """
    os.chdir(results_path)

    tmp_results = "temp-" + test_name + "-results.html"
    results = test_name + "-results.html"
    tmp_details = "temp-" + test_name + "-details.html"
    details = test_name + "-details.html"

    print("Uploading results via ftp...")
    ftp = ftplib.FTP(ftp_url, user, psw)
    ftp.set_debuglevel(debug_level)

    # ftp.cwd( site_path )

    def delete_quietly(name):
        # The target may not exist yet (first run); FTP errors here are
        # benign.  This replaces the original bare "except: ++i" no-op,
        # which silently swallowed everything, KeyboardInterrupt included.
        try:
            ftp.delete(name)
        except ftplib.all_errors:
            pass

    def store(local_name, remote_name):
        # Upload one file in binary mode, closing the handle even when
        # storbinary raises (the original leaked the handle on error).
        f = open(local_name, 'rb')
        try:
            ftp.storbinary('STOR %s' % remote_name, f)
        finally:
            f.close()

    delete_quietly(tmp_results)
    store(results, tmp_results)

    delete_quietly(tmp_details)
    store(details, tmp_details)

    delete_quietly(results)
    delete_quietly(details)

    # swap the fresh uploads into place under the permanent names
    ftp.rename(tmp_results, results)
    ftp.rename(tmp_details, details)

    ftp.dir()
    ftp.quit()
+
def commit_results(results_path, test_name, rev):
    """svn-commit the results page, using the revision number as the message."""
    print("Commit results...")
    saved_dir = os.getcwd()
    os.chdir(results_path)
    cmd('svn commit --non-interactive -m "%s" %s-results.html' % (rev, test_name))
    os.chdir(saved_dir)
+
+
+# ---------------------------------------------------------------------------- #
+
+if len(sys.argv) < 7:
+ print "Invoke with: minutes boost-path test-name results-path ftp-url user psw [bjam-args]"
+ print " boost-path must be path for a boost svn working directory."
+ print " results-path must be path for a svn working directory where an"
+ print " svn commit test-name+'-results.html' is valid."
+ print "Warning: This program hangs or crashes on network failures."
+ exit()
+
+minutes = int(sys.argv[1])
+boost_path = sys.argv[2]
+test_name = sys.argv[3]
+results_path = sys.argv[4]
+ftp_url = sys.argv[5]
+user = sys.argv[6]
+psw = sys.argv[7]
+if len(sys.argv) > 8: bjam_args = sys.argv[8]
+else: bjam_args = ""
+
+os.chdir(boost_path) # convert possible relative path
+boost_path = os.getcwd() # to absolute path
+
+print "minutes is ", minutes
+print "boost_path is ", boost_path
+print "test_name is ", test_name
+print "results_path is ", results_path
+print "ftp_url is ", ftp_url
+print "user is ", user
+print "psw is ", psw
+print 'bjam args are "' + bjam_args + '"'
+
+url = repository_url(boost_path, results_path)
+print "respository url is ", url
+
+first = 1
+while 1:
+ working_rev = revision(boost_path, results_path, test_name)
+ repos_rev = revision("http:" + url, results_path, test_name)
+ print "Working copy revision: ", working_rev, " repository revision: ", repos_rev
+ if first or working_rev != repos_rev:
+ first = 0
+ start_time = time.time()
+ print
+ print "start at", time.strftime("%H:%M:%S", time.localtime())
+ update_working_copy(boost_path)
+ os.chdir(boost_path+"/status")
+ bjam(boost_path, bjam_args, results_path, test_name)
+ process_jam_log(boost_path, results_path, test_name)
+ compiler_status(boost_path, results_path, test_name)
+ upload_to_ftp(results_path, test_name, ftp_url, user, psw, 0)
+ commit_results(results_path, test_name,revision(boost_path, results_path, test_name))
+ elapsed_time = time.time() - start_time
+ print elapsed_time/60.0, "minutes elapsed time"
+ print
+
+ print "sleep ", minutes, "minutes..."
+ time.sleep(60 * minutes)