author     Simon Hausmann <simon.hausmann@nokia.com>    2012-01-06 14:44:00 +0100
committer  Simon Hausmann <simon.hausmann@nokia.com>    2012-01-06 14:44:00 +0100
commit     40736c5763bf61337c8c14e16d8587db021a87d4 (patch)
tree       b17a9c00042ad89cb1308e2484491799aa14e9f8 /Tools/Scripts/webkitpy/performance_tests
download   qtwebkit-40736c5763bf61337c8c14e16d8587db021a87d4.tar.gz
Imported WebKit commit 2ea9d364d0f6efa8fa64acf19f451504c59be0e4 (http://svn.webkit.org/repository/webkit/trunk@104285)
Diffstat (limited to 'Tools/Scripts/webkitpy/performance_tests')
-rw-r--r--  Tools/Scripts/webkitpy/performance_tests/__init__.py                    1
-rw-r--r--  Tools/Scripts/webkitpy/performance_tests/perftestsrunner.py           168
-rwxr-xr-x  Tools/Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py  143
3 files changed, 312 insertions, 0 deletions
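
The new runner is a library class rather than a standalone script. A minimal usage sketch (not part of this commit; the 'inspector' subdirectory is an assumption) of how a driver script might invoke it, based on the PerfTestsRunner API added in perftestsrunner.py below:

    import sys
    from webkitpy.performance_tests.perftestsrunner import PerfTestsRunner

    # 'inspector' is an assumed subdirectory of PerformanceTests; the runner joins it
    # onto <webkit_base>/PerformanceTests when collecting *.html tests.
    runner = PerfTestsRunner('inspector')
    # run() returns the number of tests with unexpected results, or -1 if the build
    # is not up to date, so it can double as the process exit code.
    sys.exit(runner.run())
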
diff --git a/Tools/Scripts/webkitpy/performance_tests/__init__.py b/Tools/Scripts/webkitpy/performance_tests/__init__.py
new file mode 100644
index 000000000..ef65bee5b
--- /dev/null
+++ b/Tools/Scripts/webkitpy/performance_tests/__init__.py
@@ -0,0 +1 @@
+# Required for Python to search this directory for module files
diff --git a/Tools/Scripts/webkitpy/performance_tests/perftestsrunner.py b/Tools/Scripts/webkitpy/performance_tests/perftestsrunner.py
new file mode 100644
index 000000000..055305732
--- /dev/null
+++ b/Tools/Scripts/webkitpy/performance_tests/perftestsrunner.py
@@ -0,0 +1,168 @@
+#!/usr/bin/env python
+# Copyright (C) 2011 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Run Inspector's perf tests in perf mode."""
+
+import logging
+import optparse
+import re
+import sys
+
+from webkitpy.common import find_files
+from webkitpy.common.host import Host
+from webkitpy.layout_tests.port.driver import DriverInput
+from webkitpy.layout_tests.views import printing
+
+_log = logging.getLogger(__name__)
+
+
+class PerfTestsRunner(object):
+ _perf_tests_base_dir = 'PerformanceTests'
+ _result_regex = re.compile('^RESULT .*$')
+
+ def __init__(self, perf_tests_dir, regular_output=sys.stderr, buildbot_output=sys.stdout, args=None):
+ self._perf_tests_dir = perf_tests_dir
+ self._buildbot_output = buildbot_output
+ self._options, self._args = self._parse_args(args)
+ self._host = Host()
+ self._host._initialize_scm()
+ self._port = self._host.port_factory.get(self._options.platform, self._options)
+ self._printer = printing.Printer(self._port, self._options, regular_output, buildbot_output, configure_logging=False)
+ self._webkit_base_dir_len = len(self._port.webkit_base())
+
+ def _parse_args(self, args=None):
+ print_options = printing.print_options()
+
+ perf_option_list = [
+ optparse.make_option('--debug', action='store_const', const='Debug', dest="configuration",
+ help='Set the configuration to Debug'),
+ optparse.make_option('--release', action='store_const', const='Release', dest="configuration",
+ help='Set the configuration to Release'),
+ optparse.make_option("--platform",
+ help="Specify port/platform being tested (i.e. chromium-mac)"),
+ optparse.make_option("--build-directory",
+ help="Path to the directory under which build files are kept (should not include configuration)"),
+ optparse.make_option("--time-out-ms", default=30000,
+ help="Set the timeout for each test"),
+ ]
+
+ option_list = (perf_option_list + print_options)
+ return optparse.OptionParser(option_list=option_list).parse_args(args)
+
+ def _collect_tests(self, webkit_base, filesystem=None):
+ """Return the list of tests found."""
+
+ def _is_test_file(filesystem, dirname, filename):
+ return filename.endswith('.html')
+
+ filesystem = filesystem or self._host.filesystem
+ base_dir = filesystem.join(webkit_base, self._perf_tests_base_dir, self._perf_tests_dir)
+ return find_files.find(filesystem, base_dir, paths=self._args, file_filter=_is_test_file)
+
+ def run(self):
+ if self._options.help_printing:
+ self._printer.help_printing()
+ self._printer.cleanup()
+ return 0
+
+ if not self._port.check_build(needs_http=False):
+ _log.error("Build not up to date for %s" % self._port._path_to_driver())
+ return -1
+
+ # We wrap any parts of the run that are slow or likely to raise exceptions
+ # in a try/finally to ensure that we clean up the logging configuration.
+ unexpected = -1
+ try:
+ tests = self._collect_tests(self._port.webkit_base())
+ unexpected = self._run_tests_set(tests, self._port)
+ finally:
+ self._printer.cleanup()
+
+ return unexpected
+
+ def _run_tests_set(self, tests, port):
+ result_count = len(tests)
+ expected = 0
+ unexpected = 0
+ self._printer.print_one_line_summary(result_count, 0, 0)
+ driver_need_restart = False
+ driver = None
+
+ for test in tests:
+ if driver_need_restart:
+ _log.debug("%s killing driver" % test)
+ driver.stop()
+ driver = None
+ if not driver:
+ driver = port.create_driver(worker_number=1)
+
+ test_failed, driver_need_restart = self._run_single_test(test, driver)
+ if test_failed:
+ unexpected = unexpected + 1
+ else:
+ expected = expected + 1
+
+ self._printer.print_one_line_summary(result_count, expected, unexpected)
+
+ if driver:
+ driver.stop()
+
+ return unexpected
+
+ def _run_single_test(self, test, driver):
+ test_failed = False
+ driver_need_restart = False
+ output = driver.run_test(DriverInput(test, self._options.time_out_ms, None, False))
+
+ if output.text == None:
+ test_failed = True
+ elif output.timeout:
+ self._printer.write('timeout: %s' % test[self._webkit_base_dir_len + 1:])
+ test_failed = True
+ driver_need_restart = True
+ elif output.crash:
+ self._printer.write('crash: %s' % test[self._webkit_base_dir_len + 1:])
+ driver_need_restart = True
+ test_failed = True
+ else:
+ got_a_result = False
+ for line in re.split('\n', output.text):
+ if self._result_regex.match(line):
+ self._buildbot_output.write("%s\n" % line)
+ got_a_result = True
+ elif not len(line) == 0:
+ test_failed = True
+ self._printer.write("%s" % line)
+ test_failed = test_failed or not got_a_result
+
+ if len(output.error):
+ self._printer.write('error:\n%s' % output.error)
+ test_failed = True
+
+ return test_failed, driver_need_restart
diff --git a/Tools/Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py b/Tools/Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py
new file mode 100755
index 000000000..782f5e7c9
--- /dev/null
+++ b/Tools/Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py
@@ -0,0 +1,143 @@
+#!/usr/bin/python
+# Copyright (C) 2011 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Unit tests for run_perf_tests."""
+
+import unittest
+
+from webkitpy.common import array_stream
+from webkitpy.common.system.filesystem_mock import MockFileSystem
+from webkitpy.layout_tests.port.driver import DriverInput, DriverOutput
+from webkitpy.layout_tests.views import printing
+from webkitpy.performance_tests.perftestsrunner import PerfTestsRunner
+
+
+class MainTest(unittest.TestCase):
+ class TestPort:
+ def create_driver(self, worker_number=None):
+ return MainTest.TestDriver()
+
+ class TestDriver:
+ def run_test(self, driver_input):
+ text = ''
+ timeout = False
+ crash = False
+ if driver_input.test_name == 'pass.html':
+ text = 'RESULT group_name: test_name= 42 ms'
+ elif driver_input.test_name == 'timeout.html':
+ timeout = True
+ elif driver_input.test_name == 'failed.html':
+ text = None
+ elif driver_input.test_name == 'tonguey.html':
+ text = 'we are not expecting an output from perf tests but RESULT blablabla'
+ elif driver_input.test_name == 'crash.html':
+ crash = True
+ return DriverOutput(text, '', '', '', crash=crash, timeout=timeout)
+
+ def stop(self):
+ """do nothing"""
+
+ def create_runner(self, buildbot_output=None):
+ buildbot_output = buildbot_output or array_stream.ArrayStream()
+ regular_output = array_stream.ArrayStream()
+ return PerfTestsRunner('', regular_output, buildbot_output, args=[])
+
+ def run_test(self, test_name):
+ runner = self.create_runner()
+ driver = MainTest.TestDriver()
+ return runner._run_single_test(test_name, driver)
+
+ def test_run_passing_test(self):
+ test_failed, driver_need_restart = self.run_test('pass.html')
+ self.assertFalse(test_failed)
+ self.assertFalse(driver_need_restart)
+
+ def test_run_silent_test(self):
+ test_failed, driver_need_restart = self.run_test('silent.html')
+ self.assertTrue(test_failed)
+ self.assertFalse(driver_need_restart)
+
+ def test_run_failed_test(self):
+ test_failed, driver_need_restart = self.run_test('failed.html')
+ self.assertTrue(test_failed)
+ self.assertFalse(driver_need_restart)
+
+ def test_run_tonguey_test(self):
+ test_failed, driver_need_restart = self.run_test('tonguey.html')
+ self.assertTrue(test_failed)
+ self.assertFalse(driver_need_restart)
+
+ def test_run_timeout_test(self):
+ test_failed, driver_need_restart = self.run_test('timeout.html')
+ self.assertTrue(test_failed)
+ self.assertTrue(driver_need_restart)
+
+ def test_run_crash_test(self):
+ test_failed, driver_need_restart = self.run_test('crash.html')
+ self.assertTrue(test_failed)
+ self.assertTrue(driver_need_restart)
+
+ def test_run_test_set(self):
+ buildbot_output = array_stream.ArrayStream()
+ runner = self.create_runner(buildbot_output)
+ port = MainTest.TestPort()
+ tests = ['pass.html', 'silent.html', 'failed.html', 'tonguey.html', 'timeout.html', 'crash.html']
+ unexpected_result_count = runner._run_tests_set(tests, port)
+ self.assertEqual(unexpected_result_count, len(tests) - 1)
+ self.assertEqual(len(buildbot_output.get()), 1)
+ self.assertEqual(buildbot_output.get()[0], 'RESULT group_name: test_name= 42 ms\n')
+
+ def test_collect_tests(self):
+ runner = self.create_runner()
+ webkit_base = '/test.checkout'
+ filesystem = MockFileSystem()
+ filename = filesystem.join(webkit_base, 'PerformanceTests', 'a_file.html')
+ filesystem.files[filename] = 'a content'
+ tests = runner._collect_tests(webkit_base, filesystem)
+ self.assertEqual(len(tests), 1)
+
+ def test_parse_args(self):
+ runner = self.create_runner()
+ options, args = runner._parse_args([
+ '--verbose',
+ '--build-directory=folder42',
+ '--platform=platform42',
+ '--time-out-ms=42',
+ '--debug', 'an_arg'])
+ self.assertEqual(options.verbose, True)
+ self.assertEqual(options.help_printing, None)
+ self.assertEqual(options.build_directory, 'folder42')
+ self.assertEqual(options.platform, 'platform42')
+ self.assertEqual(options.time_out_ms, '42')
+ self.assertEqual(options.configuration, 'Debug')
+ self.assertEqual(options.print_options, None)
+
+
+if __name__ == '__main__':
+ unittest.main()
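
These tests are presumably discovered by webkitpy's own test runner; a minimal sketch (assuming Tools/Scripts is on PYTHONPATH so the webkitpy imports resolve) of running just this module with the standard unittest machinery:

    import unittest
    from webkitpy.performance_tests import perftestsrunner_unittest

    # Collect MainTest from the module added above and run it verbosely.
    suite = unittest.TestLoader().loadTestsFromModule(perftestsrunner_unittest)
    unittest.TextTestRunner(verbosity=2).run(suite)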