author    Simon Hausmann <simon.hausmann@nokia.com>  2012-01-06 14:44:00 +0100
committer Simon Hausmann <simon.hausmann@nokia.com>  2012-01-06 14:44:00 +0100
commit    40736c5763bf61337c8c14e16d8587db021a87d4 (patch)
tree      b17a9c00042ad89cb1308e2484491799aa14e9f8 /Tools/Scripts/webkitpy/common/net
download  qtwebkit-40736c5763bf61337c8c14e16d8587db021a87d4.tar.gz
Imported WebKit commit 2ea9d364d0f6efa8fa64acf19f451504c59be0e4 (http://svn.webkit.org/repository/webkit/trunk@104285)
Diffstat (limited to 'Tools/Scripts/webkitpy/common/net')
-rw-r--r--  Tools/Scripts/webkitpy/common/net/__init__.py | 1
-rw-r--r--  Tools/Scripts/webkitpy/common/net/bugzilla/__init__.py | 7
-rw-r--r--  Tools/Scripts/webkitpy/common/net/bugzilla/attachment.py | 118
-rw-r--r--  Tools/Scripts/webkitpy/common/net/bugzilla/bug.py | 125
-rw-r--r--  Tools/Scripts/webkitpy/common/net/bugzilla/bug_unittest.py | 47
-rw-r--r--  Tools/Scripts/webkitpy/common/net/bugzilla/bugzilla.py | 874
-rw-r--r--  Tools/Scripts/webkitpy/common/net/bugzilla/bugzilla_mock.py | 397
-rw-r--r--  Tools/Scripts/webkitpy/common/net/bugzilla/bugzilla_unittest.py | 518
-rw-r--r--  Tools/Scripts/webkitpy/common/net/buildbot/__init__.py | 5
-rw-r--r--  Tools/Scripts/webkitpy/common/net/buildbot/buildbot.py | 494
-rw-r--r--  Tools/Scripts/webkitpy/common/net/buildbot/buildbot_mock.py | 115
-rw-r--r--  Tools/Scripts/webkitpy/common/net/buildbot/buildbot_unittest.py | 472
-rw-r--r--  Tools/Scripts/webkitpy/common/net/buildbot/chromiumbuildbot.py | 47
-rw-r--r--  Tools/Scripts/webkitpy/common/net/credentials.py | 154
-rw-r--r--  Tools/Scripts/webkitpy/common/net/credentials_unittest.py | 186
-rw-r--r--  Tools/Scripts/webkitpy/common/net/failuremap.py | 88
-rw-r--r--  Tools/Scripts/webkitpy/common/net/failuremap_unittest.py | 80
-rw-r--r--  Tools/Scripts/webkitpy/common/net/file_uploader.py | 114
-rw-r--r--  Tools/Scripts/webkitpy/common/net/htdigestparser.py | 54
-rw-r--r--  Tools/Scripts/webkitpy/common/net/htdigestparser_unittest.py | 82
-rw-r--r--  Tools/Scripts/webkitpy/common/net/irc/__init__.py | 1
-rw-r--r--  Tools/Scripts/webkitpy/common/net/irc/irc_mock.py | 37
-rw-r--r--  Tools/Scripts/webkitpy/common/net/irc/ircbot.py | 91
-rw-r--r--  Tools/Scripts/webkitpy/common/net/irc/ircproxy.py | 62
-rw-r--r--  Tools/Scripts/webkitpy/common/net/irc/ircproxy_unittest.py | 43
-rw-r--r--  Tools/Scripts/webkitpy/common/net/layouttestresults.py | 175
-rw-r--r--  Tools/Scripts/webkitpy/common/net/layouttestresults_unittest.py | 146
-rw-r--r--  Tools/Scripts/webkitpy/common/net/networktransaction.py | 70
-rw-r--r--  Tools/Scripts/webkitpy/common/net/networktransaction_unittest.py | 94
-rw-r--r--  Tools/Scripts/webkitpy/common/net/regressionwindow.py | 52
-rw-r--r--  Tools/Scripts/webkitpy/common/net/resultsjsonparser.py | 152
-rw-r--r--  Tools/Scripts/webkitpy/common/net/resultsjsonparser_unittest.py | 96
-rw-r--r--  Tools/Scripts/webkitpy/common/net/statusserver.py | 170
-rw-r--r--  Tools/Scripts/webkitpy/common/net/statusserver_mock.py | 68
-rw-r--r--  Tools/Scripts/webkitpy/common/net/statusserver_unittest.py | 43
-rw-r--r--  Tools/Scripts/webkitpy/common/net/web.py | 36
-rw-r--r--  Tools/Scripts/webkitpy/common/net/web_mock.py | 51
37 files changed, 5365 insertions, 0 deletions
diff --git a/Tools/Scripts/webkitpy/common/net/__init__.py b/Tools/Scripts/webkitpy/common/net/__init__.py
new file mode 100644
index 000000000..ef65bee5b
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/__init__.py
@@ -0,0 +1 @@
+# Required for Python to search this directory for module files
diff --git a/Tools/Scripts/webkitpy/common/net/bugzilla/__init__.py b/Tools/Scripts/webkitpy/common/net/bugzilla/__init__.py
new file mode 100644
index 000000000..c427b1800
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/bugzilla/__init__.py
@@ -0,0 +1,7 @@
+# Required for Python to search this directory for module files
+
+# We only export public API here.
+from .bugzilla import Bugzilla
+# Unclear if Bug and Attachment need to be public classes.
+from .bug import Bug
+from .attachment import Attachment
diff --git a/Tools/Scripts/webkitpy/common/net/bugzilla/attachment.py b/Tools/Scripts/webkitpy/common/net/bugzilla/attachment.py
new file mode 100644
index 000000000..6e10d65a9
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/bugzilla/attachment.py
@@ -0,0 +1,118 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Copyright (c) 2009 Apple Inc. All rights reserved.
+# Copyright (c) 2010 Research In Motion Limited. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from webkitpy.common.memoized import memoized
+from webkitpy.common.system.deprecated_logging import log
+
+
+class Attachment(object):
+
+ rollout_preamble = "ROLLOUT of r"
+
+ def __init__(self, attachment_dictionary, bug):
+ self._attachment_dictionary = attachment_dictionary
+ self._bug = bug
+ # FIXME: These should be replaced with @memoized after updating mocks.
+ self._reviewer = None
+ self._committer = None
+
+ def _bugzilla(self):
+ return self._bug._bugzilla
+
+ def id(self):
+ return int(self._attachment_dictionary.get("id"))
+
+ @memoized
+ def attacher(self):
+ return self._bugzilla().committers.contributor_by_email(self.attacher_email())
+
+ def attacher_email(self):
+ return self._attachment_dictionary.get("attacher_email")
+
+ def bug(self):
+ return self._bug
+
+ def bug_id(self):
+ return int(self._attachment_dictionary.get("bug_id"))
+
+ def is_patch(self):
+ return not not self._attachment_dictionary.get("is_patch")
+
+ def is_obsolete(self):
+ return not not self._attachment_dictionary.get("is_obsolete")
+
+ def is_rollout(self):
+ return self.name().startswith(self.rollout_preamble)
+
+ def name(self):
+ return self._attachment_dictionary.get("name")
+
+ def attach_date(self):
+ return self._attachment_dictionary.get("attach_date")
+
+ def review(self):
+ return self._attachment_dictionary.get("review")
+
+ def commit_queue(self):
+ return self._attachment_dictionary.get("commit-queue")
+
+ def url(self):
+ # FIXME: This should just return
+ # self._bugzilla().attachment_url_for_id(self.id()). scm_unittest.py
+ # depends on the current behavior.
+ return self._attachment_dictionary.get("url")
+
+ def contents(self):
+ # FIXME: We shouldn't be grabbing at _bugzilla.
+ return self._bug._bugzilla.fetch_attachment_contents(self.id())
+
+ def _validate_flag_value(self, flag):
+ email = self._attachment_dictionary.get("%s_email" % flag)
+ if not email:
+ return None
+ # FIXME: This is not a robust way to call committer_by_email
+ committer = getattr(self._bugzilla().committers,
+ "%s_by_email" % flag)(email)
+ if committer:
+ return committer
+ log("Warning, attachment %s on bug %s has invalid %s (%s)" % (
+ self._attachment_dictionary['id'],
+ self._attachment_dictionary['bug_id'], flag, email))
+
+ # FIXME: These could use @memoized like attacher(), but unit tests would need updates.
+ def reviewer(self):
+ if not self._reviewer:
+ self._reviewer = self._validate_flag_value("reviewer")
+ return self._reviewer
+
+ def committer(self):
+ if not self._committer:
+ self._committer = self._validate_flag_value("committer")
+ return self._committer
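
The Attachment class above is a thin wrapper around the dictionary that Bugzilla parses out of show_bug.cgi?ctype=xml; reviewer() and committer() resolve the review and commit-queue flag setters against the committers list and cache the result. A minimal usage sketch follows (illustrative only, not part of the imported commit; the bug id is a placeholder and fetching it requires network access to bugs.webkit.org):

    from webkitpy.common.net.bugzilla import Bugzilla

    bugzilla = Bugzilla()
    bug = bugzilla.fetch_bug(50000)        # placeholder id; hits bugs.webkit.org
    for attachment in bug.attachments():   # obsolete attachments are skipped by default
        if attachment.is_patch() and attachment.review() == "+":
            # reviewer() returns None (and logs a warning) when the review+
            # setter is not in the committers list.
            print attachment.id(), attachment.reviewer()
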
diff --git a/Tools/Scripts/webkitpy/common/net/bugzilla/bug.py b/Tools/Scripts/webkitpy/common/net/bugzilla/bug.py
new file mode 100644
index 000000000..4bf8ec61e
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/bugzilla/bug.py
@@ -0,0 +1,125 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Copyright (c) 2009 Apple Inc. All rights reserved.
+# Copyright (c) 2010 Research In Motion Limited. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from .attachment import Attachment
+
+
+class Bug(object):
+ # FIXME: This class is kind of a hack for now. It exists so we have one
+ # place to hold bug logic, even though much of the code still deals with
+ # dictionaries.
+
+ def __init__(self, bug_dictionary, bugzilla):
+ self.bug_dictionary = bug_dictionary
+ self._bugzilla = bugzilla
+
+ def id(self):
+ return self.bug_dictionary["id"]
+
+ def title(self):
+ # FIXME: Do we need to HTML unescape the title?
+ return self.bug_dictionary["title"]
+
+ def reporter_email(self):
+ return self.bug_dictionary["reporter_email"]
+
+ def assigned_to_email(self):
+ return self.bug_dictionary["assigned_to_email"]
+
+ def cc_emails(self):
+ return self.bug_dictionary["cc_emails"]
+
+ # FIXME: This information should be stored in some sort of webkit_config.py instead of here.
+ unassigned_emails = frozenset([
+ "webkit-unassigned@lists.webkit.org",
+ "webkit-qt-unassigned@trolltech.com",
+ ])
+
+ def is_unassigned(self):
+ return self.assigned_to_email() in self.unassigned_emails
+
+ def status(self):
+ return self.bug_dictionary["bug_status"]
+
+ # Bugzilla has many status states we don't really use in WebKit:
+ # https://bugs.webkit.org/page.cgi?id=fields.html#status
+ _open_states = ["UNCONFIRMED", "NEW", "ASSIGNED", "REOPENED"]
+ _closed_states = ["RESOLVED", "VERIFIED", "CLOSED"]
+
+ def is_open(self):
+ return self.status() in self._open_states
+
+ def is_closed(self):
+ return not self.is_open()
+
+ def duplicate_of(self):
+ return self.bug_dictionary.get('dup_id', None)
+
+ # Rarely do we actually want obsolete attachments
+ def attachments(self, include_obsolete=False):
+ attachments = self.bug_dictionary["attachments"]
+ if not include_obsolete:
+ attachments = filter(lambda attachment:
+ not attachment["is_obsolete"], attachments)
+ return [Attachment(attachment, self) for attachment in attachments]
+
+ def patches(self, include_obsolete=False):
+ return [patch for patch in self.attachments(include_obsolete)
+ if patch.is_patch()]
+
+ def unreviewed_patches(self):
+ return [patch for patch in self.patches() if patch.review() == "?"]
+
+ def reviewed_patches(self, include_invalid=False):
+ patches = [patch for patch in self.patches() if patch.review() == "+"]
+ if include_invalid:
+ return patches
+ # Checking reviewer() ensures that it was both reviewed and has a valid
+ # reviewer.
+ return filter(lambda patch: patch.reviewer(), patches)
+
+ def commit_queued_patches(self, include_invalid=False):
+ patches = [patch for patch in self.patches()
+ if patch.commit_queue() == "+"]
+ if include_invalid:
+ return patches
+ # Checking committer() ensures that it was both commit-queue+'d and has
+ # a valid committer.
+ return filter(lambda patch: patch.committer(), patches)
+
+ def comments(self):
+ return self.bug_dictionary["comments"]
+
+ def is_in_comments(self, message):
+ for comment in self.comments():
+ if message in comment["text"]:
+ return True
+ return False
+
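
Bug layers patch-filtering helpers on top of attachments(): patches() keeps only patch attachments, unreviewed_patches() keeps those with review "?", and reviewed_patches()/commit_queued_patches() additionally require a valid reviewer or committer unless include_invalid is passed. A small offline sketch of how these filters compose (illustrative only, not part of the imported commit; the dictionary shape follows the fixtures in bugzilla_mock.py):

    from webkitpy.common.net.bugzilla.bug import Bug

    bug_dictionary = {
        "id": 50000,
        "assigned_to_email": "webkit-unassigned@lists.webkit.org",
        "attachments": [
            {"id": 10000, "is_obsolete": False, "is_patch": True, "review": "?"},
            {"id": 10001, "is_obsolete": True, "is_patch": True, "review": "?"},
        ],
    }
    bug = Bug(bug_dictionary, bugzilla=None)  # bugzilla is only needed for flag validation
    print bug.is_unassigned()                                  # True
    print [patch.id() for patch in bug.patches()]              # [10000]; obsolete patch dropped
    print [patch.id() for patch in bug.unreviewed_patches()]   # [10000]
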
diff --git a/Tools/Scripts/webkitpy/common/net/bugzilla/bug_unittest.py b/Tools/Scripts/webkitpy/common/net/bugzilla/bug_unittest.py
new file mode 100644
index 000000000..f20c6010c
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/bugzilla/bug_unittest.py
@@ -0,0 +1,47 @@
+# Copyright (C) 2009 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import unittest
+
+from .bug import Bug
+
+
+class BugTest(unittest.TestCase):
+ def test_is_unassigned(self):
+ for email in Bug.unassigned_emails:
+ bug = Bug({"assigned_to_email": email}, bugzilla=None)
+ self.assertTrue(bug.is_unassigned())
+ bug = Bug({"assigned_to_email": "test@test.com"}, bugzilla=None)
+ self.assertFalse(bug.is_unassigned())
+
+ def test_is_in_comments(self):
+ bug = Bug({"comments": [{"text": "Message1."},
+ {"text": "Message2. Message3. Message4."}, ], },
+ bugzilla=None)
+ self.assertTrue(bug.is_in_comments("Message3."))
+ self.assertFalse(bug.is_in_comments("Message."))
diff --git a/Tools/Scripts/webkitpy/common/net/bugzilla/bugzilla.py b/Tools/Scripts/webkitpy/common/net/bugzilla/bugzilla.py
new file mode 100644
index 000000000..511f1bbdb
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/bugzilla/bugzilla.py
@@ -0,0 +1,874 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Copyright (c) 2009 Apple Inc. All rights reserved.
+# Copyright (c) 2010 Research In Motion Limited. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# WebKit's Python module for interacting with Bugzilla
+
+import mimetypes
+import os.path
+import re
+import StringIO
+import urllib
+
+from datetime import datetime # used in timestamp()
+
+from .attachment import Attachment
+from .bug import Bug
+
+from webkitpy.common.system.deprecated_logging import log
+from webkitpy.common.config import committers
+import webkitpy.common.config.urls as config_urls
+from webkitpy.common.net.credentials import Credentials
+from webkitpy.common.system.user import User
+from webkitpy.thirdparty.BeautifulSoup import BeautifulSoup, BeautifulStoneSoup, SoupStrainer
+
+
+class EditUsersParser(object):
+ def __init__(self):
+ self._group_name_to_group_string_cache = {}
+
+ def _login_and_uid_from_row(self, row):
+ first_cell = row.find("td")
+ # The first row is just headers; we skip it.
+ if not first_cell:
+ return None
+ # When there were no results, we have a fake "<none>" entry in the table.
+ if first_cell.find(text="<none>"):
+ return None
+ # Otherwise the <td> contains a single <a> which contains the login name or a single <i> with the string "<none>".
+ anchor_tag = first_cell.find("a")
+ login = unicode(anchor_tag.string).strip()
+ user_id = int(re.search(r"userid=(\d+)", str(anchor_tag['href'])).group(1))
+ return (login, user_id)
+
+ def login_userid_pairs_from_edit_user_results(self, results_page):
+ soup = BeautifulSoup(results_page, convertEntities=BeautifulStoneSoup.HTML_ENTITIES)
+ results_table = soup.find(id="admin_table")
+ login_userid_pairs = [self._login_and_uid_from_row(row) for row in results_table('tr')]
+ # Filter out None from the logins.
+ return filter(lambda pair: bool(pair), login_userid_pairs)
+
+ def _group_name_and_string_from_row(self, row):
+ label_element = row.find('label')
+ group_string = unicode(label_element['for'])
+ group_name = unicode(label_element.find('strong').string).rstrip(':')
+ return (group_name, group_string)
+
+ def user_dict_from_edit_user_page(self, page):
+ soup = BeautifulSoup(page, convertEntities=BeautifulStoneSoup.HTML_ENTITIES)
+ user_table = soup.find("table", {'class': 'main'})
+ user_dict = {}
+ for row in user_table('tr'):
+ label_element = row.find('label')
+ if not label_element:
+ continue # This must not be a row we know how to parse.
+ if row.find('table'):
+ continue # Skip the <tr> holding the groups table.
+
+ key = label_element['for']
+ if "group" in key:
+ key = "groups"
+ value = user_dict.get('groups', set())
+ # We must be parsing a "tr" inside the inner group table.
+ (group_name, _) = self._group_name_and_string_from_row(row)
+ if row.find('input', {'type': 'checkbox', 'checked': 'checked'}):
+ value.add(group_name)
+ else:
+ value = unicode(row.find('td').string).strip()
+ user_dict[key] = value
+ return user_dict
+
+ def _group_rows_from_edit_user_page(self, edit_user_page):
+ soup = BeautifulSoup(edit_user_page, convertEntities=BeautifulSoup.HTML_ENTITIES)
+ return soup('td', {'class': 'groupname'})
+
+ def group_string_from_name(self, edit_user_page, group_name):
+ # Bugzilla uses "group_NUMBER" strings, which may differ per install,
+ # so we just look them up once and cache them.
+ if not self._group_name_to_group_string_cache:
+ rows = self._group_rows_from_edit_user_page(edit_user_page)
+ name_string_pairs = map(self._group_name_and_string_from_row, rows)
+ self._group_name_to_group_string_cache = dict(name_string_pairs)
+ return self._group_name_to_group_string_cache[group_name]
+
+
+def timestamp():
+ return datetime.now().strftime("%Y%m%d%H%M%S")
+
+
+# A container for all of the logic for making and parsing bugzilla queries.
+class BugzillaQueries(object):
+
+ def __init__(self, bugzilla):
+ self._bugzilla = bugzilla
+
+ def _is_xml_bugs_form(self, form):
+ # ClientForm.HTMLForm.find_control throws if the control is not found,
+ # so we do a manual search instead:
+ return "xml" in [control.id for control in form.controls]
+
+ # This is kind of a hack. There is probably a better way to get this information from bugzilla.
+ def _parse_result_count(self, results_page):
+ result_count_text = BeautifulSoup(results_page).find(attrs={'class': 'bz_result_count'}).string
+ result_count_parts = result_count_text.strip().split(" ")
+ if result_count_parts[0] == "Zarro":
+ return 0
+ if result_count_parts[0] == "One":
+ return 1
+ return int(result_count_parts[0])
+
+ # Note: _load_query, _fetch_bug and _fetch_bugs_from_advanced_query
+ # are the only methods which access self._bugzilla.
+
+ def _load_query(self, query):
+ self._bugzilla.authenticate()
+ full_url = "%s%s" % (config_urls.bug_server_url, query)
+ return self._bugzilla.browser.open(full_url)
+
+ def _fetch_bugs_from_advanced_query(self, query):
+ results_page = self._load_query(query)
+ if not self._parse_result_count(results_page):
+ return []
+ # Bugzilla results pages have an "XML" submit button at the bottom
+ # which can be used to get an XML page containing all of the <bug> elements.
+ # This is slightly lame in that it assumes _load_query used
+ # self._bugzilla.browser and that it is in an acceptable state.
+ self._bugzilla.browser.select_form(predicate=self._is_xml_bugs_form)
+ bugs_xml = self._bugzilla.browser.submit()
+ return self._bugzilla._parse_bugs_from_xml(bugs_xml)
+
+ def _fetch_bug(self, bug_id):
+ return self._bugzilla.fetch_bug(bug_id)
+
+ def _fetch_bug_ids_advanced_query(self, query):
+ soup = BeautifulSoup(self._load_query(query))
+ # The contents of the <a> inside the cells in the first column happen
+ # to be the bug id.
+ return [int(bug_link_cell.find("a").string)
+ for bug_link_cell in soup('td', "first-child")]
+
+ def _parse_attachment_ids_request_query(self, page):
+ digits = re.compile("\d+")
+ attachment_href = re.compile("attachment.cgi\?id=\d+&action=review")
+ attachment_links = SoupStrainer("a", href=attachment_href)
+ return [int(digits.search(tag["href"]).group(0))
+ for tag in BeautifulSoup(page, parseOnlyThese=attachment_links)]
+
+ def _fetch_attachment_ids_request_query(self, query):
+ return self._parse_attachment_ids_request_query(self._load_query(query))
+
+ def _parse_quips(self, page):
+ soup = BeautifulSoup(page, convertEntities=BeautifulSoup.HTML_ENTITIES)
+ quips = soup.find(text=re.compile(r"Existing quips:")).findNext("ul").findAll("li")
+ return [unicode(quip_entry.string) for quip_entry in quips]
+
+ def fetch_quips(self):
+ return self._parse_quips(self._load_query("/quips.cgi?action=show"))
+
+ # List of all r+'d bugs.
+ def fetch_bug_ids_from_pending_commit_list(self):
+ needs_commit_query_url = "buglist.cgi?query_format=advanced&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&field0-0-0=flagtypes.name&type0-0-0=equals&value0-0-0=review%2B"
+ return self._fetch_bug_ids_advanced_query(needs_commit_query_url)
+
+ def fetch_bugs_matching_quicksearch(self, search_string):
+ # We may want to use a more explicit query than "quicksearch".
+ # If quicksearch changes we should probably change to use
+ # a normal buglist.cgi?query_format=advanced query.
+ quicksearch_url = "buglist.cgi?quicksearch=%s" % urllib.quote(search_string)
+ return self._fetch_bugs_from_advanced_query(quicksearch_url)
+
+ # Currently this returns all bugs across all components.
+ # In the future we may wish to extend this API to construct more restricted searches.
+ def fetch_bugs_matching_search(self, search_string, author_email=None):
+ query = "buglist.cgi?query_format=advanced"
+ if search_string:
+ query += "&short_desc_type=allwordssubstr&short_desc=%s" % urllib.quote(search_string)
+ if author_email:
+ query += "&emailreporter1=1&emailtype1=substring&email1=%s" % urllib.quote(search_string)
+ return self._fetch_bugs_from_advanced_query(query)
+
+ def fetch_patches_from_pending_commit_list(self):
+ return sum([self._fetch_bug(bug_id).reviewed_patches()
+ for bug_id in self.fetch_bug_ids_from_pending_commit_list()], [])
+
+ def fetch_bug_ids_from_commit_queue(self):
+ commit_queue_url = "buglist.cgi?query_format=advanced&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&field0-0-0=flagtypes.name&type0-0-0=equals&value0-0-0=commit-queue%2B&order=Last+Changed"
+ return self._fetch_bug_ids_advanced_query(commit_queue_url)
+
+ def fetch_patches_from_commit_queue(self):
+ # This function will only return patches which have valid committers
+ # set. It won't reject patches with invalid committers/reviewers.
+ return sum([self._fetch_bug(bug_id).commit_queued_patches()
+ for bug_id in self.fetch_bug_ids_from_commit_queue()], [])
+
+ def fetch_bug_ids_from_review_queue(self):
+ review_queue_url = "buglist.cgi?query_format=advanced&bug_status=UNCONFIRMED&bug_status=NEW&bug_status=ASSIGNED&bug_status=REOPENED&field0-0-0=flagtypes.name&type0-0-0=equals&value0-0-0=review?"
+ return self._fetch_bug_ids_advanced_query(review_queue_url)
+
+ # This method will make several requests to bugzilla.
+ def fetch_patches_from_review_queue(self, limit=None):
+ # [:None] returns the whole array.
+ return sum([self._fetch_bug(bug_id).unreviewed_patches()
+ for bug_id in self.fetch_bug_ids_from_review_queue()[:limit]], [])
+
+ # NOTE: This is the only client of _fetch_attachment_ids_request_query
+ # This method only makes one request to bugzilla.
+ def fetch_attachment_ids_from_review_queue(self):
+ review_queue_url = "request.cgi?action=queue&type=review&group=type"
+ return self._fetch_attachment_ids_request_query(review_queue_url)
+
+ # This only works if your account has edituser privileges.
+ # We could easily parse https://bugs.webkit.org/userprefs.cgi?tab=permissions to
+ # check permissions, but bugzilla will just return an error if we don't have them.
+ def fetch_login_userid_pairs_matching_substring(self, search_string):
+ review_queue_url = "editusers.cgi?action=list&matchvalue=login_name&matchstr=%s&matchtype=substr" % urllib.quote(search_string)
+ results_page = self._load_query(review_queue_url)
+ # We could pull the EditUsersParser off Bugzilla if needed.
+ return EditUsersParser().login_userid_pairs_from_edit_user_results(results_page)
+
+ # FIXME: We should consider adding a BugzillaUser class.
+ def fetch_logins_matching_substring(self, search_string):
+ pairs = self.fetch_login_userid_pairs_matching_substring(search_string)
+ return map(lambda pair: pair[0], pairs)
+
+
+class Bugzilla(object):
+ def __init__(self, dryrun=False, committers=committers.CommitterList()):
+ self.dryrun = dryrun
+ self.authenticated = False
+ self.queries = BugzillaQueries(self)
+ self.committers = committers
+ self.cached_quips = []
+ self.edit_user_parser = EditUsersParser()
+
+ # FIXME: We should use some sort of Browser mock object when in dryrun
+ # mode (to prevent any mistakes).
+ from webkitpy.thirdparty.autoinstalled.mechanize import Browser
+ self.browser = Browser()
+ # Ignore bugs.webkit.org/robots.txt until we fix it to allow this script.
+ self.browser.set_handle_robots(False)
+
+ def fetch_user(self, user_id):
+ self.authenticate()
+ edit_user_page = self.browser.open(self.edit_user_url_for_id(user_id))
+ return self.edit_user_parser.user_dict_from_edit_user_page(edit_user_page)
+
+ def add_user_to_groups(self, user_id, group_names):
+ self.authenticate()
+ user_edit_page = self.browser.open(self.edit_user_url_for_id(user_id))
+ self.browser.select_form(nr=1)
+ for group_name in group_names:
+ group_string = self.edit_user_parser.group_string_from_name(user_edit_page, group_name)
+ self.browser.find_control(group_string).items[0].selected = True
+ self.browser.submit()
+
+ def quips(self):
+ # We only fetch and parse the list of quips once per instantiation
+ # so that we do not burden bugs.webkit.org.
+ if not self.cached_quips and not self.dryrun:
+ self.cached_quips = self.queries.fetch_quips()
+ return self.cached_quips
+
+ def bug_url_for_bug_id(self, bug_id, xml=False):
+ if not bug_id:
+ return None
+ content_type = "&ctype=xml" if xml else ""
+ return "%sshow_bug.cgi?id=%s%s" % (config_urls.bug_server_url, bug_id, content_type)
+
+ def short_bug_url_for_bug_id(self, bug_id):
+ if not bug_id:
+ return None
+ return "http://webkit.org/b/%s" % bug_id
+
+ def add_attachment_url(self, bug_id):
+ return "%sattachment.cgi?action=enter&bugid=%s" % (config_urls.bug_server_url, bug_id)
+
+ def attachment_url_for_id(self, attachment_id, action="view"):
+ if not attachment_id:
+ return None
+ action_param = ""
+ if action and action != "view":
+ action_param = "&action=%s" % action
+ return "%sattachment.cgi?id=%s%s" % (config_urls.bug_server_url,
+ attachment_id,
+ action_param)
+
+ def edit_user_url_for_id(self, user_id):
+ return "%seditusers.cgi?action=edit&userid=%s" % (config_urls.bug_server_url, user_id)
+
+ def _parse_attachment_flag(self,
+ element,
+ flag_name,
+ attachment,
+ result_key):
+ flag = element.find('flag', attrs={'name': flag_name})
+ if flag:
+ attachment[flag_name] = flag['status']
+ if flag['status'] == '+':
+ attachment[result_key] = flag['setter']
+ # Sadly show_bug.cgi?ctype=xml does not expose the flag modification date.
+
+ def _string_contents(self, soup):
+ # WebKit's bugzilla instance uses UTF-8.
+ # BeautifulStoneSoup always returns Unicode strings; however,
+ # the .string method returns a (unicode) NavigableString.
+ # NavigableString can confuse other parts of the code, so we
+ # convert from NavigableString to a real unicode() object using unicode().
+ return unicode(soup.string)
+
+ # Example: 2010-01-20 14:31 PST
+ # FIXME: Some bugzilla dates seem to have seconds in them?
+ # Python does not support timezones out of the box.
+ # Assume that bugzilla always uses PST (which is true for bugs.webkit.org)
+ _bugzilla_date_format = "%Y-%m-%d %H:%M:%S"
+
+ @classmethod
+ def _parse_date(cls, date_string):
+ (date, time, time_zone) = date_string.split(" ")
+ if time.count(':') == 1:
+ # Add seconds into the time.
+ time += ':0'
+ # Ignore the timezone because python doesn't understand timezones out of the box.
+ date_string = "%s %s" % (date, time)
+ return datetime.strptime(date_string, cls._bugzilla_date_format)
+
+ def _date_contents(self, soup):
+ return self._parse_date(self._string_contents(soup))
+
+ def _parse_attachment_element(self, element, bug_id):
+ attachment = {}
+ attachment['bug_id'] = bug_id
+ attachment['is_obsolete'] = (element.has_key('isobsolete') and element['isobsolete'] == "1")
+ attachment['is_patch'] = (element.has_key('ispatch') and element['ispatch'] == "1")
+ attachment['id'] = int(element.find('attachid').string)
+ # FIXME: No need to parse out the url here.
+ attachment['url'] = self.attachment_url_for_id(attachment['id'])
+ attachment["attach_date"] = self._date_contents(element.find("date"))
+ attachment['name'] = self._string_contents(element.find('desc'))
+ attachment['attacher_email'] = self._string_contents(element.find('attacher'))
+ attachment['type'] = self._string_contents(element.find('type'))
+ self._parse_attachment_flag(
+ element, 'review', attachment, 'reviewer_email')
+ self._parse_attachment_flag(
+ element, 'commit-queue', attachment, 'committer_email')
+ return attachment
+
+ def _parse_log_descr_element(self, element):
+ comment = {}
+ comment['comment_email'] = self._string_contents(element.find('who'))
+ comment['comment_date'] = self._date_contents(element.find('bug_when'))
+ comment['text'] = self._string_contents(element.find('thetext'))
+ return comment
+
+ def _parse_bugs_from_xml(self, page):
+ soup = BeautifulSoup(page)
+ # Without the unicode() call, BeautifulSoup occasionally complains of being
+ # passed None for no apparent reason.
+ return [Bug(self._parse_bug_dictionary_from_xml(unicode(bug_xml)), self) for bug_xml in soup('bug')]
+
+ def _parse_bug_dictionary_from_xml(self, page):
+ soup = BeautifulStoneSoup(page, convertEntities=BeautifulStoneSoup.XML_ENTITIES)
+ bug = {}
+ bug["id"] = int(soup.find("bug_id").string)
+ bug["title"] = self._string_contents(soup.find("short_desc"))
+ bug["bug_status"] = self._string_contents(soup.find("bug_status"))
+ dup_id = soup.find("dup_id")
+ if dup_id:
+ bug["dup_id"] = self._string_contents(dup_id)
+ bug["reporter_email"] = self._string_contents(soup.find("reporter"))
+ bug["assigned_to_email"] = self._string_contents(soup.find("assigned_to"))
+ bug["cc_emails"] = [self._string_contents(element) for element in soup.findAll('cc')]
+ bug["attachments"] = [self._parse_attachment_element(element, bug["id"]) for element in soup.findAll('attachment')]
+ bug["comments"] = [self._parse_log_descr_element(element) for element in soup.findAll('long_desc')]
+
+ return bug
+
+ # Makes testing fetch_*_from_bug() possible until we have a better
+ # BugzillaNetwork abstraction.
+
+ def _fetch_bug_page(self, bug_id):
+ bug_url = self.bug_url_for_bug_id(bug_id, xml=True)
+ log("Fetching: %s" % bug_url)
+ return self.browser.open(bug_url)
+
+ def fetch_bug_dictionary(self, bug_id):
+ try:
+ return self._parse_bug_dictionary_from_xml(self._fetch_bug_page(bug_id))
+ except KeyboardInterrupt:
+ raise
+ except:
+ self.authenticate()
+ return self._parse_bug_dictionary_from_xml(self._fetch_bug_page(bug_id))
+
+ # FIXME: A BugzillaCache object should provide all these fetch_ methods.
+
+ def fetch_bug(self, bug_id):
+ return Bug(self.fetch_bug_dictionary(bug_id), self)
+
+ def fetch_attachment_contents(self, attachment_id):
+ attachment_url = self.attachment_url_for_id(attachment_id)
+ # We need to authenticate to download patches from security bugs.
+ self.authenticate()
+ return self.browser.open(attachment_url).read()
+
+ def _parse_bug_id_from_attachment_page(self, page):
+ # The "Up" relation happens to point to the bug.
+ up_link = BeautifulSoup(page).find('link', rel='Up')
+ if not up_link:
+ # This attachment does not exist (or you don't have permissions to
+ # view it).
+ return None
+ match = re.search("show_bug.cgi\?id=(?P<bug_id>\d+)", up_link['href'])
+ return int(match.group('bug_id'))
+
+ def bug_id_for_attachment_id(self, attachment_id):
+ self.authenticate()
+
+ attachment_url = self.attachment_url_for_id(attachment_id, 'edit')
+ log("Fetching: %s" % attachment_url)
+ page = self.browser.open(attachment_url)
+ return self._parse_bug_id_from_attachment_page(page)
+
+ # FIXME: This should just return Attachment(id), which should be able to
+ # lazily fetch needed data.
+
+ def fetch_attachment(self, attachment_id):
+ # We could grab all the attachment details off of the attachment edit
+ # page but we already have working code to do so off of the bugs page,
+ # so re-use that.
+ bug_id = self.bug_id_for_attachment_id(attachment_id)
+ if not bug_id:
+ return None
+ attachments = self.fetch_bug(bug_id).attachments(include_obsolete=True)
+ for attachment in attachments:
+ if attachment.id() == int(attachment_id):
+ return attachment
+ return None # This should never be hit.
+
+ def authenticate(self):
+ if self.authenticated:
+ return
+
+ if self.dryrun:
+ log("Skipping log in for dry run...")
+ self.authenticated = True
+ return
+
+ credentials = Credentials(config_urls.bug_server_host, git_prefix="bugzilla")
+
+ attempts = 0
+ while not self.authenticated:
+ attempts += 1
+ username, password = credentials.read_credentials()
+
+ log("Logging in as %s..." % username)
+ self.browser.open(config_urls.bug_server_url +
+ "index.cgi?GoAheadAndLogIn=1")
+ self.browser.select_form(name="login")
+ self.browser['Bugzilla_login'] = username
+ self.browser['Bugzilla_password'] = password
+ response = self.browser.submit()
+
+ match = re.search("<title>(.+?)</title>", response.read())
+ # If the resulting page has a title and it contains the word
+ # "invalid", assume it is the login failure page.
+ if match and re.search("Invalid", match.group(1), re.IGNORECASE):
+ errorMessage = "Bugzilla login failed: %s" % match.group(1)
+ # raise an exception only if this was the last attempt
+ if attempts < 5:
+ log(errorMessage)
+ else:
+ raise Exception(errorMessage)
+ else:
+ self.authenticated = True
+ self.username = username
+
+ def _commit_queue_flag(self, mark_for_landing, mark_for_commit_queue):
+ if mark_for_landing:
+ return '+'
+ elif mark_for_commit_queue:
+ return '?'
+ return 'X'
+
+ # FIXME: mark_for_commit_queue and mark_for_landing should be joined into a single commit_flag argument.
+ def _fill_attachment_form(self,
+ description,
+ file_object,
+ mark_for_review=False,
+ mark_for_commit_queue=False,
+ mark_for_landing=False,
+ is_patch=False,
+ filename=None,
+ mimetype=None):
+ self.browser['description'] = description
+ if is_patch:
+ self.browser['ispatch'] = ("1",)
+ # FIXME: Should this use self._find_select_element_for_flag?
+ self.browser['flag_type-1'] = ('?',) if mark_for_review else ('X',)
+ self.browser['flag_type-3'] = (self._commit_queue_flag(mark_for_landing, mark_for_commit_queue),)
+
+ filename = filename or "%s.patch" % timestamp()
+ if not mimetype:
+ mimetypes.add_type('text/plain', '.patch') # Make sure mimetypes knows about .patch
+ mimetype, _ = mimetypes.guess_type(filename)
+ if not mimetype:
+ mimetype = "text/plain" # Bugzilla might auto-guess for us and we might not need this?
+ self.browser.add_file(file_object, mimetype, filename, 'data')
+
+ def _file_object_for_upload(self, file_or_string):
+ if hasattr(file_or_string, 'read'):
+ return file_or_string
+ # Only if file_or_string is not already encoded do we want to encode it.
+ if isinstance(file_or_string, unicode):
+ file_or_string = file_or_string.encode('utf-8')
+ return StringIO.StringIO(file_or_string)
+
+ # timestamp argument is just for unittests.
+ def _filename_for_upload(self, file_object, bug_id, extension="txt", timestamp=timestamp):
+ if hasattr(file_object, "name"):
+ return file_object.name
+ return "bug-%s-%s.%s" % (bug_id, timestamp(), extension)
+
+ def add_attachment_to_bug(self,
+ bug_id,
+ file_or_string,
+ description,
+ filename=None,
+ comment_text=None):
+ self.authenticate()
+ log('Adding attachment "%s" to %s' % (description, self.bug_url_for_bug_id(bug_id)))
+ if self.dryrun:
+ log(comment_text)
+ return
+
+ self.browser.open(self.add_attachment_url(bug_id))
+ self.browser.select_form(name="entryform")
+ file_object = self._file_object_for_upload(file_or_string)
+ filename = filename or self._filename_for_upload(file_object, bug_id)
+ self._fill_attachment_form(description, file_object, filename=filename)
+ if comment_text:
+ log(comment_text)
+ self.browser['comment'] = comment_text
+ self.browser.submit()
+
+ # FIXME: The arguments to this function should be simplified and then
+ # this should be merged into add_attachment_to_bug
+ def add_patch_to_bug(self,
+ bug_id,
+ file_or_string,
+ description,
+ comment_text=None,
+ mark_for_review=False,
+ mark_for_commit_queue=False,
+ mark_for_landing=False):
+ self.authenticate()
+ log('Adding patch "%s" to %s' % (description, self.bug_url_for_bug_id(bug_id)))
+
+ if self.dryrun:
+ log(comment_text)
+ return
+
+ self.browser.open(self.add_attachment_url(bug_id))
+ self.browser.select_form(name="entryform")
+ file_object = self._file_object_for_upload(file_or_string)
+ filename = self._filename_for_upload(file_object, bug_id, extension="patch")
+ self._fill_attachment_form(description,
+ file_object,
+ mark_for_review=mark_for_review,
+ mark_for_commit_queue=mark_for_commit_queue,
+ mark_for_landing=mark_for_landing,
+ is_patch=True,
+ filename=filename)
+ if comment_text:
+ log(comment_text)
+ self.browser['comment'] = comment_text
+ self.browser.submit()
+
+ # FIXME: There has to be a more concise way to write this method.
+ def _check_create_bug_response(self, response_html):
+ match = re.search("<title>Bug (?P<bug_id>\d+) Submitted</title>",
+ response_html)
+ if match:
+ return match.group('bug_id')
+
+ match = re.search(
+ '<div id="bugzilla-body">(?P<error_message>.+)<div id="footer">',
+ response_html,
+ re.DOTALL)
+ error_message = "FAIL"
+ if match:
+ text_lines = BeautifulSoup(
+ match.group('error_message')).findAll(text=True)
+ error_message = "\n" + '\n'.join(
+ [" " + line.strip()
+ for line in text_lines if line.strip()])
+ raise Exception("Bug not created: %s" % error_message)
+
+ def create_bug(self,
+ bug_title,
+ bug_description,
+ component=None,
+ diff=None,
+ patch_description=None,
+ cc=None,
+ blocked=None,
+ assignee=None,
+ mark_for_review=False,
+ mark_for_commit_queue=False):
+ self.authenticate()
+
+ log('Creating bug with title "%s"' % bug_title)
+ if self.dryrun:
+ log(bug_description)
+ # FIXME: This will make some paths fail, as they assume this returns an id.
+ return
+
+ self.browser.open(config_urls.bug_server_url + "enter_bug.cgi?product=WebKit")
+ self.browser.select_form(name="Create")
+ component_items = self.browser.find_control('component').items
+ component_names = map(lambda item: item.name, component_items)
+ if not component:
+ component = "New Bugs"
+ if component not in component_names:
+ component = User.prompt_with_list("Please pick a component:", component_names)
+ self.browser["component"] = [component]
+ if cc:
+ self.browser["cc"] = cc
+ if blocked:
+ self.browser["blocked"] = unicode(blocked)
+ if not assignee:
+ assignee = self.username
+ if assignee and not self.browser.find_control("assigned_to").disabled:
+ self.browser["assigned_to"] = assignee
+ self.browser["short_desc"] = bug_title
+ self.browser["comment"] = bug_description
+
+ if diff:
+ # _fill_attachment_form expects a file-like object
+ # Patch files are already binary, so no encoding needed.
+ assert(isinstance(diff, str))
+ patch_file_object = StringIO.StringIO(diff)
+ self._fill_attachment_form(
+ patch_description,
+ patch_file_object,
+ mark_for_review=mark_for_review,
+ mark_for_commit_queue=mark_for_commit_queue,
+ is_patch=True)
+
+ response = self.browser.submit()
+
+ bug_id = self._check_create_bug_response(response.read())
+ log("Bug %s created." % bug_id)
+ log("%sshow_bug.cgi?id=%s" % (config_urls.bug_server_url, bug_id))
+ return bug_id
+
+ def _find_select_element_for_flag(self, flag_name):
+ # FIXME: This will break if we ever re-order attachment flags
+ if flag_name == "review":
+ return self.browser.find_control(type='select', nr=0)
+ elif flag_name == "commit-queue":
+ return self.browser.find_control(type='select', nr=1)
+ raise Exception("Don't know how to find flag named \"%s\"" % flag_name)
+
+ def clear_attachment_flags(self,
+ attachment_id,
+ additional_comment_text=None):
+ self.authenticate()
+
+ comment_text = "Clearing flags on attachment: %s" % attachment_id
+ if additional_comment_text:
+ comment_text += "\n\n%s" % additional_comment_text
+ log(comment_text)
+
+ if self.dryrun:
+ return
+
+ self.browser.open(self.attachment_url_for_id(attachment_id, 'edit'))
+ self.browser.select_form(nr=1)
+ self.browser.set_value(comment_text, name='comment', nr=0)
+ self._find_select_element_for_flag('review').value = ("X",)
+ self._find_select_element_for_flag('commit-queue').value = ("X",)
+ self.browser.submit()
+
+ def set_flag_on_attachment(self,
+ attachment_id,
+ flag_name,
+ flag_value,
+ comment_text=None,
+ additional_comment_text=None):
+ # FIXME: We need a way to test this function on a live bugzilla
+ # instance.
+
+ self.authenticate()
+
+ # FIXME: additional_comment_text seems useless and should be merged into comment_text.
+ if additional_comment_text:
+ comment_text += "\n\n%s" % additional_comment_text
+ log(comment_text)
+
+ if self.dryrun:
+ return
+
+ self.browser.open(self.attachment_url_for_id(attachment_id, 'edit'))
+ self.browser.select_form(nr=1)
+
+ if comment_text:
+ self.browser.set_value(comment_text, name='comment', nr=0)
+
+ self._find_select_element_for_flag(flag_name).value = (flag_value,)
+ self.browser.submit()
+
+ # FIXME: All of these bug editing methods have a ridiculous amount of
+ # copy/paste code.
+
+ def obsolete_attachment(self, attachment_id, comment_text=None):
+ self.authenticate()
+
+ log("Obsoleting attachment: %s" % attachment_id)
+ if self.dryrun:
+ log(comment_text)
+ return
+
+ self.browser.open(self.attachment_url_for_id(attachment_id, 'edit'))
+ self.browser.select_form(nr=1)
+ self.browser.find_control('isobsolete').items[0].selected = True
+ # Also clear any review flag (to remove it from review/commit queues)
+ self._find_select_element_for_flag('review').value = ("X",)
+ self._find_select_element_for_flag('commit-queue').value = ("X",)
+ if comment_text:
+ log(comment_text)
+ # Bugzilla has two textareas named 'comment'; one is somehow
+ # hidden. We want the first.
+ self.browser.set_value(comment_text, name='comment', nr=0)
+ self.browser.submit()
+
+ def add_cc_to_bug(self, bug_id, email_address_list):
+ self.authenticate()
+
+ log("Adding %s to the CC list for bug %s" % (email_address_list, bug_id))
+ if self.dryrun:
+ return
+
+ self.browser.open(self.bug_url_for_bug_id(bug_id))
+ self.browser.select_form(name="changeform")
+ self.browser["newcc"] = ", ".join(email_address_list)
+ self.browser.submit()
+
+ def post_comment_to_bug(self, bug_id, comment_text, cc=None):
+ self.authenticate()
+
+ log("Adding comment to bug %s" % bug_id)
+ if self.dryrun:
+ log(comment_text)
+ return
+
+ self.browser.open(self.bug_url_for_bug_id(bug_id))
+ self.browser.select_form(name="changeform")
+ self.browser["comment"] = comment_text
+ if cc:
+ self.browser["newcc"] = ", ".join(cc)
+ self.browser.submit()
+
+ def close_bug_as_fixed(self, bug_id, comment_text=None):
+ self.authenticate()
+
+ log("Closing bug %s as fixed" % bug_id)
+ if self.dryrun:
+ log(comment_text)
+ return
+
+ self.browser.open(self.bug_url_for_bug_id(bug_id))
+ self.browser.select_form(name="changeform")
+ if comment_text:
+ self.browser['comment'] = comment_text
+ self.browser['bug_status'] = ['RESOLVED']
+ self.browser['resolution'] = ['FIXED']
+ self.browser.submit()
+
+ def _has_control(self, form, id):
+ return id in [control.id for control in form.controls]
+
+ def reassign_bug(self, bug_id, assignee=None, comment_text=None):
+ self.authenticate()
+
+ if not assignee:
+ assignee = self.username
+
+ log("Assigning bug %s to %s" % (bug_id, assignee))
+ if self.dryrun:
+ log(comment_text)
+ return
+
+ self.browser.open(self.bug_url_for_bug_id(bug_id))
+ self.browser.select_form(name="changeform")
+
+ if not self._has_control(self.browser, "assigned_to"):
+ log("""Failed to assign bug to you (can't find the assigned_to control).
+Do you have EditBugs privileges at bugs.webkit.org?
+https://bugs.webkit.org/userprefs.cgi?tab=permissions
+
+If not, you should email webkit-committers@lists.webkit.org or ask in #webkit
+for someone to add EditBugs to your bugs.webkit.org account.""")
+ return
+
+ if comment_text:
+ log(comment_text)
+ self.browser["comment"] = comment_text
+ self.browser["assigned_to"] = assignee
+ self.browser.submit()
+
+ def reopen_bug(self, bug_id, comment_text):
+ self.authenticate()
+
+ log("Re-opening bug %s" % bug_id)
+ # Bugzilla requires a comment when re-opening a bug, so we know it will
+ # never be None.
+ log(comment_text)
+ if self.dryrun:
+ return
+
+ self.browser.open(self.bug_url_for_bug_id(bug_id))
+ self.browser.select_form(name="changeform")
+ bug_status = self.browser.find_control("bug_status", type="select")
+ # This is a hack around the fact that ClientForm.ListControl seems to
+ # have no simpler way to ask if a control has an item named "REOPENED"
+ # without using exceptions for control flow.
+ possible_bug_statuses = map(lambda item: item.name, bug_status.items)
+ if "REOPENED" in possible_bug_statuses:
+ bug_status.value = ["REOPENED"]
+ # If the bug was never confirmed it will not have a "REOPENED"
+ # state, but only an "UNCONFIRMED" state.
+ elif "UNCONFIRMED" in possible_bug_statuses:
+ bug_status.value = ["UNCONFIRMED"]
+ else:
+ # FIXME: This logic is slightly backwards. We won't print this
+ # message if the bug is already open with state "UNCONFIRMED".
+ log("Did not reopen bug %s, it appears to already be open with status %s." % (bug_id, bug_status.value))
+ self.browser['comment'] = comment_text
+ self.browser.submit()
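
Bugzilla._parse_date() above normalizes the timestamps emitted by show_bug.cgi?ctype=xml (for example "2010-01-20 14:31 PST"): missing seconds are padded in and the timezone token is dropped, since the code assumes bugs.webkit.org always reports PST. An illustrative sketch (not part of the imported commit):

    from webkitpy.common.net.bugzilla.bugzilla import Bugzilla

    print Bugzilla._parse_date("2010-01-20 14:31 PST")      # -> datetime(2010, 1, 20, 14, 31)
    print Bugzilla._parse_date("2011-06-11 09:04:03 PDT")   # seconds are kept when present
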
diff --git a/Tools/Scripts/webkitpy/common/net/bugzilla/bugzilla_mock.py b/Tools/Scripts/webkitpy/common/net/bugzilla/bugzilla_mock.py
new file mode 100644
index 000000000..47f867e03
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/bugzilla/bugzilla_mock.py
@@ -0,0 +1,397 @@
+# Copyright (C) 2011 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import datetime
+
+from .bug import Bug
+from .attachment import Attachment
+from webkitpy.common.config.committers import CommitterList, Reviewer
+
+from webkitpy.common.system.deprecated_logging import log
+
+
+def _id_to_object_dictionary(*objects):
+ dictionary = {}
+ for thing in objects:
+ dictionary[thing["id"]] = thing
+ return dictionary
+
+# Testing
+
+
+_patch1 = {
+ "id": 10000,
+ "bug_id": 50000,
+ "url": "http://example.com/10000",
+ "name": "Patch1",
+ "is_obsolete": False,
+ "is_patch": True,
+ "review": "+",
+ "reviewer_email": "foo@bar.com",
+ "commit-queue": "+",
+ "committer_email": "foo@bar.com",
+ "attacher_email": "Contributer1",
+}
+
+
+_patch2 = {
+ "id": 10001,
+ "bug_id": 50000,
+ "url": "http://example.com/10001",
+ "name": "Patch2",
+ "is_obsolete": False,
+ "is_patch": True,
+ "review": "+",
+ "reviewer_email": "reviewer2@webkit.org",
+ "commit-queue": "+",
+ "committer_email": "non-committer@example.com",
+ "attacher_email": "eric@webkit.org",
+}
+
+
+_patch3 = {
+ "id": 10002,
+ "bug_id": 50001,
+ "url": "http://example.com/10002",
+ "name": "Patch3",
+ "is_obsolete": False,
+ "is_patch": True,
+ "review": "?",
+ "attacher_email": "eric@webkit.org",
+}
+
+
+_patch4 = {
+ "id": 10003,
+ "bug_id": 50003,
+ "url": "http://example.com/10002",
+ "name": "Patch3",
+ "is_obsolete": False,
+ "is_patch": True,
+ "review": "+",
+ "commit-queue": "?",
+ "reviewer_email": "foo@bar.com",
+ "attacher_email": "Contributer2",
+}
+
+
+_patch5 = {
+ "id": 10004,
+ "bug_id": 50003,
+ "url": "http://example.com/10002",
+ "name": "Patch5",
+ "is_obsolete": False,
+ "is_patch": True,
+ "review": "+",
+ "reviewer_email": "foo@bar.com",
+ "attacher_email": "eric@webkit.org",
+}
+
+
+_patch6 = { # Valid committer, but no reviewer.
+ "id": 10005,
+ "bug_id": 50003,
+ "url": "http://example.com/10002",
+ "name": "ROLLOUT of r3489",
+ "is_obsolete": False,
+ "is_patch": True,
+ "commit-queue": "+",
+ "committer_email": "foo@bar.com",
+ "attacher_email": "eric@webkit.org",
+}
+
+
+_patch7 = { # Valid review, patch is marked obsolete.
+ "id": 10006,
+ "bug_id": 50002,
+ "url": "http://example.com/10002",
+ "name": "Patch7",
+ "is_obsolete": True,
+ "is_patch": True,
+ "review": "+",
+ "reviewer_email": "foo@bar.com",
+ "attacher_email": "eric@webkit.org",
+}
+
+
+# This matches one of Bug.unassigned_emails
+_unassigned_email = "webkit-unassigned@lists.webkit.org"
+# This is needed for the FlakyTestReporter to believe the bug
+# was filed by one of the webkitpy bots.
+_commit_queue_email = "commit-queue@webkit.org"
+
+
+_bug1 = {
+ "id": 50000,
+ "title": "Bug with two r+'d and cq+'d patches, one of which has an "
+ "invalid commit-queue setter.",
+ "reporter_email": "foo@foo.com",
+ "assigned_to_email": _unassigned_email,
+ "cc_emails": [],
+ "attachments": [_patch1, _patch2],
+ "bug_status": "UNCONFIRMED",
+ "comments": [],
+}
+
+
+_bug2 = {
+ "id": 50001,
+ "title": "Bug with a patch needing review.",
+ "reporter_email": "eric@webkit.org",
+ "assigned_to_email": "foo@foo.com",
+ "cc_emails": ["abarth@webkit.org", ],
+ "attachments": [_patch3],
+ "bug_status": "ASSIGNED",
+ "comments": [{"comment_date": datetime.datetime(2011, 6, 11, 9, 4, 3),
+ "comment_email": "bar@foo.com",
+ "text": "Message1.",
+ },
+ ],
+}
+
+
+_bug3 = {
+ "id": 50002,
+ "title": "The third bug",
+ "reporter_email": "foo@foo.com",
+ "assigned_to_email": _unassigned_email,
+ "cc_emails": [],
+ "attachments": [_patch7],
+ "bug_status": "NEW",
+ "comments": [],
+}
+
+
+_bug4 = {
+ "id": 50003,
+ "title": "The fourth bug",
+ "reporter_email": "foo@foo.com",
+ "assigned_to_email": "foo@foo.com",
+ "cc_emails": [],
+ "attachments": [_patch4, _patch5, _patch6],
+ "bug_status": "REOPENED",
+ "comments": [],
+}
+
+
+_bug5 = {
+ "id": 50004,
+ "title": "The fifth bug",
+ "reporter_email": _commit_queue_email,
+ "assigned_to_email": "foo@foo.com",
+ "cc_emails": [],
+ "attachments": [],
+ "bug_status": "RESOLVED",
+ "dup_id": 50002,
+ "comments": [],
+}
+
+
+class MockBugzillaQueries(object):
+
+ def __init__(self, bugzilla):
+ self._bugzilla = bugzilla
+
+ def _all_bugs(self):
+ return map(lambda bug_dictionary: Bug(bug_dictionary, self._bugzilla),
+ self._bugzilla.bug_cache.values())
+
+ def fetch_bug_ids_from_commit_queue(self):
+ bugs_with_commit_queued_patches = filter(
+ lambda bug: bug.commit_queued_patches(),
+ self._all_bugs())
+ return map(lambda bug: bug.id(), bugs_with_commit_queued_patches)
+
+ def fetch_attachment_ids_from_review_queue(self):
+ unreviewed_patches = sum([bug.unreviewed_patches()
+ for bug in self._all_bugs()], [])
+ return map(lambda patch: patch.id(), unreviewed_patches)
+
+ def fetch_patches_from_commit_queue(self):
+ return sum([bug.commit_queued_patches()
+ for bug in self._all_bugs()], [])
+
+ def fetch_bug_ids_from_pending_commit_list(self):
+ bugs_with_reviewed_patches = filter(lambda bug: bug.reviewed_patches(),
+ self._all_bugs())
+ bug_ids = map(lambda bug: bug.id(), bugs_with_reviewed_patches)
+ # NOTE: This manual hack is here to allow testing logging in
+ # test_assign_to_committer; the real pending-commit query on bugzilla
+ # will return bugs with patches which have r+ but are also obsolete.
+ return bug_ids + [50002]
+
+ def fetch_patches_from_pending_commit_list(self):
+ return sum([bug.reviewed_patches() for bug in self._all_bugs()], [])
+
+ def fetch_bugs_matching_search(self, search_string, author_email=None):
+ return [self._bugzilla.fetch_bug(50004), self._bugzilla.fetch_bug(50003)]
+
+
+_mock_reviewers = [Reviewer("Foo Bar", "foo@bar.com"),
+ Reviewer("Reviewer2", "reviewer2@webkit.org")]
+
+
+# FIXME: Bugzilla is the wrong Mock-point. Once we have a BugzillaNetwork
+# class we should mock that instead.
+# Most of this class is just copy/paste from Bugzilla.
+class MockBugzilla(object):
+
+ bug_server_url = "http://example.com"
+
+ bug_cache = _id_to_object_dictionary(_bug1, _bug2, _bug3, _bug4, _bug5)
+
+ attachment_cache = _id_to_object_dictionary(_patch1,
+ _patch2,
+ _patch3,
+ _patch4,
+ _patch5,
+ _patch6,
+ _patch7)
+
+ def __init__(self):
+ self.queries = MockBugzillaQueries(self)
+ # FIXME: This should move onto the Host object, and we should use a MockCommitterList
+ self.committers = CommitterList(reviewers=_mock_reviewers)
+ self._override_patch = None
+
+ def create_bug(self,
+ bug_title,
+ bug_description,
+ component=None,
+ diff=None,
+ patch_description=None,
+ cc=None,
+ blocked=None,
+ mark_for_review=False,
+ mark_for_commit_queue=False):
+ log("MOCK create_bug")
+ log("bug_title: %s" % bug_title)
+ log("bug_description: %s" % bug_description)
+ if component:
+ log("component: %s" % component)
+ if cc:
+ log("cc: %s" % cc)
+ if blocked:
+ log("blocked: %s" % blocked)
+ return 50004
+
+ def quips(self):
+ return ["Good artists copy. Great artists steal. - Pablo Picasso"]
+
+ def fetch_bug(self, bug_id):
+ return Bug(self.bug_cache.get(int(bug_id)), self)
+
+ def set_override_patch(self, patch):
+ self._override_patch = patch
+
+ def fetch_attachment(self, attachment_id):
+ if self._override_patch:
+ return self._override_patch
+
+ attachment_dictionary = self.attachment_cache.get(attachment_id)
+ if not attachment_dictionary:
+ print "MOCK: fetch_attachment: %s is not a known attachment id" % attachment_id
+ return None
+ bug = self.fetch_bug(attachment_dictionary["bug_id"])
+ for attachment in bug.attachments(include_obsolete=True):
+ if attachment.id() == int(attachment_id):
+ return attachment
+
+ def bug_url_for_bug_id(self, bug_id):
+ return "%s/%s" % (self.bug_server_url, bug_id)
+
+ def fetch_bug_dictionary(self, bug_id):
+ return self.bug_cache.get(bug_id)
+
+ def attachment_url_for_id(self, attachment_id, action="view"):
+ action_param = ""
+ if action and action != "view":
+ action_param = "&action=%s" % action
+ return "%s/%s%s" % (self.bug_server_url, attachment_id, action_param)
+
+ def reassign_bug(self, bug_id, assignee=None, comment_text=None):
+ log("MOCK reassign_bug: bug_id=%s, assignee=%s" % (bug_id, assignee))
+ if comment_text:
+ log("-- Begin comment --")
+ log(comment_text)
+ log("-- End comment --")
+
+ def set_flag_on_attachment(self,
+ attachment_id,
+ flag_name,
+ flag_value,
+ comment_text=None,
+ additional_comment_text=None):
+ log("MOCK setting flag '%s' to '%s' on attachment '%s' with comment '%s' and additional comment '%s'" % (
+ flag_name, flag_value, attachment_id, comment_text, additional_comment_text))
+
+ def post_comment_to_bug(self, bug_id, comment_text, cc=None):
+ log("MOCK bug comment: bug_id=%s, cc=%s\n--- Begin comment ---\n%s\n--- End comment ---\n" % (
+ bug_id, cc, comment_text))
+
+ def add_attachment_to_bug(self,
+ bug_id,
+ file_or_string,
+ description,
+ filename=None,
+ comment_text=None):
+ log("MOCK add_attachment_to_bug: bug_id=%s, description=%s filename=%s" % (bug_id, description, filename))
+ if comment_text:
+ log("-- Begin comment --")
+ log(comment_text)
+ log("-- End comment --")
+
+ def add_patch_to_bug(self,
+ bug_id,
+ diff,
+ description,
+ comment_text=None,
+ mark_for_review=False,
+ mark_for_commit_queue=False,
+ mark_for_landing=False):
+ log("MOCK add_patch_to_bug: bug_id=%s, description=%s, mark_for_review=%s, mark_for_commit_queue=%s, mark_for_landing=%s" %
+ (bug_id, description, mark_for_review, mark_for_commit_queue, mark_for_landing))
+ if comment_text:
+ log("-- Begin comment --")
+ log(comment_text)
+ log("-- End comment --")
+
+ def add_cc_to_bug(self, bug_id, ccs):
+ pass
+
+ def obsolete_attachment(self, attachment_id, message=None):
+ pass
+
+ def reopen_bug(self, bug_id, message):
+ log("MOCK reopen_bug %s with comment '%s'" % (bug_id, message))
+
+ def close_bug_as_fixed(self, bug_id, message):
+ pass
+
+ def clear_attachment_flags(self, attachment_id, message):
+ pass
diff --git a/Tools/Scripts/webkitpy/common/net/bugzilla/bugzilla_unittest.py b/Tools/Scripts/webkitpy/common/net/bugzilla/bugzilla_unittest.py
new file mode 100644
index 000000000..de96fa160
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/bugzilla/bugzilla_unittest.py
@@ -0,0 +1,518 @@
+# Copyright (C) 2011 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import unittest
+import datetime
+import StringIO
+
+from .bugzilla import Bugzilla, BugzillaQueries, EditUsersParser
+
+from webkitpy.common.checkout.changelog import parse_bug_id
+from webkitpy.common.system.outputcapture import OutputCapture
+from webkitpy.common.net.web_mock import MockBrowser
+from webkitpy.thirdparty.mock import Mock
+from webkitpy.thirdparty.BeautifulSoup import BeautifulSoup
+
+
+class BugzillaTest(unittest.TestCase):
+ _example_attachment = '''
+ <attachment
+ isobsolete="1"
+ ispatch="1"
+ isprivate="0"
+ >
+ <attachid>33721</attachid>
+ <date>2009-07-29 10:23 PDT</date>
+ <desc>Fixed whitespace issue</desc>
+ <filename>patch</filename>
+ <type>text/plain</type>
+ <size>9719</size>
+ <attacher>christian.plesner.hansen@gmail.com</attacher>
+ <flag name="review"
+ id="17931"
+ status="+"
+ setter="one@test.com"
+ />
+ <flag name="commit-queue"
+ id="17932"
+ status="+"
+ setter="two@test.com"
+ />
+ </attachment>
+'''
+ _expected_example_attachment_parsing = {
+ 'attach_date': datetime.datetime(2009, 07, 29, 10, 23),
+ 'bug_id' : 100,
+ 'is_obsolete' : True,
+ 'is_patch' : True,
+ 'id' : 33721,
+ 'url' : "https://bugs.webkit.org/attachment.cgi?id=33721",
+ 'name' : "Fixed whitespace issue",
+ 'type' : "text/plain",
+ 'review' : '+',
+ 'reviewer_email' : 'one@test.com',
+ 'commit-queue' : '+',
+ 'committer_email' : 'two@test.com',
+ 'attacher_email' : 'christian.plesner.hansen@gmail.com',
+ }
+
+ def test_url_creation(self):
+        # FIXME: These would all be better as doctests
+ bugs = Bugzilla()
+ self.assertEquals(None, bugs.bug_url_for_bug_id(None))
+ self.assertEquals(None, bugs.short_bug_url_for_bug_id(None))
+ self.assertEquals(None, bugs.attachment_url_for_id(None))
+
+ def test_parse_bug_id(self):
+        # FIXME: These would all be better as doctests
+ bugs = Bugzilla()
+ self.assertEquals(12345, parse_bug_id("http://webkit.org/b/12345"))
+ self.assertEquals(12345, parse_bug_id("http://bugs.webkit.org/show_bug.cgi?id=12345"))
+ self.assertEquals(12345, parse_bug_id(bugs.short_bug_url_for_bug_id(12345)))
+ self.assertEquals(12345, parse_bug_id(bugs.bug_url_for_bug_id(12345)))
+ self.assertEquals(12345, parse_bug_id(bugs.bug_url_for_bug_id(12345, xml=True)))
+
+ # Our bug parser is super-fragile, but at least we're testing it.
+ self.assertEquals(None, parse_bug_id("http://www.webkit.org/b/12345"))
+ self.assertEquals(None, parse_bug_id("http://bugs.webkit.org/show_bug.cgi?ctype=xml&id=12345"))
+
+ _bug_xml = """
+ <bug>
+ <bug_id>32585</bug_id>
+ <creation_ts>2009-12-15 15:17 PST</creation_ts>
+ <short_desc>bug to test webkit-patch&apos;s and commit-queue&apos;s failures</short_desc>
+ <delta_ts>2009-12-27 21:04:50 PST</delta_ts>
+ <reporter_accessible>1</reporter_accessible>
+ <cclist_accessible>1</cclist_accessible>
+ <classification_id>1</classification_id>
+ <classification>Unclassified</classification>
+ <product>WebKit</product>
+ <component>Tools / Tests</component>
+ <version>528+ (Nightly build)</version>
+ <rep_platform>PC</rep_platform>
+ <op_sys>Mac OS X 10.5</op_sys>
+ <bug_status>NEW</bug_status>
+ <priority>P2</priority>
+ <bug_severity>Normal</bug_severity>
+ <target_milestone>---</target_milestone>
+ <everconfirmed>1</everconfirmed>
+ <reporter name="Eric Seidel">eric@webkit.org</reporter>
+ <assigned_to name="Nobody">webkit-unassigned@lists.webkit.org</assigned_to>
+ <cc>foo@bar.com</cc>
+ <cc>example@example.com</cc>
+ <long_desc isprivate="0">
+ <who name="Eric Seidel">eric@webkit.org</who>
+ <bug_when>2009-12-15 15:17:28 PST</bug_when>
+ <thetext>bug to test webkit-patch and commit-queue failures
+
+Ignore this bug. Just for testing failure modes of webkit-patch and the commit-queue.</thetext>
+ </long_desc>
+ <attachment
+ isobsolete="0"
+ ispatch="1"
+ isprivate="0"
+ >
+ <attachid>45548</attachid>
+ <date>2009-12-27 23:51 PST</date>
+ <desc>Patch</desc>
+ <filename>bug-32585-20091228005112.patch</filename>
+ <type>text/plain</type>
+ <size>10882</size>
+ <attacher>mjs@apple.com</attacher>
+
+ <token>1261988248-dc51409e9c421a4358f365fa8bec8357</token>
+ <data encoding="base64">SW5kZXg6IFdlYktpdC9tYWMvQ2hhbmdlTG9nCj09PT09PT09PT09PT09PT09PT09PT09PT09PT09
+removed-because-it-was-really-long
+ZEZpbmlzaExvYWRXaXRoUmVhc29uOnJlYXNvbl07Cit9CisKIEBlbmQKIAogI2VuZGlmCg==
+</data>
+
+ <flag name="review"
+ id="27602"
+ status="?"
+ setter="mjs@apple.com"
+ />
+ </attachment>
+ </bug>
+"""
+
+ _single_bug_xml = """
+<?xml version="1.0" encoding="UTF-8" standalone="yes" ?>
+<!DOCTYPE bugzilla SYSTEM "https://bugs.webkit.org/bugzilla.dtd">
+<bugzilla version="3.2.3"
+ urlbase="https://bugs.webkit.org/"
+ maintainer="admin@webkit.org"
+ exporter="eric@webkit.org"
+>
+%s
+</bugzilla>
+""" % _bug_xml
+
+ _expected_example_bug_parsing = {
+ "id" : 32585,
+ "title" : u"bug to test webkit-patch's and commit-queue's failures",
+ "cc_emails" : ["foo@bar.com", "example@example.com"],
+ "reporter_email" : "eric@webkit.org",
+ "assigned_to_email" : "webkit-unassigned@lists.webkit.org",
+ "bug_status": "NEW",
+ "attachments" : [{
+ "attach_date": datetime.datetime(2009, 12, 27, 23, 51),
+ 'name': u'Patch',
+ 'url' : "https://bugs.webkit.org/attachment.cgi?id=45548",
+ 'is_obsolete': False,
+ 'review': '?',
+ 'is_patch': True,
+ 'attacher_email': 'mjs@apple.com',
+ 'bug_id': 32585,
+ 'type': 'text/plain',
+ 'id': 45548
+ }],
+ "comments" : [{
+ 'comment_date': datetime.datetime(2009, 12, 15, 15, 17, 28),
+ 'comment_email': 'eric@webkit.org',
+ 'text': """bug to test webkit-patch and commit-queue failures
+
+Ignore this bug. Just for testing failure modes of webkit-patch and the commit-queue.""",
+ }]
+ }
+
+ # FIXME: This should move to a central location and be shared by more unit tests.
+ def _assert_dictionaries_equal(self, actual, expected):
+ # Make sure we aren't parsing more or less than we expect
+ self.assertEquals(sorted(actual.keys()), sorted(expected.keys()))
+
+ for key, expected_value in expected.items():
+ self.assertEquals(actual[key], expected_value, ("Failure for key: %s: Actual='%s' Expected='%s'" % (key, actual[key], expected_value)))
+
+ def test_parse_bug_dictionary_from_xml(self):
+ bug = Bugzilla()._parse_bug_dictionary_from_xml(self._single_bug_xml)
+ self._assert_dictionaries_equal(bug, self._expected_example_bug_parsing)
+
+ _sample_multi_bug_xml = """
+<bugzilla version="3.2.3" urlbase="https://bugs.webkit.org/" maintainer="admin@webkit.org" exporter="eric@webkit.org">
+ %s
+ %s
+</bugzilla>
+""" % (_bug_xml, _bug_xml)
+
+ def test_parse_bugs_from_xml(self):
+ bugzilla = Bugzilla()
+ bugs = bugzilla._parse_bugs_from_xml(self._sample_multi_bug_xml)
+ self.assertEquals(len(bugs), 2)
+ self.assertEquals(bugs[0].id(), self._expected_example_bug_parsing['id'])
+ bugs = bugzilla._parse_bugs_from_xml("")
+ self.assertEquals(len(bugs), 0)
+
+ # This could be combined into test_bug_parsing later if desired.
+ def test_attachment_parsing(self):
+ bugzilla = Bugzilla()
+ soup = BeautifulSoup(self._example_attachment)
+ attachment_element = soup.find("attachment")
+ attachment = bugzilla._parse_attachment_element(attachment_element, self._expected_example_attachment_parsing['bug_id'])
+ self.assertTrue(attachment)
+ self._assert_dictionaries_equal(attachment, self._expected_example_attachment_parsing)
+
+ _sample_attachment_detail_page = """
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
+ "http://www.w3.org/TR/html4/loose.dtd">
+<html>
+ <head>
+ <title>
+ Attachment 41073 Details for Bug 27314</title>
+<link rel="Top" href="https://bugs.webkit.org/">
+ <link rel="Up" href="show_bug.cgi?id=27314">
+"""
+
+ def test_attachment_detail_bug_parsing(self):
+ bugzilla = Bugzilla()
+ self.assertEquals(27314, bugzilla._parse_bug_id_from_attachment_page(self._sample_attachment_detail_page))
+
+ def test_add_cc_to_bug(self):
+ bugzilla = Bugzilla()
+ bugzilla.browser = MockBrowser()
+ bugzilla.authenticate = lambda: None
+ expected_stderr = "Adding ['adam@example.com'] to the CC list for bug 42\n"
+ OutputCapture().assert_outputs(self, bugzilla.add_cc_to_bug, [42, ["adam@example.com"]], expected_stderr=expected_stderr)
+
+ def _mock_control_item(self, name):
+ mock_item = Mock()
+ mock_item.name = name
+ return mock_item
+
+ def _mock_find_control(self, item_names=[], selected_index=0):
+ mock_control = Mock()
+ mock_control.items = [self._mock_control_item(name) for name in item_names]
+ mock_control.value = [item_names[selected_index]] if item_names else None
+ return lambda name, type: mock_control
+
+ def _assert_reopen(self, item_names=None, selected_index=None, extra_stderr=None):
+ bugzilla = Bugzilla()
+ bugzilla.browser = MockBrowser()
+ bugzilla.authenticate = lambda: None
+
+ mock_find_control = self._mock_find_control(item_names, selected_index)
+ bugzilla.browser.find_control = mock_find_control
+ expected_stderr = "Re-opening bug 42\n['comment']\n"
+ if extra_stderr:
+ expected_stderr += extra_stderr
+ OutputCapture().assert_outputs(self, bugzilla.reopen_bug, [42, ["comment"]], expected_stderr=expected_stderr)
+
+ def test_reopen_bug(self):
+ self._assert_reopen(item_names=["REOPENED", "RESOLVED", "CLOSED"], selected_index=1)
+ self._assert_reopen(item_names=["UNCONFIRMED", "RESOLVED", "CLOSED"], selected_index=1)
+ extra_stderr = "Did not reopen bug 42, it appears to already be open with status ['NEW'].\n"
+ self._assert_reopen(item_names=["NEW", "RESOLVED"], selected_index=0, extra_stderr=extra_stderr)
+
+ def test_file_object_for_upload(self):
+ bugzilla = Bugzilla()
+ file_object = StringIO.StringIO()
+ unicode_tor = u"WebKit \u2661 Tor Arne Vestb\u00F8!"
+ utf8_tor = unicode_tor.encode("utf-8")
+ self.assertEqual(bugzilla._file_object_for_upload(file_object), file_object)
+ self.assertEqual(bugzilla._file_object_for_upload(utf8_tor).read(), utf8_tor)
+ self.assertEqual(bugzilla._file_object_for_upload(unicode_tor).read(), utf8_tor)
+
+ def test_filename_for_upload(self):
+ bugzilla = Bugzilla()
+ mock_file = Mock()
+ mock_file.name = "foo"
+ self.assertEqual(bugzilla._filename_for_upload(mock_file, 1234), 'foo')
+ mock_timestamp = lambda: "now"
+ filename = bugzilla._filename_for_upload(StringIO.StringIO(), 1234, extension="patch", timestamp=mock_timestamp)
+ self.assertEqual(filename, "bug-1234-now.patch")
+
+
+class BugzillaQueriesTest(unittest.TestCase):
+ _sample_request_page = """
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
+ "http://www.w3.org/TR/html4/loose.dtd">
+<html>
+ <head>
+ <title>Request Queue</title>
+ </head>
+<body>
+
+<h3>Flag: review</h3>
+ <table class="requests" cellspacing="0" cellpadding="4" border="1">
+ <tr>
+ <th>Requester</th>
+ <th>Requestee</th>
+ <th>Bug</th>
+ <th>Attachment</th>
+ <th>Created</th>
+ </tr>
+ <tr>
+ <td>Shinichiro Hamaji &lt;hamaji&#64;chromium.org&gt;</td>
+ <td></td>
+ <td><a href="show_bug.cgi?id=30015">30015: text-transform:capitalize is failing in CSS2.1 test suite</a></td>
+ <td><a href="attachment.cgi?id=40511&amp;action=review">
+40511: Patch v0</a></td>
+ <td>2009-10-02 04:58 PST</td>
+ </tr>
+ <tr>
+ <td>Zan Dobersek &lt;zandobersek&#64;gmail.com&gt;</td>
+ <td></td>
+ <td><a href="show_bug.cgi?id=26304">26304: [GTK] Add controls for playing html5 video.</a></td>
+ <td><a href="attachment.cgi?id=40722&amp;action=review">
+40722: Media controls, the simple approach</a></td>
+ <td>2009-10-06 09:13 PST</td>
+ </tr>
+ <tr>
+ <td>Zan Dobersek &lt;zandobersek&#64;gmail.com&gt;</td>
+ <td></td>
+ <td><a href="show_bug.cgi?id=26304">26304: [GTK] Add controls for playing html5 video.</a></td>
+ <td><a href="attachment.cgi?id=40723&amp;action=review">
+40723: Adjust the media slider thumb size</a></td>
+ <td>2009-10-06 09:15 PST</td>
+ </tr>
+ </table>
+</body>
+</html>
+"""
+ _sample_quip_page = u"""
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
+ "http://www.w3.org/TR/html4/loose.dtd">
+<html>
+ <head>
+ <title>Bugzilla Quip System</title>
+ </head>
+ <body>
+ <h2>
+
+ Existing quips:
+ </h2>
+ <ul>
+ <li>Everything should be made as simple as possible, but not simpler. - Albert Einstein</li>
+ <li>Good artists copy. Great artists steal. - Pablo Picasso</li>
+ <li>\u00e7gua mole em pedra dura, tanto bate at\u008e que fura.</li>
+
+ </ul>
+ </body>
+</html>
+"""
+
+ def _assert_result_count(self, queries, html, count):
+ self.assertEquals(queries._parse_result_count(html), count)
+
+ def test_parse_result_count(self):
+ queries = BugzillaQueries(None)
+        # Pages with results always list the count at least twice.
+ self._assert_result_count(queries, '<span class="bz_result_count">314 bugs found.</span><span class="bz_result_count">314 bugs found.</span>', 314)
+ self._assert_result_count(queries, '<span class="bz_result_count">Zarro Boogs found.</span>', 0)
+ self._assert_result_count(queries, '<span class="bz_result_count">\n \nOne bug found.</span>', 1)
+ self.assertRaises(Exception, queries._parse_result_count, ['Invalid'])
+
+ def test_request_page_parsing(self):
+ queries = BugzillaQueries(None)
+ self.assertEquals([40511, 40722, 40723], queries._parse_attachment_ids_request_query(self._sample_request_page))
+
+ def test_quip_page_parsing(self):
+ queries = BugzillaQueries(None)
+ expected_quips = ["Everything should be made as simple as possible, but not simpler. - Albert Einstein", "Good artists copy. Great artists steal. - Pablo Picasso", u"\u00e7gua mole em pedra dura, tanto bate at\u008e que fura."]
+ self.assertEquals(expected_quips, queries._parse_quips(self._sample_quip_page))
+
+ def test_load_query(self):
+ queries = BugzillaQueries(Mock())
+ queries._load_query("request.cgi?action=queue&type=review&group=type")
+
+
+class EditUsersParserTest(unittest.TestCase):
+ _example_user_results = """
+ <div id="bugzilla-body">
+ <p>1 user found.</p>
+ <table id="admin_table" border="1" cellpadding="4" cellspacing="0">
+ <tr bgcolor="#6666FF">
+ <th align="left">Edit user...
+ </th>
+ <th align="left">Real name
+ </th>
+ <th align="left">Account History
+ </th>
+ </tr>
+ <tr>
+ <td >
+ <a href="editusers.cgi?action=edit&amp;userid=1234&amp;matchvalue=login_name&amp;groupid=&amp;grouprestrict=&amp;matchtype=substr&amp;matchstr=abarth%40webkit.org">
+ abarth&#64;webkit.org
+ </a>
+ </td>
+ <td >
+ Adam Barth
+ </td>
+ <td >
+ <a href="editusers.cgi?action=activity&amp;userid=1234&amp;matchvalue=login_name&amp;groupid=&amp;grouprestrict=&amp;matchtype=substr&amp;matchstr=abarth%40webkit.org">
+ View
+ </a>
+ </td>
+ </tr>
+ </table>
+ """
+
+ _example_empty_user_results = """
+ <div id="bugzilla-body">
+ <p>0 users found.</p>
+ <table id="admin_table" border="1" cellpadding="4" cellspacing="0">
+ <tr bgcolor="#6666FF">
+ <th align="left">Edit user...
+ </th>
+ <th align="left">Real name
+ </th>
+ <th align="left">Account History
+ </th>
+ </tr>
+ <tr><td colspan="3" align="center"><i>&lt;none&gt;</i></td></tr>
+ </table>
+ """
+
+ def _assert_login_userid_pairs(self, results_page, expected_logins):
+ parser = EditUsersParser()
+ logins = parser.login_userid_pairs_from_edit_user_results(results_page)
+ self.assertEquals(logins, expected_logins)
+
+ def test_logins_from_editusers_results(self):
+ self._assert_login_userid_pairs(self._example_user_results, [("abarth@webkit.org", 1234)])
+ self._assert_login_userid_pairs(self._example_empty_user_results, [])
+
+ _example_user_page = """<table class="main"><tr>
+ <th><label for="login">Login name:</label></th>
+ <td>eric&#64;webkit.org
+ </td>
+</tr>
+<tr>
+ <th><label for="name">Real name:</label></th>
+ <td>Eric Seidel
+ </td>
+</tr>
+ <tr>
+ <th>Group access:</th>
+ <td>
+ <table class="groups">
+ <tr>
+ </tr>
+ <tr>
+ <th colspan="2">User is a member of these groups</th>
+ </tr>
+ <tr class="direct">
+ <td class="checkbox"><input type="checkbox"
+ id="group_7"
+ name="group_7"
+ value="1" checked="checked" /></td>
+ <td class="groupname">
+ <label for="group_7">
+ <strong>canconfirm:</strong>
+ Can confirm a bug.
+ </label>
+ </td>
+ </tr>
+ <tr class="direct">
+ <td class="checkbox"><input type="checkbox"
+ id="group_6"
+ name="group_6"
+ value="1" /></td>
+ <td class="groupname">
+ <label for="group_6">
+ <strong>editbugs:</strong>
+ Can edit all aspects of any bug.
+                    </label>
+ </td>
+ </tr>
+ </table>
+ </td>
+ </tr>
+
+ <tr>
+ <th>Product responsibilities:</th>
+ <td>
+ <em>none</em>
+ </td>
+ </tr>
+</table>"""
+
+ def test_user_dict_from_edit_user_page(self):
+ parser = EditUsersParser()
+ user_dict = parser.user_dict_from_edit_user_page(self._example_user_page)
+ expected_user_dict = {u'login': u'eric@webkit.org', u'groups': set(['canconfirm']), u'name': u'Eric Seidel'}
+ self.assertEqual(expected_user_dict, user_dict)
diff --git a/Tools/Scripts/webkitpy/common/net/buildbot/__init__.py b/Tools/Scripts/webkitpy/common/net/buildbot/__init__.py
new file mode 100644
index 000000000..631ef6ba7
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/buildbot/__init__.py
@@ -0,0 +1,5 @@
+# Required for Python to search this directory for module files
+
+# We only export public API here.
+# It's unclear if Builder and Build need to be public.
+from .buildbot import BuildBot, Builder, Build
diff --git a/Tools/Scripts/webkitpy/common/net/buildbot/buildbot.py b/Tools/Scripts/webkitpy/common/net/buildbot/buildbot.py
new file mode 100644
index 000000000..7581925b0
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/buildbot/buildbot.py
@@ -0,0 +1,494 @@
+# Copyright (c) 2009, Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# WebKit's Python module for interacting with WebKit's buildbot
+
+try:
+ import json
+except ImportError:
+ # python 2.5 compatibility
+ import webkitpy.thirdparty.simplejson as json
+
+import operator
+import re
+import urllib
+import urllib2
+
+import webkitpy.common.config.urls as config_urls
+from webkitpy.common.net.failuremap import FailureMap
+from webkitpy.common.net.layouttestresults import LayoutTestResults
+from webkitpy.common.net.networktransaction import NetworkTransaction
+from webkitpy.common.net.regressionwindow import RegressionWindow
+from webkitpy.common.system.logutils import get_logger
+from webkitpy.thirdparty.BeautifulSoup import BeautifulSoup
+
+
+_log = get_logger(__file__)
+
+
+class Builder(object):
+ def __init__(self, name, buildbot):
+ self._name = name
+ self._buildbot = buildbot
+ self._builds_cache = {}
+ self._revision_to_build_number = None
+ from webkitpy.thirdparty.autoinstalled.mechanize import Browser
+ self._browser = Browser()
+ self._browser.set_handle_robots(False) # The builder pages are excluded by robots.txt
+
+ def name(self):
+ return self._name
+
+ def results_url(self):
+ return "%s/results/%s" % (self._buildbot.buildbot_url, self.url_encoded_name())
+
+ # In addition to per-build results, the build.chromium.org builders also
+ # keep a directory that accumulates test results over many runs.
+ def accumulated_results_url(self):
+ return None
+
+ def url_encoded_name(self):
+ return urllib.quote(self._name)
+
+ def url(self):
+ return "%s/builders/%s" % (self._buildbot.buildbot_url, self.url_encoded_name())
+
+ # This provides a single place to mock
+ def _fetch_build(self, build_number):
+ build_dictionary = self._buildbot._fetch_build_dictionary(self, build_number)
+ if not build_dictionary:
+ return None
+ revision_string = build_dictionary['sourceStamp']['revision']
+ return Build(self,
+ build_number=int(build_dictionary['number']),
+ # 'revision' may be None if a trunk build was started by the force-build button on the web page.
+ revision=(int(revision_string) if revision_string else None),
+                     # Buildbot uses any number other than 0 to mean fail. Since we fetch with
+ # filter=1, passing builds may contain no 'results' value.
+ is_green=(not build_dictionary.get('results')),
+ )
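+
+    # For illustration (values hypothetical): with filter=1 a fetched build
+    # dictionary looks roughly like
+    #   {"number": 3693, "sourceStamp": {"revision": "104285"}, "results": 2}
+    # and a missing or zero "results" value is treated as a passing (green) build.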
+
+ def build(self, build_number):
+ if not build_number:
+ return None
+ cached_build = self._builds_cache.get(build_number)
+ if cached_build:
+ return cached_build
+
+ build = self._fetch_build(build_number)
+ self._builds_cache[build_number] = build
+ return build
+
+ def latest_cached_build(self):
+ revision_build_pairs = self.revision_build_pairs_with_results()
+ revision_build_pairs.sort(key=lambda i: i[1])
+ latest_build_number = revision_build_pairs[-1][1]
+ return self.build(latest_build_number)
+
+ def force_build(self, username="webkit-patch", comments=None):
+ def predicate(form):
+ try:
+ return form.find_control("username")
+ except Exception, e:
+ return False
+ self._browser.open(self.url())
+ self._browser.select_form(predicate=predicate)
+ self._browser["username"] = username
+ if comments:
+ self._browser["comments"] = comments
+ return self._browser.submit()
+
+ file_name_regexp = re.compile(r"r(?P<revision>\d+) \((?P<build_number>\d+)\)")
+ def _revision_and_build_for_filename(self, filename):
+ # Example: "r47483 (1)/" or "r47483 (1).zip"
+ match = self.file_name_regexp.match(filename)
+ return (int(match.group("revision")), int(match.group("build_number")))
+
+ def _fetch_revision_to_build_map(self):
+ # All _fetch requests go through _buildbot for easier mocking
+ # FIXME: This should use NetworkTransaction's 404 handling instead.
+ try:
+ # FIXME: This method is horribly slow due to the huge network load.
+ # FIXME: This is a poor way to do revision -> build mapping.
+ # Better would be to ask buildbot through some sort of API.
+ print "Loading revision/build list from %s." % self.results_url()
+ print "This may take a while..."
+ result_files = self._buildbot._fetch_twisted_directory_listing(self.results_url())
+ except urllib2.HTTPError, error:
+ if error.code != 404:
+ raise
+ result_files = []
+
+ # This assumes there was only one build per revision, which is false but we don't care for now.
+ return dict([self._revision_and_build_for_filename(file_info["filename"]) for file_info in result_files])
+
+ def _revision_to_build_map(self):
+ if not self._revision_to_build_number:
+ self._revision_to_build_number = self._fetch_revision_to_build_map()
+ return self._revision_to_build_number
+
+ def revision_build_pairs_with_results(self):
+ return self._revision_to_build_map().items()
+
+ # This assumes there can be only one build per revision, which is false, but we don't care for now.
+ def build_for_revision(self, revision, allow_failed_lookups=False):
+ # NOTE: This lookup will fail if that exact revision was never built.
+ build_number = self._revision_to_build_map().get(int(revision))
+ if not build_number:
+ return None
+ build = self.build(build_number)
+ if not build and allow_failed_lookups:
+            # Builds for old revisions will fail to look up via buildbot's json api.
+ build = Build(self,
+ build_number=build_number,
+ revision=revision,
+ is_green=False,
+ )
+ return build
+
+ def find_regression_window(self, red_build, look_back_limit=30):
+ if not red_build or red_build.is_green():
+ return RegressionWindow(None, None)
+ common_failures = None
+ current_build = red_build
+ build_after_current_build = None
+ look_back_count = 0
+ while current_build:
+ if current_build.is_green():
+ # current_build can't possibly have any failures in common
+ # with red_build because it's green.
+ break
+ results = current_build.layout_test_results()
+            # We treat a lack of results as if all the tests failed.
+ # This occurs, for example, when we can't compile at all.
+ if results:
+ failures = set(results.failing_tests())
+ if common_failures == None:
+ common_failures = failures
+ else:
+ common_failures = common_failures.intersection(failures)
+ if not common_failures:
+ # current_build doesn't have any failures in common with
+ # the red build we're worried about. We assume that any
+ # failures in current_build were due to flakiness.
+ break
+ look_back_count += 1
+ if look_back_count > look_back_limit:
+ return RegressionWindow(None, current_build, failing_tests=common_failures)
+ build_after_current_build = current_build
+ current_build = current_build.previous_build()
+ # We must iterate at least once because red_build is red.
+ assert(build_after_current_build)
+ # Current build must either be green or have no failures in common
+ # with red build, so we've found our failure transition.
+ return RegressionWindow(current_build, build_after_current_build, failing_tests=common_failures)
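+
+    # Sketch of the walk above (build numbers hypothetical): starting from a
+    # red build, the loop steps back through previous builds, intersecting
+    # their failing tests, until it reaches a green build, a build sharing no
+    # failures, or the look-back limit; the returned window then spans the
+    # last build examined and the build after it.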
+
+ def find_blameworthy_regression_window(self, red_build_number, look_back_limit=30, avoid_flakey_tests=True):
+ red_build = self.build(red_build_number)
+ regression_window = self.find_regression_window(red_build, look_back_limit)
+ if not regression_window.build_before_failure():
+ return None # We ran off the limit of our search
+ # If avoid_flakey_tests, require at least 2 bad builds before we
+ # suspect a real failure transition.
+ if avoid_flakey_tests and regression_window.failing_build() == red_build:
+ return None
+ return regression_window
+
+
+class Build(object):
+ def __init__(self, builder, build_number, revision, is_green):
+ self._builder = builder
+ self._number = build_number
+ self._revision = revision
+ self._is_green = is_green
+ self._layout_test_results = None
+
+ @staticmethod
+ def build_url(builder, build_number):
+ return "%s/builds/%s" % (builder.url(), build_number)
+
+ def url(self):
+ return self.build_url(self.builder(), self._number)
+
+ def results_url(self):
+ results_directory = "r%s (%s)" % (self.revision(), self._number)
+ return "%s/%s" % (self._builder.results_url(), urllib.quote(results_directory))
+
+ def results_zip_url(self):
+ return "%s.zip" % self.results_url()
+
+ def _fetch_file_from_results(self, file_name):
+ # It seems this can return None if the url redirects and then returns 404.
+ result = urllib2.urlopen("%s/%s" % (self.results_url(), file_name))
+ if not result:
+ return None
+ # urlopen returns a file-like object which sometimes works fine with str()
+        # but sometimes is an addinfourl object. In either case calling read() is correct.
+ return result.read()
+
+ def layout_test_results(self):
+ if self._layout_test_results:
+ return self._layout_test_results
+
+ # FIXME: This should cache that the result was a 404 and stop hitting the network.
+ results_file = NetworkTransaction(convert_404_to_None=True).run(lambda: self._fetch_file_from_results("full_results.json"))
+ if not results_file:
+ results_file = NetworkTransaction(convert_404_to_None=True).run(lambda: self._fetch_file_from_results("results.html"))
+
+ # results_from_string accepts either ORWT html or NRWT json.
+ self._layout_test_results = LayoutTestResults.results_from_string(results_file)
+ return self._layout_test_results
+
+ def builder(self):
+ return self._builder
+
+ def revision(self):
+ return self._revision
+
+ def is_green(self):
+ return self._is_green
+
+ def previous_build(self):
+ # previous_build() allows callers to avoid assuming build numbers are sequential.
+ # They may not be sequential across all master changes, or when non-trunk builds are made.
+ return self._builder.build(self._number - 1)
+
+
+class BuildBot(object):
+ _builder_factory = Builder
+ _default_url = config_urls.buildbot_url
+
+ def __init__(self, url=None):
+ self.buildbot_url = url if url else self._default_url
+ self._builder_by_name = {}
+
+ # If any core builder is red we should not be landing patches. Other
+ # builders should be added to this list once they are known to be
+ # reliable.
+ # See https://bugs.webkit.org/show_bug.cgi?id=33296 and related bugs.
+ self.core_builder_names_regexps = [
+ "SnowLeopard.*Build",
+ "SnowLeopard.*\(Test",
+ "SnowLeopard.*\(WebKit2 Test",
+ "Leopard.*\((?:Build|Test)",
+ "Windows.*Build",
+ "Windows.*\(Test",
+ "WinCE",
+ "EFL",
+ "GTK.*32",
+ "GTK.*64",
+ "Qt",
+ "Chromium.*(Mac|Linux|Win).*Release$",
+ "Chromium.*(Mac|Linux|Win).*Release.*\(Tests",
+ ]
+
+ def _parse_last_build_cell(self, builder, cell):
+ status_link = cell.find('a')
+ if status_link:
+ # Will be either a revision number or a build number
+ revision_string = status_link.string
+ # If revision_string has non-digits assume it's not a revision number.
+ builder['built_revision'] = int(revision_string) \
+ if not re.match('\D', revision_string) \
+ else None
+
+        # FIXME: We treat slave lost as green even though it is not, to
+        # work around the Qt bots being on a broken internet connection.
+ # The real fix is https://bugs.webkit.org/show_bug.cgi?id=37099
+ builder['is_green'] = not re.search('fail', cell.renderContents()) or \
+ not not re.search('lost', cell.renderContents())
+
+ status_link_regexp = r"builders/(?P<builder_name>.*)/builds/(?P<build_number>\d+)"
+ link_match = re.match(status_link_regexp, status_link['href'])
+ builder['build_number'] = int(link_match.group("build_number"))
+ else:
+            # We failed to find a link in the first cell, so just give up. This
+            # can happen if a builder was just added; the first cell will just
+            # say "no build".
+ # Other parts of the code depend on is_green being present.
+ builder['is_green'] = False
+ builder['built_revision'] = None
+ builder['build_number'] = None
+
+ def _parse_current_build_cell(self, builder, cell):
+ activity_lines = cell.renderContents().split("<br />")
+ builder["activity"] = activity_lines[0] # normally "building" or "idle"
+        # The middle lines document how long is left for any current builds.
+ match = re.match("(?P<pending_builds>\d) pending", activity_lines[-1])
+ builder["pending_builds"] = int(match.group("pending_builds")) if match else 0
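+
+    # For illustration (markup hypothetical): an activity cell rendering as
+    # "idle<br />3 pending" yields activity "idle" and pending_builds 3; the
+    # sample one_box_per_builder table in the unit tests uses this form.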
+
+ def _parse_builder_status_from_row(self, status_row):
+ status_cells = status_row.findAll('td')
+ builder = {}
+
+ # First cell is the name
+ name_link = status_cells[0].find('a')
+ builder["name"] = unicode(name_link.string)
+
+ self._parse_last_build_cell(builder, status_cells[1])
+ self._parse_current_build_cell(builder, status_cells[2])
+ return builder
+
+ def _matches_regexps(self, builder_name, name_regexps):
+ for name_regexp in name_regexps:
+ if re.match(name_regexp, builder_name):
+ return True
+ return False
+
+ # FIXME: Should move onto Builder
+ def _is_core_builder(self, builder_name):
+ return self._matches_regexps(builder_name, self.core_builder_names_regexps)
+
+ # FIXME: This method needs to die, but is used by a unit test at the moment.
+ def _builder_statuses_with_names_matching_regexps(self, builder_statuses, name_regexps):
+ return [builder for builder in builder_statuses if self._matches_regexps(builder["name"], name_regexps)]
+
+ def red_core_builders(self):
+ return [builder for builder in self.core_builder_statuses() if not builder["is_green"]]
+
+ def red_core_builders_names(self):
+ return [builder["name"] for builder in self.red_core_builders()]
+
+ def idle_red_core_builders(self):
+ return [builder for builder in self.red_core_builders() if builder["activity"] == "idle"]
+
+ def core_builders_are_green(self):
+ return not self.red_core_builders()
+
+ # FIXME: These _fetch methods should move to a networking class.
+ def _fetch_build_dictionary(self, builder, build_number):
+ # Note: filter=1 will remove None and {} and '', which cuts noise but can
+        # cause keys you might otherwise expect to be missing.
+ # FIXME: The bot sends a *huge* amount of data for each request, we should
+ # find a way to reduce the response size further.
+ json_url = "%s/json/builders/%s/builds/%s?filter=1" % (self.buildbot_url, urllib.quote(builder.name()), build_number)
+ try:
+ return json.load(urllib2.urlopen(json_url))
+ except urllib2.URLError, err:
+ build_url = Build.build_url(builder, build_number)
+ _log.error("Error fetching data for %s build %s (%s, json: %s): %s" % (builder.name(), build_number, build_url, json_url, err))
+ return None
+ except ValueError, err:
+ build_url = Build.build_url(builder, build_number)
+ _log.error("Error decoding json data from %s: %s" % (build_url, err))
+ return None
+
+ def _fetch_one_box_per_builder(self):
+ build_status_url = "%s/one_box_per_builder" % self.buildbot_url
+ return urllib2.urlopen(build_status_url)
+
+ def _file_cell_text(self, file_cell):
+ """Traverses down through firstChild elements until one containing a string is found, then returns that string"""
+ element = file_cell
+ while element.string is None and element.contents:
+ element = element.contents[0]
+ return element.string
+
+ def _parse_twisted_file_row(self, file_row):
+ string_or_empty = lambda string: unicode(string) if string else u""
+ file_cells = file_row.findAll('td')
+ return {
+ "filename": string_or_empty(self._file_cell_text(file_cells[0])),
+ "size": string_or_empty(self._file_cell_text(file_cells[1])),
+ "type": string_or_empty(self._file_cell_text(file_cells[2])),
+ "encoding": string_or_empty(self._file_cell_text(file_cells[3])),
+ }
+
+ def _parse_twisted_directory_listing(self, page):
+ soup = BeautifulSoup(page)
+ # HACK: Match only table rows with a class to ignore twisted header/footer rows.
+ file_rows = soup.find('table').findAll('tr', {'class': re.compile(r'\b(?:directory|file)\b')})
+ return [self._parse_twisted_file_row(file_row) for file_row in file_rows]
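+
+    # For illustration (values hypothetical): each parsed row is a dictionary
+    # of unicode strings such as {"filename": u"r47483 (1).zip", "size":
+    # u"10K", "type": u"[application/zip]", "encoding": u""}.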
+
+ # FIXME: There should be a better way to get this information directly from twisted.
+ def _fetch_twisted_directory_listing(self, url):
+ return self._parse_twisted_directory_listing(urllib2.urlopen(url))
+
+ def builders(self):
+ return [self.builder_with_name(status["name"]) for status in self.builder_statuses()]
+
+    # This method pulls from /one_box_per_builder as an efficient way to get
+    # information about all builders in a single request.
+ def builder_statuses(self):
+ soup = BeautifulSoup(self._fetch_one_box_per_builder())
+ return [self._parse_builder_status_from_row(status_row) for status_row in soup.find('table').findAll('tr')]
+
+ def core_builder_statuses(self):
+ return [builder for builder in self.builder_statuses() if self._is_core_builder(builder["name"])]
+
+ def builder_with_name(self, name):
+ builder = self._builder_by_name.get(name)
+ if not builder:
+ builder = self._builder_factory(name, self)
+ self._builder_by_name[name] = builder
+ return builder
+
+ def failure_map(self, only_core_builders=True):
+ builder_statuses = self.core_builder_statuses() if only_core_builders else self.builder_statuses()
+ failure_map = FailureMap()
+ revision_to_failing_bots = {}
+ for builder_status in builder_statuses:
+ if builder_status["is_green"]:
+ continue
+ builder = self.builder_with_name(builder_status["name"])
+ regression_window = builder.find_blameworthy_regression_window(builder_status["build_number"])
+ if regression_window:
+ failure_map.add_regression_window(builder, regression_window)
+ return failure_map
+
+ # This makes fewer requests than calling Builder.latest_build would. It grabs all builder
+ # statuses in one request using self.builder_statuses (fetching /one_box_per_builder instead of builder pages).
+ def _latest_builds_from_builders(self, only_core_builders=True):
+ builder_statuses = self.core_builder_statuses() if only_core_builders else self.builder_statuses()
+ return [self.builder_with_name(status["name"]).build(status["build_number"]) for status in builder_statuses]
+
+ def _build_at_or_before_revision(self, build, revision):
+ while build:
+ if build.revision() <= revision:
+ return build
+ build = build.previous_build()
+
+ def last_green_revision(self, only_core_builders=True):
+ builds = self._latest_builds_from_builders(only_core_builders)
+ target_revision = builds[0].revision()
+ # An alternate way to do this would be to start at one revision and walk backwards
+ # checking builder.build_for_revision, however build_for_revision is very slow on first load.
+ while True:
+ # Make builds agree on revision
+ builds = [self._build_at_or_before_revision(build, target_revision) for build in builds]
+ if None in builds: # One of the builds failed to load from the server.
+ return None
+ min_revision = min(map(lambda build: build.revision(), builds))
+ if min_revision != target_revision:
+ target_revision = min_revision
+ continue # Builds don't all agree on revision, keep searching
+ # Check to make sure they're all green
+ all_are_green = reduce(operator.and_, map(lambda build: build.is_green(), builds))
+ if not all_are_green:
+ target_revision -= 1
+ continue
+ return min_revision
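+
+    # Sketch of the search above (revisions hypothetical): if the newest
+    # builds sit at r100, r98 and r99, the target drops to r98, every build is
+    # rewound to its latest build at or before the target, and the loop
+    # repeats, decrementing the target whenever any rewound build is still
+    # red, until all of the builds agree on a green revision.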
diff --git a/Tools/Scripts/webkitpy/common/net/buildbot/buildbot_mock.py b/Tools/Scripts/webkitpy/common/net/buildbot/buildbot_mock.py
new file mode 100644
index 000000000..c6cfceb11
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/buildbot/buildbot_mock.py
@@ -0,0 +1,115 @@
+# Copyright (C) 2011 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from webkitpy.common.system.deprecated_logging import log
+
+
+class MockBuilder(object):
+ def __init__(self, name):
+ self._name = name
+
+ def name(self):
+ return self._name
+
+ def results_url(self):
+ return "http://example.com/builders/%s/results" % self.name()
+
+ def accumulated_results_url(self):
+ return "http://example.com/f/builders/%s/results/layout-test-results" % self.name()
+
+ def force_build(self, username, comments):
+ log("MOCK: force_build: name=%s, username=%s, comments=%s" % (
+ self._name, username, comments))
+
+
+class MockFailureMap(object):
+ def __init__(self, buildbot):
+ self._buildbot = buildbot
+
+ def is_empty(self):
+ return False
+
+ def filter_out_old_failures(self, is_old_revision):
+ pass
+
+ def failing_revisions(self):
+ return [29837]
+
+ def builders_failing_for(self, revision):
+ return [self._buildbot.builder_with_name("Builder1")]
+
+ def tests_failing_for(self, revision):
+ return ["mock-test-1"]
+
+ def failing_tests(self):
+ return set(["mock-test-1"])
+
+
+class MockBuildBot(object):
+ def __init__(self):
+ self._mock_builder1_status = {
+ "name": "Builder1",
+ "is_green": True,
+ "activity": "building",
+ }
+ self._mock_builder2_status = {
+ "name": "Builder2",
+ "is_green": True,
+ "activity": "idle",
+ }
+
+ def builder_with_name(self, name):
+ return MockBuilder(name)
+
+ def builder_statuses(self):
+ return [
+ self._mock_builder1_status,
+ self._mock_builder2_status,
+ ]
+
+ def red_core_builders_names(self):
+ if not self._mock_builder2_status["is_green"]:
+ return [self._mock_builder2_status["name"]]
+ return []
+
+ def red_core_builders(self):
+ if not self._mock_builder2_status["is_green"]:
+ return [self._mock_builder2_status]
+ return []
+
+ def idle_red_core_builders(self):
+ if not self._mock_builder2_status["is_green"]:
+ return [self._mock_builder2_status]
+ return []
+
+ def last_green_revision(self):
+ return 9479
+
+ def light_tree_on_fire(self):
+ self._mock_builder2_status["is_green"] = False
+
+ def failure_map(self):
+ return MockFailureMap(self)
diff --git a/Tools/Scripts/webkitpy/common/net/buildbot/buildbot_unittest.py b/Tools/Scripts/webkitpy/common/net/buildbot/buildbot_unittest.py
new file mode 100644
index 000000000..f824d2f22
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/buildbot/buildbot_unittest.py
@@ -0,0 +1,472 @@
+# Copyright (C) 2009 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import unittest
+
+from webkitpy.common.net.layouttestresults import LayoutTestResults
+from webkitpy.common.net.buildbot import BuildBot, Builder, Build
+from webkitpy.layout_tests.models import test_results
+from webkitpy.layout_tests.models import test_failures
+from webkitpy.thirdparty.BeautifulSoup import BeautifulSoup
+
+
+class BuilderTest(unittest.TestCase):
+ def _mock_test_result(self, testname):
+ return test_results.TestResult(testname, [test_failures.FailureTextMismatch()])
+
+ def _install_fetch_build(self, failure):
+ def _mock_fetch_build(build_number):
+ build = Build(
+ builder=self.builder,
+ build_number=build_number,
+ revision=build_number + 1000,
+ is_green=build_number < 4
+ )
+ results = [self._mock_test_result(testname) for testname in failure(build_number)]
+ build._layout_test_results = LayoutTestResults(results)
+ return build
+ self.builder._fetch_build = _mock_fetch_build
+
+ def setUp(self):
+ self.buildbot = BuildBot()
+ self.builder = Builder(u"Test Builder \u2661", self.buildbot)
+ self._install_fetch_build(lambda build_number: ["test1", "test2"])
+
+ def test_find_regression_window(self):
+ regression_window = self.builder.find_regression_window(self.builder.build(10))
+ self.assertEqual(regression_window.build_before_failure().revision(), 1003)
+ self.assertEqual(regression_window.failing_build().revision(), 1004)
+
+ regression_window = self.builder.find_regression_window(self.builder.build(10), look_back_limit=2)
+ self.assertEqual(regression_window.build_before_failure(), None)
+ self.assertEqual(regression_window.failing_build().revision(), 1008)
+
+ def test_none_build(self):
+ self.builder._fetch_build = lambda build_number: None
+ regression_window = self.builder.find_regression_window(self.builder.build(10))
+ self.assertEqual(regression_window.build_before_failure(), None)
+ self.assertEqual(regression_window.failing_build(), None)
+
+ def test_flaky_tests(self):
+ self._install_fetch_build(lambda build_number: ["test1"] if build_number % 2 else ["test2"])
+ regression_window = self.builder.find_regression_window(self.builder.build(10))
+ self.assertEqual(regression_window.build_before_failure().revision(), 1009)
+ self.assertEqual(regression_window.failing_build().revision(), 1010)
+
+ def test_failure_and_flaky(self):
+ self._install_fetch_build(lambda build_number: ["test1", "test2"] if build_number % 2 else ["test2"])
+ regression_window = self.builder.find_regression_window(self.builder.build(10))
+ self.assertEqual(regression_window.build_before_failure().revision(), 1003)
+ self.assertEqual(regression_window.failing_build().revision(), 1004)
+
+ def test_no_results(self):
+ self._install_fetch_build(lambda build_number: ["test1", "test2"] if build_number % 2 else ["test2"])
+ regression_window = self.builder.find_regression_window(self.builder.build(10))
+ self.assertEqual(regression_window.build_before_failure().revision(), 1003)
+ self.assertEqual(regression_window.failing_build().revision(), 1004)
+
+ def test_failure_after_flaky(self):
+ self._install_fetch_build(lambda build_number: ["test1", "test2"] if build_number > 6 else ["test3"])
+ regression_window = self.builder.find_regression_window(self.builder.build(10))
+ self.assertEqual(regression_window.build_before_failure().revision(), 1006)
+ self.assertEqual(regression_window.failing_build().revision(), 1007)
+
+ def test_find_blameworthy_regression_window(self):
+ self.assertEqual(self.builder.find_blameworthy_regression_window(10).revisions(), [1004])
+ self.assertEqual(self.builder.find_blameworthy_regression_window(10, look_back_limit=2), None)
+ # Flakey test avoidance requires at least 2 red builds:
+ self.assertEqual(self.builder.find_blameworthy_regression_window(4), None)
+ self.assertEqual(self.builder.find_blameworthy_regression_window(4, avoid_flakey_tests=False).revisions(), [1004])
+ # Green builder:
+ self.assertEqual(self.builder.find_blameworthy_regression_window(3), None)
+
+ def test_build_caching(self):
+ self.assertEqual(self.builder.build(10), self.builder.build(10))
+
+ def test_build_and_revision_for_filename(self):
+ expectations = {
+ "r47483 (1)/" : (47483, 1),
+ "r47483 (1).zip" : (47483, 1),
+ }
+ for filename, revision_and_build in expectations.items():
+ self.assertEqual(self.builder._revision_and_build_for_filename(filename), revision_and_build)
+
+ def test_fetch_build(self):
+ buildbot = BuildBot()
+ builder = Builder(u"Test Builder \u2661", buildbot)
+
+ def mock_fetch_build_dictionary(self, build_number):
+ build_dictionary = {
+ "sourceStamp": {
+ "revision": None, # revision=None means a trunk build started from the force-build button on the builder page.
+ },
+ "number": int(build_number),
+ # Intentionally missing the 'results' key, meaning it's a "pass" build.
+ }
+ return build_dictionary
+ buildbot._fetch_build_dictionary = mock_fetch_build_dictionary
+ self.assertNotEqual(builder._fetch_build(1), None)
+
+
+class BuildTest(unittest.TestCase):
+ def test_layout_test_results(self):
+ buildbot = BuildBot()
+ builder = Builder(u"Foo Builder (test)", buildbot)
+ build = Build(builder, None, None, None)
+ build._fetch_file_from_results = lambda file_name: None
+ # Test that layout_test_results() returns None if the fetch fails.
+ self.assertEqual(build.layout_test_results(), None)
+
+
+class BuildBotTest(unittest.TestCase):
+
+ _example_one_box_status = '''
+ <table>
+ <tr>
+ <td class="box"><a href="builders/Windows%20Debug%20%28Tests%29">Windows Debug (Tests)</a></td>
+ <td align="center" class="LastBuild box success"><a href="builders/Windows%20Debug%20%28Tests%29/builds/3693">47380</a><br />build<br />successful</td>
+ <td align="center" class="Activity building">building<br />ETA in<br />~ 14 mins<br />at 13:40</td>
+ <tr>
+ <td class="box"><a href="builders/SnowLeopard%20Intel%20Release">SnowLeopard Intel Release</a></td>
+ <td class="LastBuild box" >no build</td>
+ <td align="center" class="Activity building">building<br />< 1 min</td>
+ <tr>
+ <td class="box"><a href="builders/Qt%20Linux%20Release">Qt Linux Release</a></td>
+ <td align="center" class="LastBuild box failure"><a href="builders/Qt%20Linux%20Release/builds/654">47383</a><br />failed<br />compile-webkit</td>
+ <td align="center" class="Activity idle">idle<br />3 pending</td>
+ <tr>
+ <td class="box"><a href="builders/Qt%20Windows%2032-bit%20Debug">Qt Windows 32-bit Debug</a></td>
+ <td align="center" class="LastBuild box failure"><a href="builders/Qt%20Windows%2032-bit%20Debug/builds/2090">60563</a><br />failed<br />failed<br />slave<br />lost</td>
+ <td align="center" class="Activity building">building<br />ETA in<br />~ 5 mins<br />at 08:25</td>
+ </table>
+'''
+ _expected_example_one_box_parsings = [
+ {
+ 'is_green': True,
+ 'build_number' : 3693,
+ 'name': u'Windows Debug (Tests)',
+ 'built_revision': 47380,
+ 'activity': 'building',
+ 'pending_builds': 0,
+ },
+ {
+ 'is_green': False,
+ 'build_number' : None,
+ 'name': u'SnowLeopard Intel Release',
+ 'built_revision': None,
+ 'activity': 'building',
+ 'pending_builds': 0,
+ },
+ {
+ 'is_green': False,
+ 'build_number' : 654,
+ 'name': u'Qt Linux Release',
+ 'built_revision': 47383,
+ 'activity': 'idle',
+ 'pending_builds': 3,
+ },
+ {
+ 'is_green': True,
+ 'build_number' : 2090,
+ 'name': u'Qt Windows 32-bit Debug',
+ 'built_revision': 60563,
+ 'activity': 'building',
+ 'pending_builds': 0,
+ },
+ ]
+
+ def test_status_parsing(self):
+ buildbot = BuildBot()
+
+ soup = BeautifulSoup(self._example_one_box_status)
+ status_table = soup.find("table")
+ input_rows = status_table.findAll('tr')
+
+ for x in range(len(input_rows)):
+ status_row = input_rows[x]
+ expected_parsing = self._expected_example_one_box_parsings[x]
+
+ builder = buildbot._parse_builder_status_from_row(status_row)
+
+ # Make sure we aren't parsing more or less than we expect
+ self.assertEquals(builder.keys(), expected_parsing.keys())
+
+ for key, expected_value in expected_parsing.items():
+ self.assertEquals(builder[key], expected_value, ("Builder %d parse failure for key: %s: Actual='%s' Expected='%s'" % (x, key, builder[key], expected_value)))
+
+ def test_core_builder_methods(self):
+ buildbot = BuildBot()
+
+ # Override builder_statuses function to not touch the network.
+ def example_builder_statuses(): # We could use instancemethod() to bind 'self' but we don't need to.
+ return BuildBotTest._expected_example_one_box_parsings
+ buildbot.builder_statuses = example_builder_statuses
+
+ buildbot.core_builder_names_regexps = [ 'Leopard', "Windows.*Build" ]
+ self.assertEquals(buildbot.red_core_builders_names(), [])
+ self.assertTrue(buildbot.core_builders_are_green())
+
+ buildbot.core_builder_names_regexps = [ 'SnowLeopard', 'Qt' ]
+ self.assertEquals(buildbot.red_core_builders_names(), [ u'SnowLeopard Intel Release', u'Qt Linux Release' ])
+ self.assertFalse(buildbot.core_builders_are_green())
+
+ def test_builder_name_regexps(self):
+ buildbot = BuildBot()
+
+ # For complete testing, this list should match the list of builders at build.webkit.org:
+ example_builders = [
+ {'name': u'Leopard Intel Release (Build)', },
+ {'name': u'Leopard Intel Release (Tests)', },
+ {'name': u'Leopard Intel Debug (Build)', },
+ {'name': u'Leopard Intel Debug (Tests)', },
+ {'name': u'SnowLeopard Intel Release (Build)', },
+ {'name': u'SnowLeopard Intel Release (Tests)', },
+ {'name': u'SnowLeopard Intel Release (WebKit2 Tests)', },
+ {'name': u'SnowLeopard Intel Leaks', },
+ {'name': u'Windows Release (Build)', },
+ {'name': u'Windows 7 Release (Tests)', },
+ {'name': u'Windows Debug (Build)', },
+ {'name': u'Windows XP Debug (Tests)', },
+ {'name': u'Windows 7 Release (WebKit2 Tests)', },
+ {'name': u'GTK Linux 32-bit Release', },
+ {'name': u'GTK Linux 64-bit Release', },
+ {'name': u'GTK Linux 64-bit Debug', },
+ {'name': u'Qt Linux Release', },
+ {'name': u'Qt Linux Release minimal', },
+ {'name': u'Qt Linux ARMv7 Release', },
+ {'name': u'Qt Windows 32-bit Release', },
+ {'name': u'Qt Windows 32-bit Debug', },
+ {'name': u'Chromium Android Release', },
+ {'name': u'Chromium Win Release', },
+ {'name': u'Chromium Win Release (Tests)', },
+ {'name': u'Chromium Mac Release', },
+ {'name': u'Chromium Mac Release (Tests)', },
+ {'name': u'Chromium Linux Release', },
+ {'name': u'Chromium Linux Release (Tests)', },
+ {'name': u'Leopard Intel Release (NRWT)', },
+ {'name': u'SnowLeopard Intel Release (NRWT)', },
+ {'name': u'New run-webkit-tests', },
+ {'name': u'WinCairo Debug (Build)', },
+ {'name': u'WinCE Release (Build)', },
+ {'name': u'EFL Linux Release (Build)', },
+ ]
+ name_regexps = [
+ "SnowLeopard.*Build",
+ "SnowLeopard.*\(Test",
+ "SnowLeopard.*\(WebKit2 Test",
+ "Leopard.*\((?:Build|Test)",
+ "Windows.*Build",
+ "Windows.*\(Test",
+ "WinCE",
+ "EFL",
+ "GTK.*32",
+ "GTK.*64",
+ "Qt",
+ "Chromium.*(Mac|Linux|Win).*Release$",
+ "Chromium.*(Mac|Linux|Win).*Release.*\(Tests",
+ ]
+ expected_builders = [
+ {'name': u'Leopard Intel Release (Build)', },
+ {'name': u'Leopard Intel Release (Tests)', },
+ {'name': u'Leopard Intel Debug (Build)', },
+ {'name': u'Leopard Intel Debug (Tests)', },
+ {'name': u'SnowLeopard Intel Release (Build)', },
+ {'name': u'SnowLeopard Intel Release (Tests)', },
+ {'name': u'SnowLeopard Intel Release (WebKit2 Tests)', },
+ {'name': u'Windows Release (Build)', },
+ {'name': u'Windows 7 Release (Tests)', },
+ {'name': u'Windows Debug (Build)', },
+ {'name': u'Windows XP Debug (Tests)', },
+ {'name': u'GTK Linux 32-bit Release', },
+ {'name': u'GTK Linux 64-bit Release', },
+ {'name': u'GTK Linux 64-bit Debug', },
+ {'name': u'Qt Linux Release', },
+ {'name': u'Qt Linux Release minimal', },
+ {'name': u'Qt Linux ARMv7 Release', },
+ {'name': u'Qt Windows 32-bit Release', },
+ {'name': u'Qt Windows 32-bit Debug', },
+ {'name': u'Chromium Win Release', },
+ {'name': u'Chromium Win Release (Tests)', },
+ {'name': u'Chromium Mac Release', },
+ {'name': u'Chromium Mac Release (Tests)', },
+ {'name': u'Chromium Linux Release', },
+ {'name': u'Chromium Linux Release (Tests)', },
+ {'name': u'WinCE Release (Build)', },
+ {'name': u'EFL Linux Release (Build)', },
+ ]
+
+ # This test should probably be updated if the default regexp list changes
+ self.assertEquals(buildbot.core_builder_names_regexps, name_regexps)
+
+ builders = buildbot._builder_statuses_with_names_matching_regexps(example_builders, name_regexps)
+ self.assertEquals(builders, expected_builders)
+
+ def test_builder_with_name(self):
+ buildbot = BuildBot()
+
+ builder = buildbot.builder_with_name("Test Builder")
+ self.assertEqual(builder.name(), "Test Builder")
+ self.assertEqual(builder.url(), "http://build.webkit.org/builders/Test%20Builder")
+ self.assertEqual(builder.url_encoded_name(), "Test%20Builder")
+ self.assertEqual(builder.results_url(), "http://build.webkit.org/results/Test%20Builder")
+
+ # Override _fetch_build_dictionary function to not touch the network.
+ def mock_fetch_build_dictionary(self, build_number):
+ build_dictionary = {
+ "sourceStamp": {
+ "revision" : 2 * build_number,
+ },
+ "number" : int(build_number),
+ "results" : build_number % 2, # 0 means pass
+ }
+ return build_dictionary
+ buildbot._fetch_build_dictionary = mock_fetch_build_dictionary
+
+ build = builder.build(10)
+ self.assertEqual(build.builder(), builder)
+ self.assertEqual(build.url(), "http://build.webkit.org/builders/Test%20Builder/builds/10")
+ self.assertEqual(build.results_url(), "http://build.webkit.org/results/Test%20Builder/r20%20%2810%29")
+ self.assertEqual(build.revision(), 20)
+ self.assertEqual(build.is_green(), True)
+
+ build = build.previous_build()
+ self.assertEqual(build.builder(), builder)
+ self.assertEqual(build.url(), "http://build.webkit.org/builders/Test%20Builder/builds/9")
+ self.assertEqual(build.results_url(), "http://build.webkit.org/results/Test%20Builder/r18%20%289%29")
+ self.assertEqual(build.revision(), 18)
+ self.assertEqual(build.is_green(), False)
+
+ self.assertEqual(builder.build(None), None)
+
+ _example_directory_listing = '''
+<h1>Directory listing for /results/SnowLeopard Intel Leaks/</h1>
+
+<table>
+ <tr class="alt">
+ <th>Filename</th>
+ <th>Size</th>
+ <th>Content type</th>
+ <th>Content encoding</th>
+ </tr>
+<tr class="directory ">
+ <td><a href="r47483%20%281%29/"><b>r47483 (1)/</b></a></td>
+ <td><b></b></td>
+ <td><b>[Directory]</b></td>
+ <td><b></b></td>
+</tr>
+<tr class="file alt">
+ <td><a href="r47484%20%282%29.zip">r47484 (2).zip</a></td>
+ <td>89K</td>
+ <td>[application/zip]</td>
+ <td></td>
+</tr>
+'''
+ _expected_files = [
+ {
+ "filename" : "r47483 (1)/",
+ "size" : "",
+ "type" : "[Directory]",
+ "encoding" : "",
+ },
+ {
+ "filename" : "r47484 (2).zip",
+ "size" : "89K",
+ "type" : "[application/zip]",
+ "encoding" : "",
+ },
+ ]
+
+ def test_parse_build_to_revision_map(self):
+ buildbot = BuildBot()
+ files = buildbot._parse_twisted_directory_listing(self._example_directory_listing)
+ self.assertEqual(self._expected_files, files)
+
+ # Revision, is_green
+ # Ordered from newest (highest number) to oldest.
+ fake_builder1 = [
+ [2, False],
+ [1, True],
+ ]
+ fake_builder2 = [
+ [2, False],
+ [1, True],
+ ]
+ fake_builders = [
+ fake_builder1,
+ fake_builder2,
+ ]
+ def _build_from_fake(self, fake_builder, index):
+ if index >= len(fake_builder):
+ return None
+ fake_build = fake_builder[index]
+ build = Build(
+ builder=fake_builder,
+ build_number=index,
+ revision=fake_build[0],
+ is_green=fake_build[1],
+ )
+ def mock_previous_build():
+ return self._build_from_fake(fake_builder, index + 1)
+ build.previous_build = mock_previous_build
+ return build
+
+ def _fake_builds_at_index(self, index):
+ return [self._build_from_fake(builder, index) for builder in self.fake_builders]
+
+ def test_last_green_revision(self):
+ buildbot = BuildBot()
+ def mock_builds_from_builders(only_core_builders):
+ return self._fake_builds_at_index(0)
+ buildbot._latest_builds_from_builders = mock_builds_from_builders
+ self.assertEqual(buildbot.last_green_revision(), 1)
+
+ def _fetch_build(self, build_number):
+ if build_number == 5:
+ return "correct build"
+ return "wrong build"
+
+ def _fetch_revision_to_build_map(self):
+ return {'r5': 5, 'r2': 2, 'r3': 3}
+
+ def test_latest_cached_build(self):
+ b = Builder('builder', BuildBot())
+ b._fetch_build = self._fetch_build
+ b._fetch_revision_to_build_map = self._fetch_revision_to_build_map
+ self.assertEquals("correct build", b.latest_cached_build())
+
+ def results_url(self):
+ return "some-url"
+
+ def test_results_zip_url(self):
+ b = Build(None, 123, 123, False)
+ b.results_url = self.results_url
+ self.assertEquals("some-url.zip", b.results_zip_url())
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/Tools/Scripts/webkitpy/common/net/buildbot/chromiumbuildbot.py b/Tools/Scripts/webkitpy/common/net/buildbot/chromiumbuildbot.py
new file mode 100644
index 000000000..5030bba48
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/buildbot/chromiumbuildbot.py
@@ -0,0 +1,47 @@
+# Copyright (c) 2011, Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import webkitpy.common.config.urls as config_urls
+from webkitpy.common.net.buildbot.buildbot import Builder, BuildBot
+# FIXME: builders should probably be in webkitpy.common.config.
+from webkitpy.layout_tests.port.builders import builder_path_from_name
+
+
+class ChromiumBuilder(Builder):
+ # The build.chromium.org builders store their results in a different
+ # location than the build.webkit.org builders.
+ def results_url(self):
+ return "http://build.chromium.org/f/chromium/layout_test_results/%s" % builder_path_from_name(self._name)
+
+ def accumulated_results_url(self):
+ return self.results_url() + "/results/layout-test-results"
+
+
+class ChromiumBuildBot(BuildBot):
+ _builder_factory = ChromiumBuilder
+ _default_url = config_urls.chromium_buildbot_url
diff --git a/Tools/Scripts/webkitpy/common/net/credentials.py b/Tools/Scripts/webkitpy/common/net/credentials.py
new file mode 100644
index 000000000..d76405b39
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/credentials.py
@@ -0,0 +1,154 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Copyright (c) 2009 Apple Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# Python module for reading stored web credentials from the OS.
+
+import os
+import platform
+import re
+
+from webkitpy.common.checkout.scm import Git
+from webkitpy.common.system.executive import Executive, ScriptError
+from webkitpy.common.system.user import User
+from webkitpy.common.system.deprecated_logging import log
+
+try:
+ # Use keyring, a cross-platform keyring interface, as a fallback:
+ # http://pypi.python.org/pypi/keyring
+ import keyring
+except ImportError:
+ keyring = None
+
+
+class Credentials(object):
+ _environ_prefix = "webkit_bugzilla_"
+
+ def __init__(self, host, git_prefix=None, executive=None, cwd=os.getcwd(),
+ keyring=keyring):
+ self.host = host
+ self.git_prefix = "%s." % git_prefix if git_prefix else ""
+ self.executive = executive or Executive()
+ self.cwd = cwd
+ self._keyring = keyring
+
+ def _credentials_from_git(self):
+ try:
+ if not Git.in_working_directory(self.cwd):
+ return (None, None)
+ return (Git.read_git_config(self.git_prefix + "username"),
+ Git.read_git_config(self.git_prefix + "password"))
+ except OSError, e:
+ # Catch and ignore OSError exceptions such as "no such file
+ # or directory" (OSError errno 2), which imply that the Git
+ # command cannot be found/is not installed.
+ pass
+ return (None, None)
+
+ def _keychain_value_with_label(self, label, source_text):
+ match = re.search("%s\"(?P<value>.+)\"" % label,
+ source_text,
+ re.MULTILINE)
+ if match:
+ return match.group('value')
+
+ def _is_mac_os_x(self):
+ return platform.mac_ver()[0]
+
+ def _parse_security_tool_output(self, security_output):
+ username = self._keychain_value_with_label("^\s*\"acct\"<blob>=",
+ security_output)
+ password = self._keychain_value_with_label("^password: ",
+ security_output)
+ return [username, password]
+
+ def _run_security_tool(self, username=None):
+ security_command = [
+ "/usr/bin/security",
+ "find-internet-password",
+ "-g",
+ "-s",
+ self.host,
+ ]
+ if username:
+ security_command += ["-a", username]
+
+ log("Reading Keychain for %s account and password. "
+ "Click \"Allow\" to continue..." % self.host)
+ try:
+ return self.executive.run_command(security_command)
+ except ScriptError:
+ # Failed to either find a keychain entry or some kind of OS-related
+ # error occurred (for instance, couldn't find the /usr/bin/security
+ # command).
+ log("Could not find a keychain entry for %s." % self.host)
+ return None
+
+ def _credentials_from_keychain(self, username=None):
+ if not self._is_mac_os_x():
+ return [username, None]
+
+ security_output = self._run_security_tool(username)
+ if security_output:
+ return self._parse_security_tool_output(security_output)
+ else:
+ return [None, None]
+
+ def _read_environ(self, key):
+ environ_key = self._environ_prefix + key
+ return os.environ.get(environ_key.upper())
+
+ def _credentials_from_environment(self):
+ return (self._read_environ("username"), self._read_environ("password"))
+
+ def _offer_to_store_credentials_in_keyring(self, username, password):
+ if not self._keyring:
+ return
+ if not User().confirm("Store password in system keyring?", User.DEFAULT_NO):
+ return
+ self._keyring.set_password(self.host, username, password)
+
+ def read_credentials(self):
+ username, password = self._credentials_from_environment()
+ # FIXME: We don't currently support pulling the username from one
+ # source and the password from a separate source.
+ if not username or not password:
+ username, password = self._credentials_from_git()
+ if not username or not password:
+ username, password = self._credentials_from_keychain(username)
+
+ if username and not password and self._keyring:
+ password = self._keyring.get_password(self.host, username)
+
+ if not username:
+ username = User.prompt("%s login: " % self.host)
+ if not password:
+ password = User.prompt_password("%s password for %s: " % (self.host, username))
+ self._offer_to_store_credentials_in_keyring(username, password)
+
+ return (username, password)
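A minimal usage sketch for the lookup order implemented by read_credentials() above, assuming the WEBKIT_BUGZILLA_* environment variables are exported before the call (the prefix comes from _environ_prefix); the account values are invented and the snippet is not part of the imported patch.

import os

from webkitpy.common.net.credentials import Credentials

# Environment variables are consulted first, so neither git config, the
# OS X Keychain, the keyring module nor an interactive prompt is reached
# when both values are present.
os.environ["WEBKIT_BUGZILLA_USERNAME"] = "someone@example.com"  # illustrative
os.environ["WEBKIT_BUGZILLA_PASSWORD"] = "example-password"     # illustrative

username, password = Credentials("bugs.webkit.org").read_credentials()
print username, password  # -> someone@example.com example-password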
diff --git a/Tools/Scripts/webkitpy/common/net/credentials_unittest.py b/Tools/Scripts/webkitpy/common/net/credentials_unittest.py
new file mode 100644
index 000000000..59048591f
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/credentials_unittest.py
@@ -0,0 +1,186 @@
+# Copyright (C) 2009 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from __future__ import with_statement
+
+import os
+import tempfile
+import unittest
+from webkitpy.common.net.credentials import Credentials
+from webkitpy.common.system.executive import Executive
+from webkitpy.common.system.outputcapture import OutputCapture
+from webkitpy.thirdparty.mock import Mock
+from webkitpy.tool.mocktool import MockOptions
+from webkitpy.common.system.executive_mock import MockExecutive
+
+
+# FIXME: Other unit tests probably want this class.
+class _TemporaryDirectory(object):
+ def __init__(self, **kwargs):
+ self._kwargs = kwargs
+ self._directory_path = None
+
+ def __enter__(self):
+ self._directory_path = tempfile.mkdtemp(**self._kwargs)
+ return self._directory_path
+
+ def __exit__(self, type, value, traceback):
+ os.rmdir(self._directory_path)
+
+
+# Note: All tests should use this class instead of Credentials directly to avoid using a real Executive.
+class MockedCredentials(Credentials):
+ def __init__(self, *args, **kwargs):
+ if 'executive' not in kwargs:
+ kwargs['executive'] = MockExecutive()
+ Credentials.__init__(self, *args, **kwargs)
+
+
+class CredentialsTest(unittest.TestCase):
+ example_security_output = """keychain: "/Users/test/Library/Keychains/login.keychain"
+class: "inet"
+attributes:
+ 0x00000007 <blob>="bugs.webkit.org (test@webkit.org)"
+ 0x00000008 <blob>=<NULL>
+ "acct"<blob>="test@webkit.org"
+ "atyp"<blob>="form"
+ "cdat"<timedate>=0x32303039303832353233353231365A00 "20090825235216Z\000"
+ "crtr"<uint32>=<NULL>
+ "cusi"<sint32>=<NULL>
+ "desc"<blob>="Web form password"
+ "icmt"<blob>="default"
+ "invi"<sint32>=<NULL>
+ "mdat"<timedate>=0x32303039303930393137323635315A00 "20090909172651Z\000"
+ "nega"<sint32>=<NULL>
+ "path"<blob>=<NULL>
+ "port"<uint32>=0x00000000
+ "prot"<blob>=<NULL>
+ "ptcl"<uint32>="htps"
+ "scrp"<sint32>=<NULL>
+ "sdmn"<blob>=<NULL>
+ "srvr"<blob>="bugs.webkit.org"
+ "type"<uint32>=<NULL>
+password: "SECRETSAUCE"
+"""
+
+ def test_keychain_lookup_on_non_mac(self):
+ class FakeCredentials(MockedCredentials):
+ def _is_mac_os_x(self):
+ return False
+ credentials = FakeCredentials("bugs.webkit.org")
+ self.assertEqual(credentials._is_mac_os_x(), False)
+ self.assertEqual(credentials._credentials_from_keychain("foo"), ["foo", None])
+
+ def test_security_output_parse(self):
+ credentials = MockedCredentials("bugs.webkit.org")
+ self.assertEqual(credentials._parse_security_tool_output(self.example_security_output), ["test@webkit.org", "SECRETSAUCE"])
+
+ def test_security_output_parse_entry_not_found(self):
+ # FIXME: This test won't work if the user has a credential for foo.example.com!
+ credentials = Credentials("foo.example.com")
+ if not credentials._is_mac_os_x():
+ return # This test does not run on a non-Mac.
+
+ # Note, we ignore the captured output because it is already covered
+ # by the test case CredentialsTest._assert_security_call (below).
+ outputCapture = OutputCapture()
+ outputCapture.capture_output()
+ self.assertEqual(credentials._run_security_tool(), None)
+ outputCapture.restore_output()
+
+ def _assert_security_call(self, username=None):
+ executive_mock = Mock()
+ credentials = MockedCredentials("example.com", executive=executive_mock)
+
+ expected_stderr = "Reading Keychain for example.com account and password. Click \"Allow\" to continue...\n"
+ OutputCapture().assert_outputs(self, credentials._run_security_tool, [username], expected_stderr=expected_stderr)
+
+ security_args = ["/usr/bin/security", "find-internet-password", "-g", "-s", "example.com"]
+ if username:
+ security_args += ["-a", username]
+ executive_mock.run_command.assert_called_with(security_args)
+
+ def test_security_calls(self):
+ self._assert_security_call()
+ self._assert_security_call(username="foo")
+
+ def test_credentials_from_environment(self):
+ credentials = MockedCredentials("example.com")
+
+ saved_environ = os.environ.copy()
+ os.environ['WEBKIT_BUGZILLA_USERNAME'] = "foo"
+ os.environ['WEBKIT_BUGZILLA_PASSWORD'] = "bar"
+ username, password = credentials._credentials_from_environment()
+ self.assertEquals(username, "foo")
+ self.assertEquals(password, "bar")
+ os.environ = saved_environ
+
+ def test_read_credentials_without_git_repo(self):
+ # FIXME: This should share more code with test_keyring_without_git_repo
+ class FakeCredentials(MockedCredentials):
+ def _is_mac_os_x(self):
+ return True
+
+ def _credentials_from_keychain(self, username):
+ return ("test@webkit.org", "SECRETSAUCE")
+
+ def _credentials_from_environment(self):
+ return (None, None)
+
+ with _TemporaryDirectory(suffix="not_a_git_repo") as temp_dir_path:
+ credentials = FakeCredentials("bugs.webkit.org", cwd=temp_dir_path)
+ # FIXME: Using read_credentials here seems too broad as higher-priority
+ # credential sources could be affected by the user's environment.
+ self.assertEqual(credentials.read_credentials(), ("test@webkit.org", "SECRETSAUCE"))
+
+
+ def test_keyring_without_git_repo(self):
+ # FIXME: This should share more code with test_read_credentials_without_git_repo
+ class MockKeyring(object):
+ def get_password(self, host, username):
+ return "NOMNOMNOM"
+
+ class FakeCredentials(MockedCredentials):
+ def _is_mac_os_x(self):
+ return True
+
+ def _credentials_from_keychain(self, username):
+ return ("test@webkit.org", None)
+
+ def _credentials_from_environment(self):
+ return (None, None)
+
+ with _TemporaryDirectory(suffix="not_a_git_repo") as temp_dir_path:
+ credentials = FakeCredentials("fake.hostname", cwd=temp_dir_path, keyring=MockKeyring())
+ # FIXME: Using read_credentials here seems too broad as higher-priority
+ # credential sources could be affected by the user's environment.
+ self.assertEqual(credentials.read_credentials(), ("test@webkit.org", "NOMNOMNOM"))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/Tools/Scripts/webkitpy/common/net/failuremap.py b/Tools/Scripts/webkitpy/common/net/failuremap.py
new file mode 100644
index 000000000..746242e14
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/failuremap.py
@@ -0,0 +1,88 @@
+# Copyright (C) 2010 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+# FIXME: This probably belongs in the buildbot module.
+class FailureMap(object):
+ def __init__(self):
+ self._failures = []
+
+ def add_regression_window(self, builder, regression_window):
+ self._failures.append({
+ 'builder': builder,
+ 'regression_window': regression_window,
+ })
+
+ def is_empty(self):
+ return not self._failures
+
+ def failing_revisions(self):
+ failing_revisions = [failure_info['regression_window'].revisions()
+ for failure_info in self._failures]
+ return sorted(set(sum(failing_revisions, [])))
+
+ def builders_failing_for(self, revision):
+ return self._builders_failing_because_of([revision])
+
+ def tests_failing_for(self, revision):
+ tests = [failure_info['regression_window'].failing_tests()
+ for failure_info in self._failures
+ if revision in failure_info['regression_window'].revisions()
+ and failure_info['regression_window'].failing_tests()]
+ result = set()
+ for test in tests:
+ result = result.union(test)
+ return sorted(result)
+
+ def failing_tests(self):
+ return set(sum([self.tests_failing_for(revision) for revision in self.failing_revisions()], []))
+
+ def _old_failures(self, is_old_failure):
+ return filter(lambda revision: is_old_failure(revision),
+ self.failing_revisions())
+
+ def _builders_failing_because_of(self, revisions):
+ revision_set = set(revisions)
+ return [failure_info['builder'] for failure_info in self._failures
+ if revision_set.intersection(
+ failure_info['regression_window'].revisions())]
+
+ # FIXME: We should re-process old failures after some time delay.
+ # https://bugs.webkit.org/show_bug.cgi?id=36581
+ def filter_out_old_failures(self, is_old_failure):
+ old_failures = self._old_failures(is_old_failure)
+ old_failing_builder_names = set([builder.name()
+ for builder in self._builders_failing_because_of(old_failures)])
+
+ # We filter out all the failing builders that could have been caused
+ # by old_failures. We could miss some new failures this way, but
+ # empirically, this reduces the amount of spam we generate.
+ failures = self._failures
+ self._failures = [failure_info for failure_info in failures
+ if failure_info['builder'].name() not in old_failing_builder_names]
+ self._cache = {}
diff --git a/Tools/Scripts/webkitpy/common/net/failuremap_unittest.py b/Tools/Scripts/webkitpy/common/net/failuremap_unittest.py
new file mode 100644
index 000000000..9a66d9ede
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/failuremap_unittest.py
@@ -0,0 +1,80 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import unittest
+
+from webkitpy.common.net.buildbot import Build
+from webkitpy.common.net.failuremap import *
+from webkitpy.common.net.regressionwindow import RegressionWindow
+from webkitpy.common.net.buildbot.buildbot_mock import MockBuilder
+
+
+class FailureMapTest(unittest.TestCase):
+ builder1 = MockBuilder("Builder1")
+ builder2 = MockBuilder("Builder2")
+
+ build1a = Build(builder1, build_number=22, revision=1233, is_green=True)
+ build1b = Build(builder1, build_number=23, revision=1234, is_green=False)
+ build2a = Build(builder2, build_number=89, revision=1233, is_green=True)
+ build2b = Build(builder2, build_number=90, revision=1235, is_green=False)
+
+ regression_window1 = RegressionWindow(build1a, build1b, failing_tests=[u'test1', u'test1'])
+ regression_window2 = RegressionWindow(build2a, build2b, failing_tests=[u'test1'])
+
+ def _make_failure_map(self):
+ failure_map = FailureMap()
+ failure_map.add_regression_window(self.builder1, self.regression_window1)
+ failure_map.add_regression_window(self.builder2, self.regression_window2)
+ return failure_map
+
+ def test_failing_revisions(self):
+ failure_map = self._make_failure_map()
+ self.assertEquals(failure_map.failing_revisions(), [1234, 1235])
+
+ def test_new_failures(self):
+ failure_map = self._make_failure_map()
+ failure_map.filter_out_old_failures(lambda revision: False)
+ self.assertEquals(failure_map.failing_revisions(), [1234, 1235])
+
+ def test_new_failures_with_old_revisions(self):
+ failure_map = self._make_failure_map()
+ failure_map.filter_out_old_failures(lambda revision: revision == 1234)
+ self.assertEquals(failure_map.failing_revisions(), [])
+
+ def test_new_failures_with_more_old_revisions(self):
+ failure_map = self._make_failure_map()
+ failure_map.filter_out_old_failures(lambda revision: revision == 1235)
+ self.assertEquals(failure_map.failing_revisions(), [1234])
+
+ def test_tests_failing_for(self):
+ failure_map = self._make_failure_map()
+ self.assertEquals(failure_map.tests_failing_for(1234), [u'test1'])
+
+ def test_failing_tests(self):
+ failure_map = self._make_failure_map()
+ self.assertEquals(failure_map.failing_tests(), set([u'test1']))
diff --git a/Tools/Scripts/webkitpy/common/net/file_uploader.py b/Tools/Scripts/webkitpy/common/net/file_uploader.py
new file mode 100644
index 000000000..66e49e866
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/file_uploader.py
@@ -0,0 +1,114 @@
+#!/usr/bin/env python
+# Copyright (C) 2010 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from __future__ import with_statement
+
+import codecs
+import mimetypes
+import socket
+import urllib2
+
+from webkitpy.common.net.networktransaction import NetworkTransaction
+
+
+def get_mime_type(filename):
+ return mimetypes.guess_type(filename)[0] or 'application/octet-stream'
+
+
+# FIXME: Rather than taking tuples, this function should take more structured data.
+def _encode_multipart_form_data(fields, files):
+ """Encode form fields for multipart/form-data.
+
+ Args:
+ fields: A sequence of (name, value) elements for regular form fields.
+ files: A sequence of (name, filename, value) elements for data to be
+ uploaded as files.
+ Returns:
+ (content_type, body) ready for httplib.HTTP instance.
+
+ Source:
+ http://code.google.com/p/rietveld/source/browse/trunk/upload.py
+ """
+ BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
+ CRLF = '\r\n'
+ lines = []
+
+ for key, value in fields:
+ lines.append('--' + BOUNDARY)
+ lines.append('Content-Disposition: form-data; name="%s"' % key)
+ lines.append('')
+ if isinstance(value, unicode):
+ value = value.encode('utf-8')
+ lines.append(value)
+
+ for key, filename, value in files:
+ lines.append('--' + BOUNDARY)
+ lines.append('Content-Disposition: form-data; name="%s"; filename="%s"' % (key, filename))
+ lines.append('Content-Type: %s' % get_mime_type(filename))
+ lines.append('')
+ if isinstance(value, unicode):
+ value = value.encode('utf-8')
+ lines.append(value)
+
+ lines.append('--' + BOUNDARY + '--')
+ lines.append('')
+ body = CRLF.join(lines)
+ content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
+ return content_type, body
+
+
+class FileUploader(object):
+ def __init__(self, url):
+ self._url = url
+
+ def _upload_files(self, attrs, file_objs):
+ # FIXME: We should use the same variable names for the formal and actual parameters.
+ content_type, data = _encode_multipart_form_data(attrs, file_objs)
+ headers = {
+ "Content-Type": content_type,
+ }
+ # FIXME: We should talk to the network via a Host object.
+ request = urllib2.Request(self._url, data, headers)
+ urllib2.urlopen(request)
+
+ def upload(self, params, files, timeout_seconds):
+ file_objs = []
+ for filename, path in files:
+ # FIXME: We should talk to the filesystem via a Host object.
+ with codecs.open(path, "rb") as file:
+ file_objs.append(('file', filename, file.read()))
+
+ orig_timeout = socket.getdefaulttimeout()
+ try:
+ # FIXME: We shouldn't mutate global static state.
+ socket.setdefaulttimeout(timeout_seconds)
+ NetworkTransaction(timeout_seconds=timeout_seconds).run(
+ lambda: self._upload_files(params, file_objs))
+ finally:
+ socket.setdefaulttimeout(orig_timeout)
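A rough illustration of the multipart helper above: it encodes one ordinary form field and one in-memory file; the field name, filename and contents are made up, and the snippet is not part of the imported patch.

from webkitpy.common.net.file_uploader import _encode_multipart_form_data, get_mime_type

fields = [("builder", "Qt Linux Release")]            # (name, value) pairs
files = [("file", "results.html", "<html></html>")]   # (name, filename, value) triples

content_type, body = _encode_multipart_form_data(fields, files)
# content_type is 'multipart/form-data; boundary=-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
# and body contains one Content-Disposition section per field and per file.
print get_mime_type("results.html")  # -> text/html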
diff --git a/Tools/Scripts/webkitpy/common/net/htdigestparser.py b/Tools/Scripts/webkitpy/common/net/htdigestparser.py
new file mode 100644
index 000000000..ee7d5405d
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/htdigestparser.py
@@ -0,0 +1,54 @@
+# Copyright (C) 2011 Apple Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+# 1. Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# 2. Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND
+# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
+# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""htdigestparser - a parser for htdigest files"""
+
+import hashlib
+import string
+
+
+class HTDigestParser(object):
+ def __init__(self, digest_file):
+ self._entries = self.parse_file(digest_file)
+
+ def authenticate(self, username, realm, password):
+ hashed_password = hashlib.md5(':'.join((username, realm, password))).hexdigest()
+ return [username, realm, hashed_password] in self.entries()
+
+ def entries(self):
+ return self._entries
+
+ def parse_file(self, digest_file):
+ entries = [line.rstrip().split(':') for line in digest_file]
+
+ # Perform some sanity-checking to ensure the file is valid.
+ valid_characters = set(string.hexdigits)
+ for entry in entries:
+ if len(entry) != 3:
+ return []
+ hashed_password = entry[-1]
+ if len(hashed_password) != 32:
+ return []
+ if not set(hashed_password).issubset(valid_characters):
+ return []
+
+ return entries
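A small usage sketch, assuming an in-memory digest file whose single entry hashes md5('user:realm:secret'); the credentials are invented and the snippet is not part of the imported patch.

import hashlib
import StringIO

from webkitpy.common.net.htdigestparser import HTDigestParser

# Each htdigest line has the form username:realm:md5(username:realm:password).
digest_line = "user:realm:%s\n" % hashlib.md5("user:realm:secret").hexdigest()
parser = HTDigestParser(StringIO.StringIO(digest_line))

print parser.authenticate("user", "realm", "secret")  # -> True
print parser.authenticate("user", "realm", "wrong")   # -> False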
diff --git a/Tools/Scripts/webkitpy/common/net/htdigestparser_unittest.py b/Tools/Scripts/webkitpy/common/net/htdigestparser_unittest.py
new file mode 100644
index 000000000..a2a4ac938
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/htdigestparser_unittest.py
@@ -0,0 +1,82 @@
+# Copyright (C) 2011 Apple Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+# 1. Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# 2. Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND
+# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
+# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import StringIO
+import os
+import unittest
+
+from webkitpy.common.net.htdigestparser import HTDigestParser
+
+
+class HTDigestParserTest(unittest.TestCase):
+ def assertEntriesEqual(self, entries, additional_content=None):
+ digest_file = self.fake_htdigest_file()
+ if additional_content is not None:
+ digest_file.seek(pos=0, mode=os.SEEK_END)
+ digest_file.write(additional_content)
+ digest_file.seek(pos=0, mode=os.SEEK_SET)
+ self.assertEqual(entries, HTDigestParser(digest_file).entries())
+
+ def test_authenticate(self):
+ htdigest = HTDigestParser(self.fake_htdigest_file())
+ self.assertTrue(htdigest.authenticate('user1', 'realm 1', 'password1'))
+ self.assertTrue(htdigest.authenticate('user2', 'realm 2', 'password2'))
+ self.assertTrue(htdigest.authenticate('user3', 'realm 1', 'password3'))
+ self.assertTrue(htdigest.authenticate('user3', 'realm 3', 'password3'))
+
+ self.assertFalse(htdigest.authenticate('user1', 'realm', 'password1'))
+ self.assertFalse(htdigest.authenticate('user1', 'realm 2', 'password1'))
+ self.assertFalse(htdigest.authenticate('user2', 'realm 2', 'password1'))
+ self.assertFalse(htdigest.authenticate('user2', 'realm 1', 'password1'))
+ self.assertFalse(htdigest.authenticate('', '', ''))
+
+ def test_entries(self):
+ entries = [
+ ['user1', 'realm 1', '36b8aa27fa5e9051095d37b619f92762'],
+ ['user2', 'realm 2', '14f827686fa97778f02fe1314a3337c8'],
+ ['user3', 'realm 1', '1817fc8a24119cc57fbafc8a630ea5a5'],
+ ['user3', 'realm 3', 'a05f5a2335e9d87bbe75bbe5e53248f0'],
+ ]
+ self.assertEntriesEqual(entries)
+ self.assertEntriesEqual(entries, additional_content='')
+
+ def test_empty_file(self):
+ self.assertEqual([], HTDigestParser(StringIO.StringIO()).entries())
+
+ def test_too_few_colons(self):
+ self.assertEntriesEqual([], additional_content='user1:realm 1\n')
+
+ def test_too_many_colons(self):
+ self.assertEntriesEqual([], additional_content='user1:realm 1:36b8aa27fa5e9051095d37b619f92762:garbage\n')
+
+ def test_invalid_hash(self):
+ self.assertEntriesEqual([], additional_content='user1:realm 1:36b8aa27fa5e9051095d37b619f92762000000\n')
+ self.assertEntriesEqual([], additional_content='user1:realm 1:36b8aa27fa5e9051095d37b619f9276\n')
+ self.assertEntriesEqual([], additional_content='user1:realm 1:36b8aa27fa5e9051095d37b619f9276z\n')
+ self.assertEntriesEqual([], additional_content='user1:realm 1: 36b8aa27fa5e9051095d37b619f92762\n')
+
+ def fake_htdigest_file(self):
+ return StringIO.StringIO("""user1:realm 1:36b8aa27fa5e9051095d37b619f92762
+user2:realm 2:14f827686fa97778f02fe1314a3337c8
+user3:realm 1:1817fc8a24119cc57fbafc8a630ea5a5
+user3:realm 3:a05f5a2335e9d87bbe75bbe5e53248f0
+""")
diff --git a/Tools/Scripts/webkitpy/common/net/irc/__init__.py b/Tools/Scripts/webkitpy/common/net/irc/__init__.py
new file mode 100644
index 000000000..ef65bee5b
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/irc/__init__.py
@@ -0,0 +1 @@
+# Required for Python to search this directory for module files
diff --git a/Tools/Scripts/webkitpy/common/net/irc/irc_mock.py b/Tools/Scripts/webkitpy/common/net/irc/irc_mock.py
new file mode 100644
index 000000000..734be0670
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/irc/irc_mock.py
@@ -0,0 +1,37 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from webkitpy.common.system.deprecated_logging import log
+
+
+class MockIRC(object):
+ def post(self, message):
+ log("MOCK: irc.post: %s" % message)
+
+ def disconnect(self):
+ log("MOCK: irc.disconnect")
diff --git a/Tools/Scripts/webkitpy/common/net/irc/ircbot.py b/Tools/Scripts/webkitpy/common/net/irc/ircbot.py
new file mode 100644
index 000000000..061a43cf6
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/irc/ircbot.py
@@ -0,0 +1,91 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from webkitpy.common.config import irc as config_irc
+
+from webkitpy.common.thread.messagepump import MessagePump, MessagePumpDelegate
+from webkitpy.thirdparty.autoinstalled.irc import ircbot
+from webkitpy.thirdparty.autoinstalled.irc import irclib
+
+
+class IRCBotDelegate(object):
+ def irc_message_received(self, nick, message):
+ raise NotImplementedError, "subclasses must implement"
+
+ def irc_nickname(self):
+ raise NotImplementedError, "subclasses must implement"
+
+ def irc_password(self):
+ raise NotImplementedError, "subclasses must implement"
+
+
+class IRCBot(ircbot.SingleServerIRCBot, MessagePumpDelegate):
+ # FIXME: We should get this information from a config file.
+ def __init__(self,
+ message_queue,
+ delegate):
+ self._message_queue = message_queue
+ self._delegate = delegate
+ ircbot.SingleServerIRCBot.__init__(
+ self,
+ [(
+ config_irc.server,
+ config_irc.port,
+ self._delegate.irc_password()
+ )],
+ self._delegate.irc_nickname(),
+ self._delegate.irc_nickname())
+ self._channel = config_irc.channel
+
+ # ircbot.SingleServerIRCBot methods
+
+ def on_nicknameinuse(self, connection, event):
+ connection.nick(connection.get_nickname() + "_")
+
+ def on_welcome(self, connection, event):
+ connection.join(self._channel)
+ self._message_pump = MessagePump(self, self._message_queue)
+
+ def on_pubmsg(self, connection, event):
+ nick = irclib.nm_to_n(event.source())
+ request = event.arguments()[0].split(":", 1)
+ if len(request) > 1 and irclib.irc_lower(request[0]) == irclib.irc_lower(self.connection.get_nickname()):
+ response = self._delegate.irc_message_received(nick, request[1])
+ if response:
+ connection.privmsg(self._channel, response)
+
+ # MessagePumpDelegate methods
+
+ def schedule(self, interval, callback):
+ self.connection.execute_delayed(interval, callback)
+
+ def message_available(self, message):
+ self.connection.privmsg(self._channel, message)
+
+ def final_message_delivered(self):
+ self.die()
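A minimal sketch of the delegate interface IRCBot expects, with invented nickname and password values; a real delegate would read these from configuration, and the snippet is not part of the imported patch.

from webkitpy.common.net.irc.ircbot import IRCBotDelegate


class EchoDelegate(IRCBotDelegate):
    def irc_message_received(self, nick, message):
        # 'message' is the text that followed "<nickname>:" in the channel.
        return "%s: you said%s" % (nick, message)

    def irc_nickname(self):
        return "examplebot"  # illustrative

    def irc_password(self):
        return "example-password"  # illustrative

Such a delegate can then be handed to IRCProxy (defined in ircproxy.py below), which runs the bot on a daemon thread and feeds it messages through a ThreadedMessageQueue.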
diff --git a/Tools/Scripts/webkitpy/common/net/irc/ircproxy.py b/Tools/Scripts/webkitpy/common/net/irc/ircproxy.py
new file mode 100644
index 000000000..13348b4af
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/irc/ircproxy.py
@@ -0,0 +1,62 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import threading
+
+from webkitpy.common.net.irc.ircbot import IRCBot
+from webkitpy.common.thread.threadedmessagequeue import ThreadedMessageQueue
+from webkitpy.common.system.deprecated_logging import log
+
+
+class _IRCThread(threading.Thread):
+ def __init__(self, message_queue, irc_delegate, irc_bot):
+ threading.Thread.__init__(self)
+ self.setDaemon(True)
+ self._message_queue = message_queue
+ self._irc_delegate = irc_delegate
+ self._irc_bot = irc_bot
+
+ def run(self):
+ bot = self._irc_bot(self._message_queue, self._irc_delegate)
+ bot.start()
+
+
+class IRCProxy(object):
+ def __init__(self, irc_delegate, irc_bot=IRCBot):
+ log("Connecting to IRC")
+ self._message_queue = ThreadedMessageQueue()
+ self._child_thread = _IRCThread(self._message_queue, irc_delegate, irc_bot)
+ self._child_thread.start()
+
+ def post(self, message):
+ self._message_queue.post(message)
+
+ def disconnect(self):
+ log("Disconnecting from IRC...")
+ self._message_queue.stop()
+ self._child_thread.join()
diff --git a/Tools/Scripts/webkitpy/common/net/irc/ircproxy_unittest.py b/Tools/Scripts/webkitpy/common/net/irc/ircproxy_unittest.py
new file mode 100644
index 000000000..b44ce400b
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/irc/ircproxy_unittest.py
@@ -0,0 +1,43 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import unittest
+
+from webkitpy.common.net.irc.ircproxy import IRCProxy
+from webkitpy.common.system.outputcapture import OutputCapture
+from webkitpy.thirdparty.mock import Mock
+
+class IRCProxyTest(unittest.TestCase):
+ def test_trivial(self):
+ def fun():
+ proxy = IRCProxy(Mock(), Mock())
+ proxy.post("hello")
+ proxy.disconnect()
+
+ expected_stderr = "Connecting to IRC\nDisconnecting from IRC...\n"
+ OutputCapture().assert_outputs(self, fun, expected_stderr=expected_stderr)
diff --git a/Tools/Scripts/webkitpy/common/net/layouttestresults.py b/Tools/Scripts/webkitpy/common/net/layouttestresults.py
new file mode 100644
index 000000000..05f8215d0
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/layouttestresults.py
@@ -0,0 +1,175 @@
+# Copyright (c) 2010, Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# A module for parsing results.html files generated by old-run-webkit-tests.
+# This module is one big hack and only needs to exist until we transition to new-run-webkit-tests.
+
+from webkitpy.common.net.resultsjsonparser import ResultsJSONParser
+from webkitpy.common.system.deprecated_logging import log
+from webkitpy.thirdparty.BeautifulSoup import BeautifulSoup, SoupStrainer
+from webkitpy.layout_tests.models import test_results
+from webkitpy.layout_tests.models import test_failures
+
+
+# FIXME: This should be unified with all the layout test results code in the layout_tests package
+# This doesn't belong in common.net, but we don't have a better place for it yet.
+def path_for_layout_test(test_name):
+ return "LayoutTests/%s" % test_name
+
+
+class ORWTResultsHTMLParser(object):
+ """This class knows how to parse old-run-webkit-tests results.html files."""
+
+ stderr_key = u'Tests that had stderr output:'
+ fail_key = u'Tests where results did not match expected results:'
+ timeout_key = u'Tests that timed out:'
+ # FIXME: This may need to be made aware of WebKitTestRunner results for WebKit2.
+ crash_key = u'Tests that caused the DumpRenderTree tool to crash:'
+ missing_key = u'Tests that had no expected results (probably new):'
+ webprocess_crash_key = u'Tests that caused the Web process to crash:'
+
+ expected_keys = [
+ stderr_key,
+ fail_key,
+ crash_key,
+ webprocess_crash_key,
+ timeout_key,
+ missing_key,
+ ]
+
+ @classmethod
+    def _failures_from_fail_row(cls, row):
+ # Look at all anchors in this row, and guess what type
+ # of new-run-webkit-test failures they equate to.
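+        # For example, a row whose extra anchors say "expected image" or show a
+        # percentage diff maps to FailureImageHashMismatch, while anchors named
+        # "expected", "actual", "diff" or "pretty diff" map to FailureTextMismatch.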
+ failures = set()
+ test_name = None
+ for anchor in row.findAll("a"):
+ anchor_text = unicode(anchor.string)
+ if not test_name:
+ test_name = anchor_text
+ continue
+ if anchor_text in ["expected image", "image diffs"] or '%' in anchor_text:
+ failures.add(test_failures.FailureImageHashMismatch())
+ elif anchor_text in ["expected", "actual", "diff", "pretty diff"]:
+ failures.add(test_failures.FailureTextMismatch())
+ else:
+ log("Unhandled link text in results.html parsing: %s. Please file a bug against webkitpy." % anchor_text)
+        # FIXME: It's possible the row contained no links due to ORWT brokenness.
+ # We should probably assume some type of failure anyway.
+ return failures
+
+ @classmethod
+ def _failures_from_row(cls, row, table_title):
+ if table_title == cls.fail_key:
+ return cls._failures_from_fail_row(row)
+ if table_title == cls.crash_key:
+ return [test_failures.FailureCrash()]
+ if table_title == cls.webprocess_crash_key:
+ return [test_failures.FailureCrash(process_name="WebProcess")]
+ if table_title == cls.timeout_key:
+ return [test_failures.FailureTimeout()]
+ if table_title == cls.missing_key:
+ return [test_failures.FailureMissingResult(), test_failures.FailureMissingImageHash(), test_failures.FailureMissingImage()]
+ return None
+
+ @classmethod
+ def _test_result_from_row(cls, row, table_title):
+ test_name = unicode(row.find("a").string)
+ failures = cls._failures_from_row(row, table_title)
+ # TestResult is a class designed to work with new-run-webkit-tests.
+ # old-run-webkit-tests does not save quite enough information in results.html for us to parse.
+ # FIXME: It's unclear if test_name should include LayoutTests or not.
+ return test_results.TestResult(test_name, failures)
+
+ @classmethod
+ def _parse_results_table(cls, table):
+ table_title = unicode(table.findPreviousSibling("p").string)
+ if table_title not in cls.expected_keys:
+ # This Exception should only ever be hit if run-webkit-tests changes its results.html format.
+ raise Exception("Unhandled title: %s" % table_title)
+ # Ignore stderr failures. Everyone ignores them anyway.
+ if table_title == cls.stderr_key:
+ return []
+        # FIXME: We might end up with two TestResult objects for the same test if it appears in more than one row.
+ return [cls._test_result_from_row(row, table_title) for row in table.findAll("tr")]
+
+ @classmethod
+ def parse_results_html(cls, page):
+ tables = BeautifulSoup(page).findAll("table")
+ return sum([cls._parse_results_table(table) for table in tables], [])
+
+
+# FIXME: This should be unified with ResultsSummary or other NRWT layout tests code
+# in the layout_tests package.
+# This doesn't belong in common.net, but we don't have a better place for it yet.
+class LayoutTestResults(object):
+ @classmethod
+ def results_from_string(cls, string):
+ if not string:
+ return None
+        # For now we try to parse as JSON first, then fall back to results.html.
+        # Eventually we will remove the HTML fallback support.
+        results = ResultsJSONParser.parse_results_json(string)
+        if not results:
+            results = ORWTResultsHTMLParser.parse_results_html(string)
+        if not results:
+            return None
+        return cls(results)
+
+ def __init__(self, test_results):
+ self._test_results = test_results
+ self._failure_limit_count = None
+
+ # FIXME: run-webkit-tests should store the --exit-after-N-failures value
+ # (or some indication of early exit) somewhere in the results.html/results.json
+ # file. Until it does, callers should set the limit to
+ # --exit-after-N-failures value used in that run. Consumers of LayoutTestResults
+ # may use that value to know if absence from the failure list means PASS.
+ # https://bugs.webkit.org/show_bug.cgi?id=58481
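+    # For example, a run made with an --exit-after-N-failures value of 30 would
+    # warrant calling set_failure_limit_count(30) on the parsed results.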
+ def set_failure_limit_count(self, limit):
+ self._failure_limit_count = limit
+
+ def failure_limit_count(self):
+ return self._failure_limit_count
+
+ def test_results(self):
+ return self._test_results
+
+ def results_matching_failure_types(self, failure_types):
+ return [result for result in self._test_results if result.has_failure_matching_types(*failure_types)]
+
+ def tests_matching_failure_types(self, failure_types):
+ return [result.test_name for result in self.results_matching_failure_types(failure_types)]
+
+ def failing_test_results(self):
+ # These should match the "fail", "crash", and "timeout" keys.
+ failure_types = [test_failures.FailureTextMismatch, test_failures.FailureImageHashMismatch, test_failures.FailureCrash, test_failures.FailureTimeout]
+ return self.results_matching_failure_types(failure_types)
+
+ def failing_tests(self):
+ return [result.test_name for result in self.failing_test_results()]
diff --git a/Tools/Scripts/webkitpy/common/net/layouttestresults_unittest.py b/Tools/Scripts/webkitpy/common/net/layouttestresults_unittest.py
new file mode 100644
index 000000000..09352a275
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/layouttestresults_unittest.py
@@ -0,0 +1,146 @@
+# Copyright (c) 2010, Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import unittest
+
+from webkitpy.common.net.layouttestresults import LayoutTestResults, ORWTResultsHTMLParser
+from webkitpy.common.system.outputcapture import OutputCapture
+from webkitpy.layout_tests.models import test_results
+from webkitpy.layout_tests.models import test_failures
+from webkitpy.thirdparty.BeautifulSoup import BeautifulSoup
+
+
+class ORWTResultsHTMLParserTest(unittest.TestCase):
+ _example_results_html = """
+<html>
+<head>
+<title>Layout Test Results</title>
+</head>
+<body>
+<p>Tests that had stderr output:</p>
+<table>
+<tr>
+<td><a href="/var/lib/buildbot/build/gtk-linux-64-release/build/LayoutTests/accessibility/aria-activedescendant-crash.html">accessibility/aria-activedescendant-crash.html</a></td>
+<td><a href="accessibility/aria-activedescendant-crash-stderr.txt">stderr</a></td>
+</tr>
+<td><a href="/var/lib/buildbot/build/gtk-linux-64-release/build/LayoutTests/http/tests/security/canvas-remote-read-svg-image.html">http/tests/security/canvas-remote-read-svg-image.html</a></td>
+<td><a href="http/tests/security/canvas-remote-read-svg-image-stderr.txt">stderr</a></td>
+</tr>
+</table><p>Tests that had no expected results (probably new):</p>
+<table>
+<tr>
+<td><a href="/var/lib/buildbot/build/gtk-linux-64-release/build/LayoutTests/fast/repaint/no-caret-repaint-in-non-content-editable-element.html">fast/repaint/no-caret-repaint-in-non-content-editable-element.html</a></td>
+<td><a href="fast/repaint/no-caret-repaint-in-non-content-editable-element-actual.txt">result</a></td>
+</tr>
+</table></body>
+</html>
+"""
+
+ _example_results_html_with_failing_tests = """
+<html>
+<head>
+<title>Layout Test Results</title>
+</head>
+<body>
+<p>Tests where results did not match expected results:</p>
+<table>
+<tr>
+<td><a href="http://trac.webkit.org/export/91245/trunk/LayoutTests/compositing/plugins/composited-plugin.html">compositing/plugins/composited-plugin.html</a></td>
+<td>
+<a href="compositing/plugins/composited-plugin-expected.txt">expected</a>
+</td>
+<td>
+<a href="compositing/plugins/composited-plugin-actual.txt">actual</a>
+</td>
+<td>
+<a href="compositing/plugins/composited-plugin-diffs.txt">diff</a>
+</td>
+<td>
+<a href="compositing/plugins/composited-plugin-pretty-diff.html">pretty diff</a>
+</td>
+</tr>
+</table>
+<p>Tests that had stderr output:</p>
+<table>
+<tr>
+<td><a href="/var/lib/buildbot/build/gtk-linux-64-release/build/LayoutTests/accessibility/aria-activedescendant-crash.html">accessibility/aria-activedescendant-crash.html</a></td>
+<td><a href="accessibility/aria-activedescendant-crash-stderr.txt">stderr</a></td>
+</tr>
+<td><a href="/var/lib/buildbot/build/gtk-linux-64-release/build/LayoutTests/http/tests/security/canvas-remote-read-svg-image.html">http/tests/security/canvas-remote-read-svg-image.html</a></td>
+<td><a href="http/tests/security/canvas-remote-read-svg-image-stderr.txt">stderr</a></td>
+</tr>
+</table><p>Tests that had no expected results (probably new):</p>
+<table>
+<tr>
+<td><a href="/var/lib/buildbot/build/gtk-linux-64-release/build/LayoutTests/fast/repaint/no-caret-repaint-in-non-content-editable-element.html">fast/repaint/no-caret-repaint-in-non-content-editable-element.html</a></td>
+<td><a href="fast/repaint/no-caret-repaint-in-non-content-editable-element-actual.txt">result</a></td>
+</tr>
+</table></body>
+</html>
+"""
+
+ def test_parse_layout_test_results(self):
+ failures = [test_failures.FailureMissingResult(), test_failures.FailureMissingImageHash(), test_failures.FailureMissingImage()]
+ testname = 'fast/repaint/no-caret-repaint-in-non-content-editable-element.html'
+ expected_results = [test_results.TestResult(testname, failures)]
+
+ results = ORWTResultsHTMLParser.parse_results_html(self._example_results_html)
+ self.assertEqual(expected_results, results)
+
+
+ def test_failures_from_fail_row(self):
+ row = BeautifulSoup("<tr><td><a>test.hml</a></td><td><a>expected image</a></td><td><a>25%</a></td></tr>")
+        # _failures_from_fail_row treats the first anchor in the row as the test name and skips it.
+ failures = OutputCapture().assert_outputs(self, ORWTResultsHTMLParser._failures_from_fail_row, [row])
+ self.assertEqual(len(failures), 1)
+ self.assertEqual(type(sorted(failures)[0]), test_failures.FailureImageHashMismatch)
+
+ row = BeautifulSoup("<tr><td><a>test.hml</a><a>foo</a></td></tr>")
+ expected_stderr = "Unhandled link text in results.html parsing: foo. Please file a bug against webkitpy.\n"
+ OutputCapture().assert_outputs(self, ORWTResultsHTMLParser._failures_from_fail_row, [row], expected_stderr=expected_stderr)
+
+
+class LayoutTestResultsTest(unittest.TestCase):
+
+ def test_set_failure_limit_count(self):
+ results = LayoutTestResults([])
+        self.assertEqual(results.failure_limit_count(), None)
+        results.set_failure_limit_count(10)
+        self.assertEqual(results.failure_limit_count(), 10)
+
+ def test_results_from_string(self):
+ self.assertEqual(LayoutTestResults.results_from_string(None), None)
+ self.assertEqual(LayoutTestResults.results_from_string(""), None)
+ results = LayoutTestResults.results_from_string(ORWTResultsHTMLParserTest._example_results_html)
+ self.assertEqual(len(results.failing_tests()), 0)
+
+ def test_tests_matching_failure_types(self):
+ results = LayoutTestResults.results_from_string(ORWTResultsHTMLParserTest._example_results_html_with_failing_tests)
+ failing_tests = results.tests_matching_failure_types([test_failures.FailureTextMismatch])
+        self.assertEqual(len(failing_tests), 1)
+        self.assertEqual(len(results.failing_tests()), 1)
diff --git a/Tools/Scripts/webkitpy/common/net/networktransaction.py b/Tools/Scripts/webkitpy/common/net/networktransaction.py
new file mode 100644
index 000000000..c77989ba2
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/networktransaction.py
@@ -0,0 +1,70 @@
+# Copyright (C) 2010 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import logging
+import time
+import urllib2
+
+from webkitpy.common.system.deprecated_logging import log
+
+
+_log = logging.getLogger(__name__)
+
+
+class NetworkTimeout(Exception):
+ pass
+
+
+class NetworkTransaction(object):
+ def __init__(self, initial_backoff_seconds=10, grown_factor=1.5, timeout_seconds=(10 * 60), convert_404_to_None=False):
+ self._initial_backoff_seconds = initial_backoff_seconds
+ self._grown_factor = grown_factor
+ self._timeout_seconds = timeout_seconds
+ self._convert_404_to_None = convert_404_to_None
+
+ def run(self, request):
+ self._total_sleep = 0
+ self._backoff_seconds = self._initial_backoff_seconds
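+        # Retry loop with exponential backoff: each failed attempt sleeps for the
+        # current backoff and then multiplies it by grown_factor; NetworkTimeout is
+        # raised once the accumulated sleep would exceed timeout_seconds.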
+ while True:
+ try:
+ return request()
+ except urllib2.HTTPError, e:
+ if self._convert_404_to_None and e.code == 404:
+ return None
+ self._check_for_timeout()
+ _log.warn("Received HTTP status %s loading \"%s\". Retrying in %s seconds..." % (e.code, e.filename, self._backoff_seconds))
+ self._sleep()
+
+ def _check_for_timeout(self):
+ if self._total_sleep + self._backoff_seconds > self._timeout_seconds:
+ raise NetworkTimeout()
+
+ def _sleep(self):
+ time.sleep(self._backoff_seconds)
+ self._total_sleep += self._backoff_seconds
+ self._backoff_seconds *= self._grown_factor
diff --git a/Tools/Scripts/webkitpy/common/net/networktransaction_unittest.py b/Tools/Scripts/webkitpy/common/net/networktransaction_unittest.py
new file mode 100644
index 000000000..3302dec80
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/networktransaction_unittest.py
@@ -0,0 +1,94 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import unittest
+
+from webkitpy.common.net.networktransaction import NetworkTransaction, NetworkTimeout
+from webkitpy.common.system.logtesting import LoggingTestCase
+
+
+class NetworkTransactionTest(LoggingTestCase):
+ exception = Exception("Test exception")
+
+ def test_success(self):
+ transaction = NetworkTransaction()
+ self.assertEqual(transaction.run(lambda: 42), 42)
+
+ def _raise_exception(self):
+ raise self.exception
+
+ def test_exception(self):
+ transaction = NetworkTransaction()
+ did_process_exception = False
+ did_throw_exception = True
+ try:
+ transaction.run(lambda: self._raise_exception())
+ did_throw_exception = False
+ except Exception, e:
+ did_process_exception = True
+ self.assertEqual(e, self.exception)
+ self.assertTrue(did_throw_exception)
+ self.assertTrue(did_process_exception)
+
+ def _raise_500_error(self):
+ self._run_count += 1
+ if self._run_count < 3:
+ from webkitpy.thirdparty.autoinstalled.mechanize import HTTPError
+ raise HTTPError("http://example.com/", 500, "internal server error", None, None)
+ return 42
+
+ def _raise_404_error(self):
+ from webkitpy.thirdparty.autoinstalled.mechanize import HTTPError
+ raise HTTPError("http://foo.com/", 404, "not found", None, None)
+
+ def test_retry(self):
+ self._run_count = 0
+ transaction = NetworkTransaction(initial_backoff_seconds=0)
+ self.assertEqual(transaction.run(lambda: self._raise_500_error()), 42)
+ self.assertEqual(self._run_count, 3)
+ self.assertLog(['WARNING: Received HTTP status 500 loading "http://example.com/". '
+ 'Retrying in 0 seconds...\n',
+ 'WARNING: Received HTTP status 500 loading "http://example.com/". '
+ 'Retrying in 0.0 seconds...\n'])
+
+ def test_convert_404_to_None(self):
+ transaction = NetworkTransaction(convert_404_to_None=True)
+ self.assertEqual(transaction.run(lambda: self._raise_404_error()), None)
+
+ def test_timeout(self):
+ self._run_count = 0
+ transaction = NetworkTransaction(initial_backoff_seconds=60*60, timeout_seconds=60)
+ did_process_exception = False
+ did_throw_exception = True
+ try:
+ transaction.run(lambda: self._raise_500_error())
+ did_throw_exception = False
+ except NetworkTimeout, e:
+ did_process_exception = True
+ self.assertTrue(did_throw_exception)
+ self.assertTrue(did_process_exception)
diff --git a/Tools/Scripts/webkitpy/common/net/regressionwindow.py b/Tools/Scripts/webkitpy/common/net/regressionwindow.py
new file mode 100644
index 000000000..3960ba276
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/regressionwindow.py
@@ -0,0 +1,52 @@
+# Copyright (C) 2010 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+# FIXME: This probably belongs in the buildbot module.
+class RegressionWindow(object):
+ def __init__(self, build_before_failure, failing_build, failing_tests=None):
+ self._build_before_failure = build_before_failure
+ self._failing_build = failing_build
+ self._failing_tests = failing_tests
+ self._revisions = None
+
+ def build_before_failure(self):
+ return self._build_before_failure
+
+ def failing_build(self):
+ return self._failing_build
+
+ def failing_tests(self):
+ return self._failing_tests
+
+ def revisions(self):
+ # Cache revisions to avoid excessive allocations.
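+        # The list covers every revision after build_before_failure() up to and
+        # including failing_build(), in ascending order.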
+ if not self._revisions:
+ self._revisions = range(self._failing_build.revision(), self._build_before_failure.revision(), -1)
+ self._revisions.reverse()
+ return self._revisions
diff --git a/Tools/Scripts/webkitpy/common/net/resultsjsonparser.py b/Tools/Scripts/webkitpy/common/net/resultsjsonparser.py
new file mode 100644
index 000000000..6120713eb
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/resultsjsonparser.py
@@ -0,0 +1,152 @@
+# Copyright (c) 2010, Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+try:
+ import json
+except ImportError:
+ # python 2.5 compatibility
+ import webkitpy.thirdparty.simplejson as json
+
+from webkitpy.common.memoized import memoized
+from webkitpy.common.system.deprecated_logging import log
+# FIXME: common should never import from new-run-webkit-tests, one of these files needs to move.
+from webkitpy.layout_tests.layout_package import json_results_generator
+from webkitpy.layout_tests.models import test_expectations, test_results, test_failures
+
+
+# These are helper functions for navigating the results json structure.
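+# for_each_test walks the nested "tests" dictionary and calls handler with the
+# slash-joined test name once it reaches a leaf containing an "actual" entry,
+# e.g. {"fast": {"dom": {"a.html": {"actual": "PASS"}}}} results in
+# handler("fast/dom/a.html", {"actual": "PASS"}).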
+def for_each_test(tree, handler, prefix=''):
+ for key in tree:
+ new_prefix = (prefix + '/' + key) if prefix else key
+ if 'actual' not in tree[key]:
+ for_each_test(tree[key], handler, new_prefix)
+ else:
+ handler(new_prefix, tree[key])
+
+
+def result_for_test(tree, test):
+ parts = test.split('/')
+ for part in parts:
+ tree = tree[part]
+ return tree
+
+
+# Wrapper around the dictionaries returned from the json.
+# Eventually the .json should just serialize the TestFailure objects
+# directly and we won't need this.
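+# result_dict is the per-test leaf dictionary from the results json,
+# e.g. {"expected": "PASS", "actual": "IMAGE", "has_stderr": true}.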
+class JSONTestResult(object):
+ def __init__(self, test_name, result_dict):
+ self._test_name = test_name
+ self._result_dict = result_dict
+
+ def did_pass_or_run_as_expected(self):
+ return self.did_pass() or self.did_run_as_expected()
+
+ def did_pass(self):
+ return test_expectations.PASS in self._actual_as_tokens()
+
+ def did_run_as_expected(self):
+ actual_results = self._actual_as_tokens()
+ expected_results = self._expected_as_tokens()
+ # FIXME: We should only call remove_pixel_failures when this JSONResult
+ # came from a test run without pixel tests!
+ if not test_expectations.has_pixel_failures(actual_results):
+ expected_results = test_expectations.remove_pixel_failures(expected_results)
+ for actual_result in actual_results:
+ if not test_expectations.result_was_expected(actual_result, expected_results, False, False):
+ return False
+ return True
+
+ def _tokenize(self, results_string):
+ tokens = map(test_expectations.TestExpectations.expectation_from_string, results_string.split(' '))
+ if None in tokens:
+ log("Unrecognized result in %s" % results_string)
+ return set(tokens)
+
+ @memoized
+ def _actual_as_tokens(self):
+ actual_results = self._result_dict['actual']
+ return self._tokenize(actual_results)
+
+ @memoized
+ def _expected_as_tokens(self):
+        expected_results = self._result_dict['expected']
+        return self._tokenize(expected_results)
+
+ def _failure_types_from_actual_result(self, actual):
+ # FIXME: There doesn't seem to be a full list of all possible values of
+ # 'actual' anywhere. However JSONLayoutResultsGenerator.FAILURE_TO_CHAR
+ # is a useful reference as that's for "old" style results.json files
+ if actual == test_expectations.PASS:
+ return []
+ elif actual == test_expectations.TEXT:
+ return [test_failures.FailureTextMismatch()]
+ elif actual == test_expectations.IMAGE:
+ return [test_failures.FailureImageHashMismatch()]
+ elif actual == test_expectations.IMAGE_PLUS_TEXT:
+ return [test_failures.FailureImageHashMismatch(), test_failures.FailureTextMismatch()]
+ elif actual == test_expectations.AUDIO:
+ return [test_failures.FailureAudioMismatch()]
+ elif actual == test_expectations.TIMEOUT:
+ return [test_failures.FailureTimeout()]
+ elif actual == test_expectations.CRASH:
+ # NOTE: We don't know what process crashed from the json, just that a process crashed.
+ return [test_failures.FailureCrash()]
+ elif actual == test_expectations.MISSING:
+ return [test_failures.FailureMissingResult(), test_failures.FailureMissingImageHash(), test_failures.FailureMissingImage()]
+ else:
+ log("Failed to handle: %s" % self._result_dict['actual'])
+ return []
+
+ def _failures(self):
+ if self.did_pass():
+ return []
+ return sum(map(self._failure_types_from_actual_result, self._actual_as_tokens()), [])
+
+ def test_result(self):
+ # FIXME: Optionally pull in the test runtime from times_ms.json.
+ return test_results.TestResult(self._test_name, self._failures())
+
+
+class ResultsJSONParser(object):
+ @classmethod
+ def parse_results_json(cls, json_string):
+ if not json_results_generator.has_json_wrapper(json_string):
+ return None
+
+ content_string = json_results_generator.strip_json_wrapper(json_string)
+ json_dict = json.loads(content_string)
+
+ json_results = []
+ for_each_test(json_dict['tests'], lambda test, result: json_results.append(JSONTestResult(test, result)))
+
+ # FIXME: What's the short sexy python way to filter None?
+ # I would use [foo.bar() for foo in foos if foo.bar()] but bar() is expensive.
+ unexpected_failures = [result.test_result() for result in json_results if not result.did_pass_or_run_as_expected()]
+ return filter(lambda a: a, unexpected_failures)
diff --git a/Tools/Scripts/webkitpy/common/net/resultsjsonparser_unittest.py b/Tools/Scripts/webkitpy/common/net/resultsjsonparser_unittest.py
new file mode 100644
index 000000000..1a2cba511
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/resultsjsonparser_unittest.py
@@ -0,0 +1,96 @@
+# Copyright (c) 2010, Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import unittest
+
+from webkitpy.common.net.resultsjsonparser import ResultsJSONParser
+from webkitpy.layout_tests.models import test_results
+from webkitpy.layout_tests.models import test_failures
+
+
+class ResultsJSONParserTest(unittest.TestCase):
+ # The real files have no whitespace, but newlines make this much more readable.
+ _example_full_results_json = """ADD_RESULTS({
+ "tests": {
+ "fast": {
+ "dom": {
+ "prototype-inheritance.html": {
+ "expected": "PASS",
+ "actual": "TEXT"
+ },
+ "prototype-banana.html": {
+ "expected": "TEXT",
+ "actual": "PASS"
+ },
+ "prototype-taco.html": {
+ "expected": "PASS",
+ "actual": "PASS TEXT"
+ },
+ "prototype-chocolate.html": {
+ "expected": "TEXT",
+ "actual": "TEXT"
+ },
+ "prototype-strawberry.html": {
+ "expected": "PASS",
+ "actual": "TEXT PASS"
+ },
+ "prototype-peach.html": {
+ "expected": "IMAGE+TEXT",
+ "actual": "TEXT"
+ }
+ }
+ },
+ "svg": {
+ "dynamic-updates": {
+ "SVGFEDropShadowElement-dom-stdDeviation-attr.html": {
+ "expected": "PASS",
+ "actual": "IMAGE",
+ "has_stderr": true
+ }
+ }
+ }
+ },
+ "skipped": 450,
+ "num_regressions": 15,
+ "layout_tests_dir": "\/b\/build\/slave\/Webkit_Mac10_5\/build\/src\/third_party\/WebKit\/LayoutTests",
+ "version": 3,
+ "num_passes": 77,
+ "has_pretty_patch": false,
+ "fixable": 1220,
+ "num_flaky": 0,
+ "uses_expectations_file": true,
+ "has_wdiff": false
+});"""
+
+ def test_basic(self):
+ expected_results = [
+ test_results.TestResult("svg/dynamic-updates/SVGFEDropShadowElement-dom-stdDeviation-attr.html", [test_failures.FailureImageHashMismatch()], 0),
+ test_results.TestResult("fast/dom/prototype-inheritance.html", [test_failures.FailureTextMismatch()], 0),
+ ]
+ results = ResultsJSONParser.parse_results_json(self._example_full_results_json)
+ self.assertEqual(expected_results, results)
diff --git a/Tools/Scripts/webkitpy/common/net/statusserver.py b/Tools/Scripts/webkitpy/common/net/statusserver.py
new file mode 100644
index 000000000..60153f8a9
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/statusserver.py
@@ -0,0 +1,170 @@
+# Copyright (C) 2009 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# This is the client designed to talk to Tools/QueueStatusServer.
+
+from webkitpy.common.net.networktransaction import NetworkTransaction
+from webkitpy.common.system.deprecated_logging import log
+from webkitpy.thirdparty.BeautifulSoup import BeautifulSoup
+
+import logging
+import urllib2
+
+
+_log = logging.getLogger(__name__)
+
+
+class StatusServer(object):
+ # FIXME: This should probably move to common.config.urls.
+ default_host = "queues.webkit.org"
+
+ def __init__(self, host=default_host, browser=None, bot_id=None):
+ self.set_host(host)
+ from webkitpy.thirdparty.autoinstalled.mechanize import Browser
+ self._browser = browser or Browser()
+ self.set_bot_id(bot_id)
+
+ def set_host(self, host):
+ self.host = host
+ self.url = "http://%s" % self.host
+
+ def set_bot_id(self, bot_id):
+ self.bot_id = bot_id
+
+ def results_url_for_status(self, status_id):
+ return "%s/results/%s" % (self.url, status_id)
+
+ def _add_patch(self, patch):
+ if not patch:
+ return
+ if patch.bug_id():
+ self._browser["bug_id"] = unicode(patch.bug_id())
+ if patch.id():
+ self._browser["patch_id"] = unicode(patch.id())
+
+ def _add_results_file(self, results_file):
+ if not results_file:
+ return
+ self._browser.add_file(results_file, "text/plain", "results.txt", 'results_file')
+
+ # 500 is the AppEngine limit for TEXT fields (which most of our fields are).
+ # Exceeding the limit will result in a 500 error from the server.
+ def _set_field(self, field_name, value, limit=500):
+ if len(value) > limit:
+ _log.warn("Attempted to set %s to value exceeding %s characters, truncating." % (field_name, limit))
+ self._browser[field_name] = value[:limit]
+
+ def _post_status_to_server(self, queue_name, status, patch, results_file):
+ if results_file:
+ # We might need to re-wind the file if we've already tried to post it.
+ results_file.seek(0)
+
+ update_status_url = "%s/update-status" % self.url
+ self._browser.open(update_status_url)
+ self._browser.select_form(name="update_status")
+ self._browser["queue_name"] = queue_name
+ if self.bot_id:
+ self._browser["bot_id"] = self.bot_id
+ self._add_patch(patch)
+ self._set_field("status", status, limit=500)
+ self._add_results_file(results_file)
+ return self._browser.submit().read() # This is the id of the newly created status object.
+
+ def _post_svn_revision_to_server(self, svn_revision_number, broken_bot):
+ update_svn_revision_url = "%s/update-svn-revision" % self.url
+ self._browser.open(update_svn_revision_url)
+ self._browser.select_form(name="update_svn_revision")
+ self._browser["number"] = unicode(svn_revision_number)
+ self._browser["broken_bot"] = broken_bot
+ return self._browser.submit().read()
+
+ def _post_work_items_to_server(self, queue_name, work_items):
+ update_work_items_url = "%s/update-work-items" % self.url
+ self._browser.open(update_work_items_url)
+ self._browser.select_form(name="update_work_items")
+ self._browser["queue_name"] = queue_name
+ work_items = map(unicode, work_items) # .join expects strings
+ self._browser["work_items"] = " ".join(work_items)
+ return self._browser.submit().read()
+
+ def _post_work_item_to_ews(self, attachment_id):
+ submit_to_ews_url = "%s/submit-to-ews" % self.url
+ self._browser.open(submit_to_ews_url)
+ self._browser.select_form(name="submit_to_ews")
+ self._browser["attachment_id"] = unicode(attachment_id)
+ self._browser.submit()
+
+ def submit_to_ews(self, attachment_id):
+ _log.info("Submitting attachment %s to EWS queues" % attachment_id)
+ return NetworkTransaction().run(lambda: self._post_work_item_to_ews(attachment_id))
+
+ def next_work_item(self, queue_name):
+ _log.debug("Fetching next work item for %s" % queue_name)
+ patch_status_url = "%s/next-patch/%s" % (self.url, queue_name)
+ return self._fetch_url(patch_status_url)
+
+ def _post_release_work_item(self, queue_name, patch):
+ release_patch_url = "%s/release-patch" % (self.url)
+ self._browser.open(release_patch_url)
+ self._browser.select_form(name="release_patch")
+ self._browser["queue_name"] = queue_name
+ self._browser["attachment_id"] = unicode(patch.id())
+ self._browser.submit()
+
+ def release_work_item(self, queue_name, patch):
+ _log.info("Releasing work item %s from %s" % (patch.id(), queue_name))
+ return NetworkTransaction(convert_404_to_None=True).run(lambda: self._post_release_work_item(queue_name, patch))
+
+ def update_work_items(self, queue_name, work_items):
+ _log.debug("Recording work items: %s for %s" % (work_items, queue_name))
+ return NetworkTransaction().run(lambda: self._post_work_items_to_server(queue_name, work_items))
+
+ def update_status(self, queue_name, status, patch=None, results_file=None):
+ log(status)
+ return NetworkTransaction().run(lambda: self._post_status_to_server(queue_name, status, patch, results_file))
+
+ def update_svn_revision(self, svn_revision_number, broken_bot):
+ log("SVN revision: %s broke %s" % (svn_revision_number, broken_bot))
+ return NetworkTransaction().run(lambda: self._post_svn_revision_to_server(svn_revision_number, broken_bot))
+
+ def _fetch_url(self, url):
+ # FIXME: This should use NetworkTransaction's 404 handling instead.
+ try:
+ return urllib2.urlopen(url).read()
+ except urllib2.HTTPError, e:
+ if e.code == 404:
+ return None
+ raise e
+
+ def patch_status(self, queue_name, patch_id):
+ patch_status_url = "%s/patch-status/%s/%s" % (self.url, queue_name, patch_id)
+ return self._fetch_url(patch_status_url)
+
+ def svn_revision(self, svn_revision_number):
+ svn_revision_url = "%s/svn-revision/%s" % (self.url, svn_revision_number)
+ return self._fetch_url(svn_revision_url)
diff --git a/Tools/Scripts/webkitpy/common/net/statusserver_mock.py b/Tools/Scripts/webkitpy/common/net/statusserver_mock.py
new file mode 100644
index 000000000..69d1ae807
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/statusserver_mock.py
@@ -0,0 +1,68 @@
+# Copyright (C) 2011 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from webkitpy.common.system.deprecated_logging import log
+
+
+class MockStatusServer(object):
+
+ def __init__(self, bot_id=None, work_items=None):
+ self.host = "example.com"
+ self.bot_id = bot_id
+ self._work_items = work_items or []
+
+ def patch_status(self, queue_name, patch_id):
+ return None
+
+ def svn_revision(self, svn_revision):
+ return None
+
+ def next_work_item(self, queue_name):
+ if not self._work_items:
+ return None
+ return self._work_items.pop(0)
+
+ def release_work_item(self, queue_name, patch):
+ log("MOCK: release_work_item: %s %s" % (queue_name, patch.id()))
+
+ def update_work_items(self, queue_name, work_items):
+ self._work_items = work_items
+ log("MOCK: update_work_items: %s %s" % (queue_name, work_items))
+
+ def submit_to_ews(self, patch_id):
+ log("MOCK: submit_to_ews: %s" % (patch_id))
+
+ def update_status(self, queue_name, status, patch=None, results_file=None):
+ log("MOCK: update_status: %s %s" % (queue_name, status))
+ return 187
+
+ def update_svn_revision(self, svn_revision, broken_bot):
+ return 191
+
+ def results_url_for_status(self, status_id):
+ return "http://dummy_url"
diff --git a/Tools/Scripts/webkitpy/common/net/statusserver_unittest.py b/Tools/Scripts/webkitpy/common/net/statusserver_unittest.py
new file mode 100644
index 000000000..1f0afd05b
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/statusserver_unittest.py
@@ -0,0 +1,43 @@
+# Copyright (c) 2010 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import unittest
+
+from webkitpy.common.net.statusserver import StatusServer
+from webkitpy.common.system.outputcapture import OutputCaptureTestCaseBase
+from webkitpy.common.net.web_mock import MockBrowser
+
+
+class StatusServerTest(OutputCaptureTestCaseBase):
+ def test_url_for_issue(self):
+ mock_browser = MockBrowser()
+ status_server = StatusServer(browser=mock_browser, bot_id='123')
+ status_server.update_status('queue name', 'the status')
+ self.assertEqual('queue name', mock_browser.params['queue_name'])
+ self.assertEqual('the status', mock_browser.params['status'])
+ self.assertEqual('123', mock_browser.params['bot_id'])
diff --git a/Tools/Scripts/webkitpy/common/net/web.py b/Tools/Scripts/webkitpy/common/net/web.py
new file mode 100644
index 000000000..b8a06e574
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/web.py
@@ -0,0 +1,36 @@
+# Copyright (C) 2011 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import urllib2
+
+from webkitpy.common.net.networktransaction import NetworkTransaction
+
+
+class Web(object):
+ def get_binary(self, url, convert_404_to_None=False):
+ return NetworkTransaction(convert_404_to_None=convert_404_to_None).run(lambda: urllib2.urlopen(url).read())
diff --git a/Tools/Scripts/webkitpy/common/net/web_mock.py b/Tools/Scripts/webkitpy/common/net/web_mock.py
new file mode 100644
index 000000000..596dd0a41
--- /dev/null
+++ b/Tools/Scripts/webkitpy/common/net/web_mock.py
@@ -0,0 +1,51 @@
+# Copyright (C) 2011 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import StringIO
+
+
+class MockWeb(object):
+ def get_binary(self, url, convert_404_to_None=False):
+ return "MOCK Web result, convert 404 to None=%s" % convert_404_to_None
+
+
+# FIXME: Classes which are using Browser probably want to use Web instead.
+class MockBrowser(object):
+    def __init__(self):
+        self.params = {}
+
+ def open(self, url):
+ pass
+
+ def select_form(self, name):
+ pass
+
+ def __setitem__(self, key, value):
+ self.params[key] = value
+
+ def submit(self):
+ return StringIO.StringIO()