summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--requirements.txt1
-rw-r--r--setup.cfg4
-rw-r--r--test-requirements.txt6
-rw-r--r--tests/disabled_shell_task.py2
-rw-r--r--tools/update_datasets.sh278
-rwxr-xr-xtools/zuul_enqueue.py35
-rw-r--r--turbo_hipster/cmd/analyse_historical.py9
-rw-r--r--turbo_hipster/cmd/report_historical.py2
-rw-r--r--turbo_hipster/cmd/server.py3
-rwxr-xr-xturbo_hipster/lib/gerrit-git-prep.sh14
-rw-r--r--turbo_hipster/lib/models.py13
-rw-r--r--turbo_hipster/task_plugins/real_db_upgrade/handle_results.py2
-rwxr-xr-xturbo_hipster/task_plugins/real_db_upgrade/nova_mysql_migrations.sh31
-rw-r--r--turbo_hipster/worker_manager.py4
14 files changed, 346 insertions, 58 deletions
diff --git a/requirements.txt b/requirements.txt
index 1788b85..ae6769a 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,4 +1,3 @@
-pbr>=0.5.21,<0.6
gear>=0.5.4,<1.0.0
python-swiftclient
python-keystoneclient
diff --git a/setup.cfg b/setup.cfg
index 444a369..51192c4 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -15,10 +15,6 @@ classifier =
Programming Language :: Python :: 2.7
Programming Language :: Python :: 2.6
-[global]
-setup-hooks =
- pbr.hooks.setup_hook
-
[files]
packages =
turbo_hipster
diff --git a/test-requirements.txt b/test-requirements.txt
index a08c619..1943ed8 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -1,8 +1,4 @@
-# Install bounded pep8/pyflakes first, then let flake8 install
-pep8==1.4.5
-pyflakes==0.7.2
-flake8==2.0
-hacking>=0.5.6,<0.7
+hacking>=0.9.2,<0.10
coverage>=3.6
sphinx>=1.1.2,!=1.2.0,!=1.3b1,<1.3
diff --git a/tests/disabled_shell_task.py b/tests/disabled_shell_task.py
index 407e335..7bae862 100644
--- a/tests/disabled_shell_task.py
+++ b/tests/disabled_shell_task.py
@@ -136,7 +136,7 @@ class TestTaskRunner(base.TestWithGearman):
def side_effect():
raise Exception('check results failed!')
- #ShellTask._parse_and_check_results = _fake_parse_and_check_results
+ # ShellTask._parse_and_check_results = _fake_parse_and_check_results
mocked_parse_and_check_results.side_effect = side_effect
self.start_server()
diff --git a/tools/update_datasets.sh b/tools/update_datasets.sh
new file mode 100644
index 0000000..b635b84
--- /dev/null
+++ b/tools/update_datasets.sh
@@ -0,0 +1,278 @@
+#!/bin/bash
+#
+# Copyright 2014 Rackspace Australia
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# A tool to update a given dataset to a given version. Used to keep datasets
+# somewhat fresh rather than over-exercising old migrations.
+
+
+# Usage: ./update_datasets.sh VENV_NAME WORKING_DIR GIT_PATH DB_USER DB_PASS DB_NAME SEED_DATA OUTPUT_DATA
+# apt-get install git virtualenvwrapper python-pip mysql-server python-lxml build-essential libffi-dev
+
+# $1 is the unique job id
+# $2 is the working dir path
+# $3 is the path to the git repo path
+# $4 is the nova db user
+# $5 is the nova db password
+# $6 is the nova db name
+# $7 is the path to the seed dataset to test against
+# $8 is the path to write the updated dataset dump to
+# (see also the NOCLEANUP environment variable described below)
+
+UNIQUE_ID=$1
+WORKING_DIR_PATH=`realpath $2`
+GIT_REPO_PATH=`realpath $3`
+DB_USER=$4
+DB_PASS=$5
+DB_NAME=$6
+DATASET_SEED_SQL=`realpath $7`
+DATASET_OUTPUT_SQL=$8
+
+SCRIPT_DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
+
+# We also support the following environment variables to tweak our behaviour:
+# NOCLEANUP: if set to anything, don't cleanup at the end of the run
+
+pip_requires() {
+ pip install -q mysql-python
+ pip install -q eventlet
+ requires="tools/pip-requires"
+ if [ ! -e $requires ]
+ then
+ requires="requirements.txt"
+ fi
+ echo "Install pip requirements from $requires"
+ pip install -q -r $requires
+ echo "Requirements installed"
+}
+
+db_sync() {
+ # $1 is the test target (ie branch name)
+ # $2 is an (optional) destination version number
+
+ # Create a nova.conf file
+ cat - > $WORKING_DIR_PATH/nova-$1.conf <<EOF
+[DEFAULT]
+sql_connection = mysql://$DB_USER:$DB_PASS@localhost/$DB_NAME?charset=utf8
+#log_config = $LOG_CONF_FILE
+EOF
+
+ # Silently return git to a known good state (delete untracked files)
+ git clean -xfdq
+
+ echo "***** Start DB upgrade to state of $1 *****"
+ echo "HEAD of branch under test is:"
+ git log -n 1
+
+ echo "Setting up the nova-manage entry point"
+ python setup.py -q clean
+ python setup.py -q develop
+ python setup.py -q install
+
+ # Log the migrations present
+ echo "Migrations present:"
+ ls $GIT_REPO_PATH/nova/db/sqlalchemy/migrate_repo/versions/*.py | sed 's/.*\///' | egrep "^[0-9]+_"
+
+ # Flush innodb's caches
+ echo "Restarting mysql"
+ sudo service mysql stop
+ sudo service mysql start
+
+ start_version=`mysql -u $DB_USER --password=$DB_PASS $DB_NAME -e "select * from migrate_version \G" | grep version | sed 's/.*: //'`
+
+ if [ "%$2%" == "%%" ]
+ then
+ end_version=`ls $GIT_REPO_PATH/nova/db/sqlalchemy/migrate_repo/versions/*.py | sed 's/.*\///' | egrep "^[0-9]+_" | tail -1 | cut -f 1 -d "_"`
+ else
+ end_version=$2
+ fi
+
+ echo "Test will migrate from $start_version to $end_version"
+ if [ $end_version -lt $start_version ]
+ then
+ increment=-1
+ end_version=$(( $end_version + 1 ))
+ else
+ increment=1
+ start_version=$(( $start_version + 1))
+ fi
+
+ for i in `seq $start_version $increment $end_version`
+ do
+ set -x
+ $SCRIPT_DIR/nova-manage-wrapper.sh $VENV_PATH --config-file $WORKING_DIR_PATH/nova-$1.conf --verbose db sync --version $i
+ manage_exit=$?
+ set +x
+
+ echo "nova-manage returned exit code $manage_exit"
+ if [ $manage_exit -gt 0 ]
+ then
+ echo "Aborting early"
+ exit $manage_exit
+ fi
+ done
+
+ echo "***** Finished DB upgrade to state of $1 *****"
+}
+
+stable_release_db_sync() {
+ version=`mysql -u $DB_USER --password=$DB_PASS $DB_NAME -e "select * from migrate_version \G" | grep version | sed 's/.*: //'`
+
+ # Some databases are from Folsom
+ echo "Schema version is $version"
+ if [ $version -lt "161" ]
+ then
+ echo "Database is from Folsom! Upgrade via Grizzly"
+ git branch -D eol/grizzly || true
+ git remote update
+ git checkout -b eol/grizzly
+ # Use tag
+ git reset --hard grizzly-eol
+ pip_requires
+ db_sync "grizzly"
+ fi
+
+ version=`mysql -u $DB_USER --password=$DB_PASS $DB_NAME -e "select * from migrate_version \G" | grep version | sed 's/.*: //'`
+ # Some databases are from Grizzly
+ echo "Schema version is $version"
+ if [ $version -lt "216" ]
+ then
+ echo "Database is from Grizzly! Upgrade via Havana"
+ git branch -D eol/havana || true
+ git remote update
+ git checkout -b eol/havana
+ # Use tag
+ git reset --hard havana-eol
+ pip_requires
+ db_sync "havana"
+ fi
+
+ version=`mysql -u $DB_USER --password=$DB_PASS $DB_NAME -e "select * from migrate_version \G" | grep version | sed 's/.*: //'`
+ # Some databases are from Havana
+ echo "Schema version is $version"
+ if [ $version -lt "234" ]
+ then
+ echo "Database is from Havana! Upgrade via Icehouse"
+ git branch -D eol/icehouse || true
+ git remote update
+ git checkout -b eol/icehouse
+ # Use tag
+ git reset --hard icehouse-eol
+ pip_requires
+ db_sync "icehouse"
+ fi
+
+ version=`mysql -u $DB_USER --password=$DB_PASS $DB_NAME -e "select * from migrate_version \G" | grep version | sed 's/.*: //'`
+ # Some databases are from Icehouse
+ echo "Schema version is $version"
+ if [ $version -lt "254" ]
+ then
+ echo "Database is from Icehouse! Upgrade via Juno"
+ git branch -D stable/juno || true
+ git remote update
+ git checkout -b stable/juno
+ git reset --hard remotes/origin/stable/juno
+ pip_requires
+ db_sync "juno"
+ fi
+
+ version=`mysql -u $DB_USER --password=$DB_PASS $DB_NAME -e "select * from migrate_version \G" | grep version | sed 's/.*: //'`
+ # Some databases are from Juno
+ echo "Schema version is $version"
+ if [ $version -lt "280" ]
+ then
+ echo "Database is from Juno! Upgrade via Kilo"
+ git branch -D stable/kilo || true
+ git remote update
+ git checkout -b stable/kilo
+ git reset --hard remotes/origin/stable/kilo
+ pip_requires
+ db_sync "kilo"
+
+    # TODO(jhesketh): This is a bit of a hack until we update our datasets to
+    # have the flavour data migrated. We have to do this before upgrading past Kilo.
+ set -x
+ $SCRIPT_DIR/nova-manage-wrapper.sh $VENV_PATH --config-file $WORKING_DIR_PATH/nova-kilo.conf --verbose db migrate_flavor_data --force
+ set +x
+ fi
+
+ # TODO(jhesketh): Add in Liberty here once released
+
+ # TODO(jhesketh): Make this more DRY and/or automatically match migration
+ # numbers to releases.
+}
+
+echo "Test running on "`hostname`" as "`whoami`" ("`echo ~`", $HOME)"
+echo "To execute this script manually, run this:"
+echo "$0 $@"
+
+# Setup the environment
+set -x
+export PATH=/usr/lib/ccache:$PATH
+#export PIP_INDEX_URL="http://www.rcbops.com/pypi/mirror"
+export PIP_INDEX_URL="http://pypi.openstack.org/simple/"
+export PIP_EXTRA_INDEX_URL="https://pypi.python.org/simple/"
+which pip
+pip --version
+which virtualenv
+virtualenv --version
+which mkvirtualenv
+set +x
+
+# Restore database to known good state
+echo "Restoring test database $DB_NAME"
+set -x
+mysql -u $DB_USER --password=$DB_PASS -e "drop database $DB_NAME"
+mysql -u $DB_USER --password=$DB_PASS -e "create database $DB_NAME"
+mysql -u $DB_USER --password=$DB_PASS $DB_NAME < $DATASET_SEED_SQL
+set +x
+
+echo "Build test environment"
+cd $GIT_REPO_PATH
+
+echo "Setting up virtual env"
+source ~/.bashrc
+export WORKON_HOME=`pwd`/envs
+mkdir -p $WORKON_HOME
+VENV_PATH=$WORKON_HOME/$UNIQUE_ID
+rm -rf $VENV_PATH
+source /usr/local/bin/virtualenvwrapper.sh
+source /etc/bash_completion.d/virtualenvwrapper
+mkvirtualenv --no-site-packages $UNIQUE_ID
+#toggleglobalsitepackages
+export PYTHONPATH=$PYTHONPATH:$GIT_REPO_PATH
+
+if [ ! -e $VENV_PATH ]
+then
+ echo "Error: making the virtual env failed"
+ exit 1
+fi
+
+stable_release_db_sync
+
+# Determine the final schema version
+version=`mysql -u $DB_USER --password=$DB_PASS $DB_NAME -e "select * from migrate_version \G" | grep version | sed 's/.*: //'`
+echo "Final schema version is $version"
+
+if [ "%$NOCLEANUP%" == "%%" ]
+then
+ # Cleanup virtual env
+ echo "Cleaning up virtual env"
+ deactivate
+ rmvirtualenv $UNIQUE_ID
+fi
+
+cd $SCRIPT_DIR
+mysqldump -u $DB_USER --password=$DB_PASS $DB_NAME > $DATASET_OUTPUT_SQL
diff --git a/tools/zuul_enqueue.py b/tools/zuul_enqueue.py
index 0073257..ad9e653 100755
--- a/tools/zuul_enqueue.py
+++ b/tools/zuul_enqueue.py
@@ -9,6 +9,8 @@ import traceback
# Set the user to watch
user = 'turbo-hipster'
author_name = 'DB Datasets CI'
+upstream_user = 'jenkins'
+upstream_author_name = "Jenkins"
# Grab a list of missing or negative reviews for a user:
url = ("https://review.openstack.org/changes/?q=status:open "
@@ -21,6 +23,7 @@ r = requests.get(url)
no_votes = []
negative_votes = []
merge_failures = []
+upstream_merge_failures = []
unknown = []
for change in json.loads(r.text[5:]):
@@ -28,41 +31,56 @@ for change in json.loads(r.text[5:]):
patchset = change['revisions'][change['current_revision']]['_number']
change_id = str(change['_number']) + ',' + str(patchset)
last_message = None
+ last_upstream_message = None
for message in sorted(change['messages'],
key=lambda k: (k['_revision_number'],
k['date']), reverse=True):
if message['_revision_number'] < patchset:
# Finished looking at all the messages on this patchset
break
- if message['author']['name'] == author_name:
+ if not last_message and message['author']['name'] == author_name:
last_message = message['message']
- break
+ if (not last_upstream_message and
+ message['author']['name'] == upstream_author_name):
+ last_upstream_message = message['message']
- if not last_message:
+ if (last_upstream_message and
+ 'Merge Failed.' in last_upstream_message.split('\n')[2]):
+ upstream_merge_failures.append({
+ 'change_id': change_id,
+ 'updated': change['updated'],
+ 'change': change,
+ 'last_upstream_message': last_upstream_message,
+ })
+ elif not last_message:
# turbo-hister hasn't commented on this patchset
no_votes.append({
'change_id': change_id,
'updated': change['updated'],
- 'change': change
+ 'change': change,
+ 'last_upstream_message': last_upstream_message,
})
elif ('This change was unable to be automatically merged with the '
'current state of the repository.' in last_message):
merge_failures.append({
'change_id': change_id,
'updated': change['updated'],
- 'change': change
+ 'change': change,
+ 'last_upstream_message': last_upstream_message,
})
elif 'Database migration testing failed' in last_message:
negative_votes.append({
'change_id': change_id,
'updated': change['updated'],
- 'change': change
+ 'change': change,
+ 'last_upstream_message': last_upstream_message,
})
else:
unknown.append({
'change_id': change_id,
'updated': change['updated'],
- 'change': change
+ 'change': change,
+ 'last_upstream_message': last_upstream_message,
})
except Exception:
@@ -86,3 +104,6 @@ print ("=" * 20 + (" Changes with merge failure (%d) " % len(merge_failures)) +
print_enqueues(merge_failures)
print "=" * 20 + (" Others in this query (%d) " % len(unknown)) + "=" * 20
print_enqueues(unknown)
+print "=" * 20 + (" Changes with merge failures upstream (%d) "
+ % len(upstream_merge_failures)) + "=" * 20
+print_enqueues(upstream_merge_failures)
diff --git a/turbo_hipster/cmd/analyse_historical.py b/turbo_hipster/cmd/analyse_historical.py
index c76ed4f..a7ce33a 100644
--- a/turbo_hipster/cmd/analyse_historical.py
+++ b/turbo_hipster/cmd/analyse_historical.py
@@ -33,8 +33,7 @@ from turbo_hipster.task_plugins.real_db_upgrade import handle_results
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-c', '--config',
- default=
- '/etc/turbo-hipster/config.yaml',
+ default='/etc/turbo-hipster/config.yaml',
help='Path to yaml config file.')
args = parser.parse_args()
@@ -82,7 +81,7 @@ def main():
if cursor.rowcount == 0:
for engine, dataset, migration in process(
connection, swift_config['container'], item['name']):
- if not 'duration' in migration:
+ if 'duration' not in migration:
continue
if migration['stats']:
@@ -145,9 +144,9 @@ def process(connection, container, name):
log.warn('Log %s contained no migrations' % name)
for migration in lp.migrations:
- if not 'start' in migration:
+ if 'start' not in migration:
continue
- if not 'end' in migration:
+ if 'end' not in migration:
continue
yield (engine_name, test_name, migration)
diff --git a/turbo_hipster/cmd/report_historical.py b/turbo_hipster/cmd/report_historical.py
index 35f3c9b..50e4a28 100644
--- a/turbo_hipster/cmd/report_historical.py
+++ b/turbo_hipster/cmd/report_historical.py
@@ -116,7 +116,7 @@ def process_dataset(dataset):
math.ceil(recommend)
# Innodb stats
- if not migration in stats_summary:
+ if migration not in stats_summary:
continue
for stats_key in ['XInnodb_rows_changed', 'Innodb_rows_read']:
diff --git a/turbo_hipster/cmd/server.py b/turbo_hipster/cmd/server.py
index 30d8aaf..06f9415 100644
--- a/turbo_hipster/cmd/server.py
+++ b/turbo_hipster/cmd/server.py
@@ -65,8 +65,7 @@ def main():
os.path.join(os.path.dirname(__file__), '../')))
parser = argparse.ArgumentParser()
parser.add_argument('-c', '--config',
- default=
- '/etc/turbo-hipster/config.yaml',
+ default='/etc/turbo-hipster/config.yaml',
help='Path to yaml config file.')
parser.add_argument('-b', '--background', action='store_true',
help='Run as a daemon in the background.')
diff --git a/turbo_hipster/lib/gerrit-git-prep.sh b/turbo_hipster/lib/gerrit-git-prep.sh
index 5636241..a0b1349 100755
--- a/turbo_hipster/lib/gerrit-git-prep.sh
+++ b/turbo_hipster/lib/gerrit-git-prep.sh
@@ -50,19 +50,19 @@ then
rm -fr .[^.]* *
if [ -d /opt/git/$ZUUL_PROJECT/.git ]
then
- git clone file:///opt/git/$ZUUL_PROJECT .
+ git clone -vvvvvv file:///opt/git/$ZUUL_PROJECT .
else
- git clone $GIT_ORIGIN/$ZUUL_PROJECT .
+ git clone -vvvvvv $GIT_ORIGIN/$ZUUL_PROJECT .
fi
fi
-git remote set-url origin $GIT_ORIGIN/$ZUUL_PROJECT
+git remote -vvvvvv set-url origin $GIT_ORIGIN/$ZUUL_PROJECT
# attempt to work around bugs 925790 and 1229352
-if ! git remote update
+if ! git remote -vvvvvv update
then
echo "The remote update failed, so garbage collecting before trying again."
git gc
- git remote update
+ git remote -vvvvvv update
fi
git reset --hard
@@ -73,12 +73,12 @@ fi
if echo "$ZUUL_REF" | grep -q ^refs/tags/
then
- git fetch --tags $ZUUL_URL/$ZUUL_PROJECT
+ git fetch -vvvvvv --tags $ZUUL_URL/$ZUUL_PROJECT
git checkout $ZUUL_REF
git reset --hard $ZUUL_REF
elif [ -z "$ZUUL_NEWREV" ]
then
- git fetch $ZUUL_URL/$ZUUL_PROJECT $ZUUL_REF
+ git fetch -vvvvvv $ZUUL_URL/$ZUUL_PROJECT $ZUUL_REF
git checkout FETCH_HEAD
git reset --hard FETCH_HEAD
else
diff --git a/turbo_hipster/lib/models.py b/turbo_hipster/lib/models.py
index 1a2c836..3987733 100644
--- a/turbo_hipster/lib/models.py
+++ b/turbo_hipster/lib/models.py
@@ -48,7 +48,7 @@ class Task(object):
self.log_handler.flush()
self.log_handler.close()
if ('shutdown-th' in self.job_config and
- self.job_config['shutdown-th']):
+ self.job_config['shutdown-th']):
self.worker_server.shutdown_gracefully()
def _reset(self):
@@ -303,7 +303,9 @@ class ShellTask(Task):
if not os.path.exists(local_path):
os.makedirs(local_path)
+ env = os.environ
git_args = copy.deepcopy(job_args)
+ env.update(git_args)
cmd = os.path.join(
os.path.join(os.path.dirname(os.path.abspath(__file__)),
@@ -318,16 +320,17 @@ class ShellTask(Task):
return_code = 1
while return_code != 0:
tries += 1
+ env.update({'GIT_CURL_VERBOSE': '1', 'GIT_TRACE': '1'})
return_code = utils.execute_to_log(cmd, self.git_prep_log,
- env=git_args, cwd=local_path)
- if tries >= 2:
+ env=env, cwd=local_path)
+ if tries == 2:
# Try upping the post buffer. See:
# http://stackoverflow.com/questions/6842687/
# the-remote-end-hung-up-unexpectedly-while-git-cloning
utils.execute_to_log(
"git config --global http.postBuffer 1048576000",
- self.git_prep_log, env=git_args, cwd=local_path)
- if tries >= 3:
+ self.git_prep_log, env=env, cwd=local_path)
+ if tries >= 4:
break
if return_code != 0:
cmd = 'ifconfig'
diff --git a/turbo_hipster/task_plugins/real_db_upgrade/handle_results.py b/turbo_hipster/task_plugins/real_db_upgrade/handle_results.py
index 198afe1..e701c5b 100644
--- a/turbo_hipster/task_plugins/real_db_upgrade/handle_results.py
+++ b/turbo_hipster/task_plugins/real_db_upgrade/handle_results.py
@@ -188,7 +188,7 @@ def check_log_file(log_file, git_path, dataset):
migration.setdefault('stats', {})
# check migration completed
- if not 'duration' in migration:
+ if 'duration' not in migration:
success = False
messages.append('WARNING - Migration %s->%s failed to complete'
% (migration['from'], migration['to']))
diff --git a/turbo_hipster/task_plugins/real_db_upgrade/nova_mysql_migrations.sh b/turbo_hipster/task_plugins/real_db_upgrade/nova_mysql_migrations.sh
index 2a15bda..62d9e53 100755
--- a/turbo_hipster/task_plugins/real_db_upgrade/nova_mysql_migrations.sh
+++ b/turbo_hipster/task_plugins/real_db_upgrade/nova_mysql_migrations.sh
@@ -115,20 +115,6 @@ EOF
for i in `seq $start_version $increment $end_version`
do
- # TODO(jhesketh): This is a bit of a hack until we update our datasets to
- # have the flavour data migrated. We know 291 does the migration check
- # so we'll migrate just before then
- if [ $i == 291 ]
- then
- set -x
- echo "MySQL counters before migrate_flavor_data:"
- mysql -u $DB_USER --password=$DB_PASS $DB_NAME -e "show status like 'innodb%';"
- sudo /sbin/ip netns exec nonet `dirname $0`/nova-manage-wrapper.sh $VENV_PATH --config-file $WORKING_DIR_PATH/nova-$1.conf --verbose db migrate_flavor_data --force
- echo "MySQL counters after migrate_flavor_data:"
- mysql -u $DB_USER --password=$DB_PASS $DB_NAME -e "show status like 'innodb%';"
- set +x
- fi
-
set -x
sudo /sbin/ip netns exec nonet `dirname $0`/nova-manage-wrapper.sh $VENV_PATH --config-file $WORKING_DIR_PATH/nova-$1.conf --verbose db sync --version $i
manage_exit=$?
@@ -186,10 +172,11 @@ stable_release_db_sync() {
if [ $version -lt "234" ]
then
echo "Database is from Havana! Upgrade via Icehouse"
- git branch -D stable/icehouse || true
+ git branch -D eol/icehouse || true
git remote update
- git checkout -b stable/icehouse
- git reset --hard remotes/origin/stable/icehouse
+ git checkout -b eol/icehouse
+ # Use tag
+ git reset --hard icehouse-eol
pip_requires
db_sync "icehouse"
fi
@@ -220,6 +207,16 @@ stable_release_db_sync() {
git reset --hard remotes/origin/stable/kilo
pip_requires
db_sync "kilo"
+
+    # TODO(jhesketh): This is a bit of a hack until we update our datasets to
+    # have the flavour data migrated. We have to do this before upgrading past Kilo.
+ # set -x
+ # echo "MySQL counters before migrate_flavor_data:"
+ # mysql -u $DB_USER --password=$DB_PASS $DB_NAME -e "show status like 'innodb%';"
+ # sudo /sbin/ip netns exec nonet `dirname $0`/nova-manage-wrapper.sh $VENV_PATH --config-file $WORKING_DIR_PATH/nova-kilo.conf --verbose db migrate_flavor_data --force
+ # echo "MySQL counters after migrate_flavor_data:"
+ # mysql -u $DB_USER --password=$DB_PASS $DB_NAME -e "show status like 'innodb%';"
+ # set +x
fi
# TODO(jhesketh): Add in Liberty here once released
diff --git a/turbo_hipster/worker_manager.py b/turbo_hipster/worker_manager.py
index 40a1f35..a1cf47b 100644
--- a/turbo_hipster/worker_manager.py
+++ b/turbo_hipster/worker_manager.py
@@ -84,7 +84,7 @@ class ZuulManager(threading.Thread):
self.log.debug("Waiting for server")
self.gearman_worker.waitForServer()
if (not self.stopped() and self.gearman_worker.running and
- self.gearman_worker.active_connections):
+ self.gearman_worker.active_connections):
self.register_functions()
self.gearman_worker.waitForServer()
logging.debug("Waiting for job")
@@ -178,7 +178,7 @@ class ZuulClient(threading.Thread):
self.log.debug("Waiting for server")
self.gearman_worker.waitForServer()
if (not self.stopped() and self.gearman_worker.running and
- self.gearman_worker.active_connections):
+ self.gearman_worker.active_connections):
self.register_functions()
self.gearman_worker.waitForServer()
self.log.debug("Waiting for job")