-rw-r--r--  CONTRIBUTING.rst | 7
-rw-r--r--  README.rst | 4
-rw-r--r--  doc/requirements.txt | 13
-rw-r--r--  doc/source/install/index.rst | 14
-rw-r--r--  oslo_db/_i18n.py | 2
-rw-r--r--  oslo_db/api.py | 5
-rw-r--r--  oslo_db/exception.py | 19
-rw-r--r--  oslo_db/sqlalchemy/enginefacade.py | 10
-rw-r--r--  oslo_db/sqlalchemy/migration.py | 2
-rw-r--r--  oslo_db/sqlalchemy/provision.py | 31
-rw-r--r--  oslo_db/sqlalchemy/test_base.py | 4
-rw-r--r--  oslo_db/sqlalchemy/test_fixtures.py | 2
-rw-r--r--  oslo_db/sqlalchemy/update_match.py | 6
-rw-r--r--  oslo_db/sqlalchemy/utils.py | 194
-rw-r--r--  oslo_db/tests/sqlalchemy/test_async_eventlet.py | 2
-rw-r--r--  oslo_db/tests/sqlalchemy/test_enginefacade.py | 12
-rw-r--r--  oslo_db/tests/sqlalchemy/test_exc_filters.py | 2
-rw-r--r--  oslo_db/tests/sqlalchemy/test_fixtures.py | 4
-rw-r--r--  oslo_db/tests/sqlalchemy/test_migration_common.py | 6
-rw-r--r--  oslo_db/tests/sqlalchemy/test_ndb.py | 2
-rw-r--r--  oslo_db/tests/sqlalchemy/test_provision.py | 4
-rw-r--r--  oslo_db/tests/sqlalchemy/test_utils.py | 244
-rw-r--r--  releasenotes/notes/MySQL-python-no-longer-tested-2a6c32cce6b03215.yaml | 8
-rw-r--r--  releasenotes/source/conf.py | 13
-rw-r--r--  releasenotes/source/locale/en_GB/LC_MESSAGES/releasenotes.po | 183
-rw-r--r--  requirements.txt | 8
-rw-r--r--  setup.cfg | 21
-rw-r--r--  tox.ini | 15
28 files changed, 593 insertions(+), 244 deletions(-)
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 51a0c3d..88a37f4 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -5,13 +5,13 @@ How to contribute
If you would like to contribute to the development of OpenStack,
you must follow the steps in this page:
- http://docs.openstack.org/infra/manual/developers.html
+ https://docs.openstack.org/infra/manual/developers.html
Once those steps have been completed, changes to OpenStack
should be submitted for review via the Gerrit tool, following
the workflow documented at:
- http://docs.openstack.org/infra/manual/developers.html#development-workflow
+ https://docs.openstack.org/infra/manual/developers.html#development-workflow
Pull requests submitted through GitHub will be ignored.
@@ -32,9 +32,6 @@ venv, so you must ensure that you have the required system packages installed
for psycopg2 (PyMySQL is a pure-Python implementation and so needs no
additional system packages). For Ubuntu/Debian they are python-dev, and
libpq-dev. For Fedora/CentOS - gcc, python-devel and postgresql-devel.
-There is also a separate env for testing with MySQL-python. If you are suppose
-to run these tests as well, you need to install libmysqlclient-dev on
-Ubuntu/Debian or mysql-devel for Fedora/CentOS.
The oslo.db unit tests system allows to run unittests on real databases. At the
moment it supports MySQL, PostgreSQL and SQLite.
diff --git a/README.rst b/README.rst
index 6e53fcb..ee1bf23 100644
--- a/README.rst
+++ b/README.rst
@@ -2,8 +2,8 @@
Team and repository tags
========================
-.. image:: http://governance.openstack.org/badges/oslo.db.svg
- :target: http://governance.openstack.org/reference/tags/index.html
+.. image:: https://governance.openstack.org/badges/oslo.db.svg
+ :target: https://governance.openstack.org/tc/reference/tags/index.html
.. Change things from this point on
diff --git a/doc/requirements.txt b/doc/requirements.txt
new file mode 100644
index 0000000..3882b4d
--- /dev/null
+++ b/doc/requirements.txt
@@ -0,0 +1,13 @@
+# The order of packages is significant, because pip processes them in the order
+# of appearance. Changing the order has an impact on the overall integration
+# process, which may cause wedges in the gate later.
+openstackdocstheme>=1.17.0 # Apache-2.0
+sphinx>=1.6.2 # BSD
+doc8>=0.6.0 # Apache-2.0
+reno>=2.5.0 # Apache-2.0
+
+# These modules are needed when generating document
+fixtures>=3.0.0 # Apache-2.0/BSD
+testresources>=2.0.0 # Apache-2.0/BSD
+testscenarios>=0.4 # Apache-2.0/BSD
+oslotest>=1.10.0 # Apache-2.0
diff --git a/doc/source/install/index.rst b/doc/source/install/index.rst
index 1262160..1390f94 100644
--- a/doc/source/install/index.rst
+++ b/doc/source/install/index.rst
@@ -33,17 +33,3 @@ Note that even in a virtual environment the libpq-dev will be installed
system wide.
-Using with MySQL-python
------------------------
-
-PyMySQL is a default MySQL DB API driver for oslo.db, as well as for the whole
-OpenStack. But you still can use MySQL-python as an alternative DB API driver.
-For MySQL-python you must install the MySQL client development package for
-your distro. On Ubuntu this is done as follows::
-
- $ sudo apt-get install libmysqlclient-dev
- $ pip install MySQL-python
-
-The installation of MySQL-python will fail if libmysqlclient-dev is not
-installed first. Note that even in a virtual environment the MySQL package will
-be installed system wide.
diff --git a/oslo_db/_i18n.py b/oslo_db/_i18n.py
index 413bc41..3d75f69 100644
--- a/oslo_db/_i18n.py
+++ b/oslo_db/_i18n.py
@@ -12,7 +12,7 @@
"""oslo.i18n integration module.
-See http://docs.openstack.org/developer/oslo.i18n/usage.html .
+See https://docs.openstack.org/oslo.i18n/latest/user/index.html .
"""
diff --git a/oslo_db/api.py b/oslo_db/api.py
index 621631b..62cf889 100644
--- a/oslo_db/api.py
+++ b/oslo_db/api.py
@@ -27,7 +27,6 @@ import logging
import threading
import time
-from debtcollector import removals
from oslo_utils import excutils
from oslo_utils import importutils
from oslo_utils import reflection
@@ -106,12 +105,10 @@ class wrap_db_retry(object):
:type exception_checker: callable
"""
- @removals.removed_kwarg("retry_on_request",
- "Retry on request is always enabled")
def __init__(self, retry_interval=1, max_retries=20,
inc_retry_interval=True,
max_retry_interval=10, retry_on_disconnect=False,
- retry_on_deadlock=False, retry_on_request=False,
+ retry_on_deadlock=False,
exception_checker=lambda exc: False):
super(wrap_db_retry, self).__init__()
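A minimal usage sketch of the decorator after this change (the ``create_instance`` function and its body are hypothetical); with the deprecated ``retry_on_request`` keyword gone, retries on request errors are always enabled and only the remaining keywords are passed::

    from oslo_db import api as oslo_db_api

    @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True)
    def create_instance(context, values):
        # hypothetical DB write; on a deadlock the decorator re-invokes it,
        # increasing the interval between attempts up to max_retry_interval
        ...
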
diff --git a/oslo_db/exception.py b/oslo_db/exception.py
index c8da996..fcdfd4e 100644
--- a/oslo_db/exception.py
+++ b/oslo_db/exception.py
@@ -43,7 +43,6 @@ with `try/except` statement. This is required for consistent handling of
database errors.
"""
-import debtcollector.removals
import six
from oslo_db._i18n import _
@@ -188,13 +187,6 @@ class DBInvalidUnicodeParameter(Exception):
without encoding directive.
"""
- @debtcollector.removals.removed_property
- def message(self):
- # NOTE(rpodolyaka): provided for compatibility with python 3k, where
- # exceptions do not have .message attribute, while we used to have one
- # in this particular exception class. See LP #1542961 for details.
- return str(self)
-
def __init__(self):
super(DBInvalidUnicodeParameter, self).__init__(
_("Invalid Parameter: Encoding directive wasn't provided."))
@@ -221,10 +213,6 @@ class DBMigrationError(DbMigrationError):
super(DBMigrationError, self).__init__(message)
-debtcollector.removals.removed_class(DbMigrationError,
- replacement=DBMigrationError)
-
-
class DBConnectionError(DBError):
"""Wrapped connection specific exception.
@@ -250,13 +238,6 @@ class DBNotSupportedError(DBError):
class InvalidSortKey(Exception):
"""A sort key destined for database query usage is invalid."""
- @debtcollector.removals.removed_property
- def message(self):
- # NOTE(rpodolyaka): provided for compatibility with python 3k, where
- # exceptions do not have .message attribute, while we used to have one
- # in this particular exception class. See LP #1542961 for details.
- return str(self)
-
def __init__(self, key=None):
super(InvalidSortKey, self).__init__(
_("Sort key supplied is invalid: %s") % key)
diff --git a/oslo_db/sqlalchemy/enginefacade.py b/oslo_db/sqlalchemy/enginefacade.py
index 8213080..80c320b 100644
--- a/oslo_db/sqlalchemy/enginefacade.py
+++ b/oslo_db/sqlalchemy/enginefacade.py
@@ -13,7 +13,6 @@
import contextlib
import functools
-import inspect
import operator
import threading
import warnings
@@ -27,6 +26,7 @@ from oslo_db import exception
from oslo_db import options
from oslo_db.sqlalchemy import engines
from oslo_db.sqlalchemy import orm
+from oslo_db.sqlalchemy import utils
class _symbol(object):
@@ -843,7 +843,8 @@ class _TransactionContextManager(object):
new = self._clone()
new._root = new
new._root_factory = self._root_factory._create_factory_copy()
- assert not new._factory._started
+ if new._factory._started:
+ raise AssertionError('TransactionFactory is already started')
return new
def patch_factory(self, factory_or_manager):
@@ -869,7 +870,8 @@ class _TransactionContextManager(object):
raise ValueError(
"_TransactionContextManager or "
"_TransactionFactory expected.")
- assert self._root is self
+ if self._root is not self:
+ raise AssertionError('patch_factory only works for root factory.')
existing_factory = self._root_factory
self._root_factory = factory
@@ -970,7 +972,7 @@ class _TransactionContextManager(object):
def __call__(self, fn):
"""Decorate a function."""
- argspec = inspect.getargspec(fn)
+ argspec = utils.getargspec(fn)
if argspec.args[0] == 'self' or argspec.args[0] == 'cls':
context_index = 1
else:
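The decorator inspects the wrapped callable's argspec so that instance and class methods, whose first argument is ``self`` or ``cls``, receive the context as the second positional argument. A minimal sketch (the ``Instance`` model and the query are hypothetical)::

    from oslo_db.sqlalchemy import enginefacade

    class InstanceAPI(object):

        @enginefacade.reader
        def get_instance(self, context, instance_id):
            # context.session is established by the decorator; the argspec
            # inspection shown above skips the leading 'self' argument
            return context.session.query(Instance).get(instance_id)
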
diff --git a/oslo_db/sqlalchemy/migration.py b/oslo_db/sqlalchemy/migration.py
index 457dd35..31a6105 100644
--- a/oslo_db/sqlalchemy/migration.py
+++ b/oslo_db/sqlalchemy/migration.py
@@ -78,7 +78,7 @@ def db_sync(engine, abs_path, version=None, init_version=0, sanity_check=True):
try:
migration = versioning_api.upgrade(engine, repository, version)
except Exception as ex:
- raise exception.DbMigrationError(ex)
+ raise exception.DBMigrationError(ex)
else:
migration = versioning_api.downgrade(engine, repository,
version)
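``DBMigrationError`` subclasses the old ``DbMigrationError``, so existing handlers keep working; new code should catch the new name. A brief sketch (the repository path is illustrative)::

    from oslo_db import exception as db_exception
    from oslo_db.sqlalchemy import migration

    try:
        migration.db_sync(engine, '/path/to/migrate_repo', version=None)
    except db_exception.DBMigrationError:
        # raised when sqlalchemy-migrate fails to upgrade the schema
        raise
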
diff --git a/oslo_db/sqlalchemy/provision.py b/oslo_db/sqlalchemy/provision.py
index 7aa1fd3..fb1e191 100644
--- a/oslo_db/sqlalchemy/provision.py
+++ b/oslo_db/sqlalchemy/provision.py
@@ -17,7 +17,6 @@
"""Provision test environment for specific DB backends"""
import abc
-import debtcollector
import logging
import os
import random
@@ -33,7 +32,6 @@ import testresources
from oslo_db import exception
from oslo_db.sqlalchemy import enginefacade
-from oslo_db.sqlalchemy import session
from oslo_db.sqlalchemy import utils
LOG = logging.getLogger(__name__)
@@ -503,39 +501,10 @@ class BackendImpl(object):
url.database = ident
return url
- @debtcollector.removals.remove()
- def provisioned_engine(self, base_url, ident):
- """Return a provisioned engine.
-
- Given the URL of a particular database backend and the string
- name of a particular 'database' within that backend, return
- an Engine instance whose connections will refer directly to the
- named database.
-
- For hostname-based URLs, this typically involves switching just the
- 'database' portion of the URL with the given name and creating
- an engine.
-
- For URLs that instead deal with DSNs, the rules may be more custom;
- for example, the engine may need to connect to the root URL and
- then emit a command to switch to the named database.
-
- """
- url = self.provisioned_database_url(base_url, ident)
-
- return session.create_engine(
- url,
- logging_name="%s@%s" % (self.drivername, ident),
- **self.default_engine_kwargs
- )
-
@BackendImpl.impl.dispatch_for("mysql")
class MySQLBackendImpl(BackendImpl):
- # only used for deprecated provisioned_engine() function.
- default_engine_kwargs = {'mysql_sql_mode': 'TRADITIONAL'}
-
def create_opportunistic_driver_url(self):
return "mysql+pymysql://openstack_citest:openstack_citest@localhost/"
diff --git a/oslo_db/sqlalchemy/test_base.py b/oslo_db/sqlalchemy/test_base.py
index 401cb11..f4659e8 100644
--- a/oslo_db/sqlalchemy/test_base.py
+++ b/oslo_db/sqlalchemy/test_base.py
@@ -69,7 +69,7 @@ class DbFixture(fixtures.Fixture):
if not self.test._has_db_resource():
msg = self.test._get_db_resource_not_available_reason()
if self.test.SKIP_ON_UNAVAILABLE_DB:
- self.test.skip(msg)
+ self.test.skipTest(msg)
else:
self.test.fail(msg)
@@ -215,7 +215,7 @@ def backend_specific(*dialects):
'only on %s. Current engine is %s.')
args = (reflection.get_callable_name(f), ', '.join(dialects),
self.engine.name)
- self.skip(msg % args)
+ self.skipTest(msg % args)
else:
return f(self)
return ins_wrap
diff --git a/oslo_db/sqlalchemy/test_fixtures.py b/oslo_db/sqlalchemy/test_fixtures.py
index 210df67..6b82f05 100644
--- a/oslo_db/sqlalchemy/test_fixtures.py
+++ b/oslo_db/sqlalchemy/test_fixtures.py
@@ -534,7 +534,7 @@ class OpportunisticDBTestMixin(object):
if not fixture._has_db_resource():
msg = fixture._get_db_resource_not_available_reason()
if self.SKIP_ON_UNAVAILABLE_DB:
- self.skip(msg)
+ self.skipTest(msg)
else:
self.fail(msg)
diff --git a/oslo_db/sqlalchemy/update_match.py b/oslo_db/sqlalchemy/update_match.py
index 5765817..543101e 100644
--- a/oslo_db/sqlalchemy/update_match.py
+++ b/oslo_db/sqlalchemy/update_match.py
@@ -164,9 +164,9 @@ def update_on_match(
entity = inspect(specimen)
mapper = entity.mapper
- assert \
- [desc['type'] for desc in query.column_descriptions] == \
- [mapper.class_], "Query does not match given specimen"
+ if [desc['type'] for desc in query.column_descriptions] != \
+ [mapper.class_]:
+ raise AssertionError("Query does not match given specimen")
criteria = manufacture_entity_criteria(
specimen, include_only=include_only, exclude=[surrogate_key])
diff --git a/oslo_db/sqlalchemy/utils.py b/oslo_db/sqlalchemy/utils.py
index 4961a70..34d26de 100644
--- a/oslo_db/sqlalchemy/utils.py
+++ b/oslo_db/sqlalchemy/utils.py
@@ -18,10 +18,13 @@
import collections
import contextlib
+import inspect as pyinspect
import itertools
import logging
import re
+from alembic.migration import MigrationContext
+from alembic.operations import Operations
from oslo_utils import timeutils
import six
import sqlalchemy
@@ -47,6 +50,7 @@ from sqlalchemy.types import NullType
from oslo_db._i18n import _
from oslo_db import exception
from oslo_db.sqlalchemy import models
+from oslo_db.sqlalchemy import ndb
# NOTE(ochuprykov): Add references for backwards compatibility
InvalidSortKey = exception.InvalidSortKey
@@ -177,7 +181,9 @@ def paginate_query(query, model, limit, sort_keys, marker=None,
LOG.warning('Unique keys not in sort_keys. '
'The sorting order may be unstable.')
- assert(not (sort_dir and sort_dirs))
+ if sort_dir and sort_dirs:
+ raise AssertionError('Disallow set sort_dir and '
+ 'sort_dirs at the same time.')
# Default the sort direction to ascending
if sort_dirs is None and sort_dir is None:
@@ -187,7 +193,8 @@ def paginate_query(query, model, limit, sort_keys, marker=None,
if sort_dirs is None:
sort_dirs = [sort_dir for _sort_key in sort_keys]
- assert(len(sort_dirs) == len(sort_keys))
+ if len(sort_dirs) != len(sort_keys):
+ raise AssertionError('sort_dirs and sort_keys must have same length.')
# Add sorting
for current_sort_key, current_sort_dir in zip(sort_keys, sort_dirs):
@@ -238,7 +245,8 @@ def paginate_query(query, model, limit, sort_keys, marker=None,
if marker_values[i] is not None:
for j in range(i):
model_attr = getattr(model, sort_keys[j])
- crit_attrs.append((model_attr == marker_values[j]))
+ if marker_values[j] is not None:
+ crit_attrs.append((model_attr == marker_values[j]))
model_attr = getattr(model, sort_keys[i])
val = marker_values[i]
@@ -247,9 +255,16 @@ def paginate_query(query, model, limit, sort_keys, marker=None,
val = int(val)
model_attr = cast(model_attr, Integer)
if sort_dirs[i].startswith('desc'):
- crit_attrs.append((model_attr < val))
+ crit_attr = (model_attr < val)
+ if sort_dirs[i].endswith('nullsfirst'):
+ crit_attr = sqlalchemy.sql.or_(crit_attr,
+ model_attr.is_(None))
else:
- crit_attrs.append((model_attr > val))
+ crit_attr = (model_attr > val)
+ if sort_dirs[i].endswith('nullslast'):
+ crit_attr = sqlalchemy.sql.or_(crit_attr,
+ model_attr.is_(None))
+ crit_attrs.append(crit_attr)
criteria = sqlalchemy.sql.and_(*crit_attrs)
criteria_list.append(criteria)
@@ -700,54 +715,6 @@ def _change_deleted_column_type_to_id_type_sqlite(engine, table_name,
execute()
-def get_connect_string(backend, database, user=None, passwd=None,
- host='localhost'):
- """Get database connection
-
- Try to get a connection with a very specific set of values, if we get
- these then we'll run the tests, otherwise they are skipped
-
- DEPRECATED: this function is deprecated and will be removed from oslo_db
- in a few releases. Please use the provisioning system for dealing
- with URLs and database provisioning.
-
- """
- args = {'backend': backend,
- 'user': user,
- 'passwd': passwd,
- 'host': host,
- 'database': database}
- if backend == 'sqlite':
- template = '%(backend)s:///%(database)s'
- else:
- template = "%(backend)s://%(user)s:%(passwd)s@%(host)s/%(database)s"
- return template % args
-
-
-def is_backend_avail(backend, database, user=None, passwd=None):
- """Return True if the given backend is available.
-
-
- DEPRECATED: this function is deprecated and will be removed from oslo_db
- in a few releases. Please use the provisioning system to access
- databases based on backend availability.
-
- """
- from oslo_db.sqlalchemy import provision
-
- connect_uri = get_connect_string(backend=backend,
- database=database,
- user=user,
- passwd=passwd)
- try:
- eng = provision.Backend._ensure_backend_available(connect_uri)
- eng.dispose()
- except exception.BackendNotAvailable:
- return False
- else:
- return True
-
-
def get_db_connection_info(conn_pieces):
database = conn_pieces.path.strip('/')
loc_pieces = conn_pieces.netloc.split('@')
@@ -1067,11 +1034,10 @@ def get_non_innodb_tables(connectable, skip_tables=('migrate_version',
'alembic_version')):
"""Get a list of tables which don't use InnoDB storage engine.
- :param connectable: a SQLAlchemy Engine or a Connection instance
- :param skip_tables: a list of tables which might have a different
- storage engine
- """
-
+ :param connectable: a SQLAlchemy Engine or a Connection instance
+ :param skip_tables: a list of tables which might have a different
+ storage engine
+ """
query_str = """
SELECT table_name
FROM information_schema.tables
@@ -1125,6 +1091,118 @@ def get_non_ndbcluster_tables(connectable, skip_tables=None):
return [i[0] for i in nonndbcluster]
+def get_foreign_key_constraint_name(engine, table_name, column_name):
+ """Find the name of foreign key in a table, given constrained column name.
+
+ :param engine: a SQLAlchemy engine (or connection)
+
+ :param table_name: name of table which contains the constraint
+
+ :param column_name: name of column that is constrained by the foreign key.
+
+ :return: the name of the first foreign key constraint which constrains
+ the given column in the given table.
+
+ """
+ insp = inspect(engine)
+ for fk in insp.get_foreign_keys(table_name):
+ if column_name in fk['constrained_columns']:
+ return fk['name']
+
+
+@contextlib.contextmanager
+def suspend_fk_constraints_for_col_alter(
+ engine, table_name, column_name, referents=[]):
+ """Detect foreign key constraints, drop, and recreate.
+
+ This is used to guard against a column ALTER that on some backends
+ cannot proceed unless foreign key constraints are not present.
+
+ e.g.::
+
+ from oslo_db.sqlalchemy.util import (
+ suspend_fk_constraints_for_col_alter
+ )
+
+ with suspend_fk_constraints_for_col_alter(
+ migrate_engine, "user_table",
+ referents=[
+ "local_user", "nonlocal_user", "project"
+ ]):
+ user_table.c.domain_id.alter(nullable=False)
+
+ :param engine: a SQLAlchemy engine (or connection)
+
+ :param table_name: target table name. All foreign key constraints
+ that refer to the table_name / column_name will be dropped and recreated.
+
+ :param column_name: target column name. all foreign key constraints
+ which refer to this column, either partially or fully, will be dropped
+ and recreated.
+
+ :param referents: sequence of string table names to search for foreign
+ key constraints. A future version of this function may no longer
+ require this argument, however for the moment it is required.
+
+ """
+ if (
+ not ndb.ndb_status(engine)
+ ):
+ yield
+ else:
+ with engine.connect() as conn:
+ insp = inspect(conn)
+ fks = []
+ for ref_table_name in referents:
+ for fk in insp.get_foreign_keys(ref_table_name):
+ if not fk.get('name'):
+ raise AssertionError("foreign key hasn't a name.")
+ if fk['referred_table'] == table_name and \
+ column_name in fk['referred_columns']:
+ fk['source_table'] = ref_table_name
+ if 'options' not in fk:
+ fk['options'] = {}
+ fks.append(fk)
+
+ ctx = MigrationContext.configure(conn)
+ op = Operations(ctx)
+
+ for fk in fks:
+ op.drop_constraint(
+ fk['name'], fk['source_table'], type_="foreignkey")
+ yield
+ for fk in fks:
+ op.create_foreign_key(
+ fk['name'], fk['source_table'],
+ fk['referred_table'],
+ fk['constrained_columns'],
+ fk['referred_columns'],
+ onupdate=fk['options'].get('onupdate'),
+ ondelete=fk['options'].get('ondelete'),
+ deferrable=fk['options'].get('deferrable'),
+ initially=fk['options'].get('initially'),
+ )
+
+
+def getargspec(fn):
+ """Inspects a function for its argspec.
+
+ This is to handle a difference between py2/3. The Python 2.x getargspec
+ call is deprecated in Python 3.x, with the suggestion to use the signature
+ call instead.
+
+ To keep compatibility with the results, while avoiding deprecation
+ warnings, this instead will use the getfullargspec instead.
+
+ :param fn: The function to inspect.
+ :returns: The argspec for the function.
+ """
+ if hasattr(pyinspect, 'getfullargspec'):
+ return pyinspect.getfullargspec(fn)
+
+ return pyinspect.getargspec(fn)
+
+
class NonCommittingConnectable(object):
"""A ``Connectable`` substitute which rolls all operations back.
diff --git a/oslo_db/tests/sqlalchemy/test_async_eventlet.py b/oslo_db/tests/sqlalchemy/test_async_eventlet.py
index 34d1f32..7eebed4 100644
--- a/oslo_db/tests/sqlalchemy/test_async_eventlet.py
+++ b/oslo_db/tests/sqlalchemy/test_async_eventlet.py
@@ -77,7 +77,7 @@ class EventletTestMixin(object):
eventlet = importutils.try_import('eventlet')
if eventlet is None:
- return self.skip('eventlet is required for this test')
+ return self.skipTest('eventlet is required for this test')
a_ready = eventlet.event.Event()
a_proceed = eventlet.event.Event()
diff --git a/oslo_db/tests/sqlalchemy/test_enginefacade.py b/oslo_db/tests/sqlalchemy/test_enginefacade.py
index 7a7484f..8b782a3 100644
--- a/oslo_db/tests/sqlalchemy/test_enginefacade.py
+++ b/oslo_db/tests/sqlalchemy/test_enginefacade.py
@@ -926,7 +926,7 @@ class MockFacadeTest(oslo_test_base.BaseTestCase):
getattr, context, 'session'
)
- self.assertRegexpMatches(
+ self.assertRegex(
exc.args[0],
"The 'session' context attribute was requested but it has "
"not been established for this context."
@@ -941,7 +941,7 @@ class MockFacadeTest(oslo_test_base.BaseTestCase):
getattr, context, 'connection'
)
- self.assertRegexpMatches(
+ self.assertRegex(
exc.args[0],
"The 'connection' context attribute was requested but it has "
"not been established for this context."
@@ -954,7 +954,7 @@ class MockFacadeTest(oslo_test_base.BaseTestCase):
getattr, context, 'session'
)
- self.assertRegexpMatches(
+ self.assertRegex(
exc.args[0],
"No TransactionContext is established for "
"this .*RequestContext.* object within the current "
@@ -972,7 +972,7 @@ class MockFacadeTest(oslo_test_base.BaseTestCase):
getattr, context, 'connection'
)
- self.assertRegexpMatches(
+ self.assertRegex(
exc.args[0],
"No TransactionContext is established for "
"this .*RequestContext.* object within the current "
@@ -990,7 +990,7 @@ class MockFacadeTest(oslo_test_base.BaseTestCase):
getattr, context, 'transaction'
)
- self.assertRegexpMatches(
+ self.assertRegex(
exc.args[0],
"No TransactionContext is established for "
"this .*RequestContext.* object within the current "
@@ -1008,7 +1008,7 @@ class MockFacadeTest(oslo_test_base.BaseTestCase):
getattr, context, 'transaction_ctx'
)
- self.assertRegexpMatches(
+ self.assertRegex(
exc.args[0],
"No TransactionContext is established for "
"this .*RequestContext.* object within the current "
diff --git a/oslo_db/tests/sqlalchemy/test_exc_filters.py b/oslo_db/tests/sqlalchemy/test_exc_filters.py
index 93ad770..9c2b417 100644
--- a/oslo_db/tests/sqlalchemy/test_exc_filters.py
+++ b/oslo_db/tests/sqlalchemy/test_exc_filters.py
@@ -768,7 +768,7 @@ class TestDBDataErrorSQLite(_SQLAExceptionMatcher, test_base.DbTestCase):
super(TestDBDataErrorSQLite, self).setUp()
if six.PY3:
- self.skip("SQLite database supports unicode value for python3")
+ self.skipTest("SQLite database supports unicode value for python3")
meta = sqla.MetaData(bind=self.engine)
diff --git a/oslo_db/tests/sqlalchemy/test_fixtures.py b/oslo_db/tests/sqlalchemy/test_fixtures.py
index 6950c3b..af6bfa5 100644
--- a/oslo_db/tests/sqlalchemy/test_fixtures.py
+++ b/oslo_db/tests/sqlalchemy/test_fixtures.py
@@ -219,8 +219,8 @@ class LegacyBaseClassTest(oslo_test_base.BaseTestCase):
try:
provision.DatabaseResource(base_cls.FIXTURE.DRIVER)
except exception.BackendNotAvailable:
- self.skip("Backend %s is not available" %
- base_cls.FIXTURE.DRIVER)
+ self.skipTest("Backend %s is not available" %
+ base_cls.FIXTURE.DRIVER)
class SomeTest(base_cls):
def runTest(self):
diff --git a/oslo_db/tests/sqlalchemy/test_migration_common.py b/oslo_db/tests/sqlalchemy/test_migration_common.py
index 870f208..3f5f8ff 100644
--- a/oslo_db/tests/sqlalchemy/test_migration_common.py
+++ b/oslo_db/tests/sqlalchemy/test_migration_common.py
@@ -195,16 +195,16 @@ class TestMigrationCommon(test_base.DbTestCase):
@mock.patch.object(versioning_api, 'upgrade')
def test_db_sync_script_not_present(self, upgrade):
# For non existent migration script file sqlalchemy-migrate will raise
- # VersionNotFoundError which will be wrapped in DbMigrationError.
+ # VersionNotFoundError which will be wrapped in DBMigrationError.
upgrade.side_effect = migrate_exception.VersionNotFoundError
- self.assertRaises(db_exception.DbMigrationError,
+ self.assertRaises(db_exception.DBMigrationError,
migration.db_sync, self.engine, self.path,
self.test_version + 1)
@mock.patch.object(versioning_api, 'upgrade')
def test_db_sync_known_error_raised(self, upgrade):
upgrade.side_effect = migrate_exception.KnownError
- self.assertRaises(db_exception.DbMigrationError,
+ self.assertRaises(db_exception.DBMigrationError,
migration.db_sync, self.engine, self.path,
self.test_version + 1)
diff --git a/oslo_db/tests/sqlalchemy/test_ndb.py b/oslo_db/tests/sqlalchemy/test_ndb.py
index 1d84f41..421befe 100644
--- a/oslo_db/tests/sqlalchemy/test_ndb.py
+++ b/oslo_db/tests/sqlalchemy/test_ndb.py
@@ -176,7 +176,7 @@ class NDBOpportunisticTestCase(
try:
self.test_table.create(self.engine)
except exception.DBNotSupportedError:
- self.skip("MySQL NDB Cluster not available")
+ self.skipTest("MySQL NDB Cluster not available")
def test_ndb_enabled(self):
self.init_db(True)
diff --git a/oslo_db/tests/sqlalchemy/test_provision.py b/oslo_db/tests/sqlalchemy/test_provision.py
index 8f931bd..8f0928f 100644
--- a/oslo_db/tests/sqlalchemy/test_provision.py
+++ b/oslo_db/tests/sqlalchemy/test_provision.py
@@ -176,7 +176,7 @@ class RetainSchemaTest(oslo_test_base.BaseTestCase):
database_resource = provision.DatabaseResource(
self.DRIVER, provision_new_database=True)
except exception.BackendNotAvailable:
- self.skip("database not available")
+ self.skipTest("database not available")
schema_resource = provision.SchemaResource(
database_resource, self._gen_schema)
@@ -244,7 +244,7 @@ class AdHocURLTest(oslo_test_base.BaseTestCase):
mysql_backend = provision.Backend.backend_for_database_type(
"mysql")
except exception.BackendNotAvailable:
- self.skip("mysql backend not available")
+ self.skipTest("mysql backend not available")
mysql_backend.create_named_database("adhoc_test")
self.addCleanup(
diff --git a/oslo_db/tests/sqlalchemy/test_utils.py b/oslo_db/tests/sqlalchemy/test_utils.py
index ad26ee4..d6fb3a3 100644
--- a/oslo_db/tests/sqlalchemy/test_utils.py
+++ b/oslo_db/tests/sqlalchemy/test_utils.py
@@ -22,7 +22,8 @@ import sqlalchemy
from sqlalchemy.dialects import mysql
from sqlalchemy import Boolean, Index, Integer, DateTime, String, SmallInteger
from sqlalchemy import CheckConstraint
-from sqlalchemy import MetaData, Table, Column, ForeignKey
+from sqlalchemy import MetaData, Table, Column
+from sqlalchemy import ForeignKey, ForeignKeyConstraint
from sqlalchemy.engine import reflection
from sqlalchemy.engine import url as sa_url
from sqlalchemy.exc import OperationalError
@@ -30,6 +31,7 @@ from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import mapper
from sqlalchemy.orm import Session
+from sqlalchemy import PrimaryKeyConstraint
from sqlalchemy.sql.expression import cast
from sqlalchemy.sql import select
from sqlalchemy.types import UserDefinedType, NullType
@@ -203,17 +205,11 @@ class TestPaginateQuery(test_base.BaseTestCase):
str(exception.InvalidSortKey()))
self.assertEqual("Sort key supplied is invalid: lol",
str(exception.InvalidSortKey("lol")))
- self.assertEqual("Sort key supplied is invalid: lol",
- exception.InvalidSortKey("lol").message)
def test_invalid_unicode_paramater_str(self):
self.assertEqual(
"Invalid Parameter: Encoding directive wasn't provided.",
str(exception.DBInvalidUnicodeParameter()))
- self.assertEqual(
- "Invalid Parameter: Encoding directive wasn't provided.",
- exception.DBInvalidUnicodeParameter().message
- )
def test_paginate_query_attribute_error(self):
sqlalchemy.asc(self.model.user_id).AndReturn('asc')
@@ -290,9 +286,18 @@ class TestPaginateQuery(test_base.BaseTestCase):
self.query.order_by('desc_1').AndReturn(self.query)
self.mox.StubOutWithMock(sqlalchemy.sql, 'and_')
- sqlalchemy.sql.and_(mock.ANY).AndReturn('some_crit')
- sqlalchemy.sql.and_(mock.ANY, mock.ANY).AndReturn('another_crit')
self.mox.StubOutWithMock(sqlalchemy.sql, 'or_')
+ self.mox.StubOutWithMock(self.model.user_id, 'is_')
+
+ self.model.user_id.is_(None).AndReturn('desc_null_filter_1')
+ self.model.user_id.is_(None).AndReturn('desc_null_filter_2')
+ sqlalchemy.sql.or_(mock.ANY, 'desc_null_filter_2').AndReturn('or_1')
+
+ self.model.project_id.is_(None).AndReturn('asc_null_filter')
+ sqlalchemy.sql.or_(mock.ANY, 'asc_null_filter').AndReturn('or_2')
+
+ sqlalchemy.sql.and_('or_1').AndReturn('some_crit')
+ sqlalchemy.sql.and_(mock.ANY, 'or_2').AndReturn('another_crit')
sqlalchemy.sql.or_('some_crit', 'another_crit').AndReturn('some_f')
self.query.filter('some_f').AndReturn(self.query)
self.query.limit(5).AndReturn(self.query)
@@ -320,8 +325,14 @@ class TestPaginateQuery(test_base.BaseTestCase):
self.query.order_by('desc_1').AndReturn(self.query)
self.mox.StubOutWithMock(sqlalchemy.sql, 'and_')
- sqlalchemy.sql.and_(mock.ANY).AndReturn('some_crit')
self.mox.StubOutWithMock(sqlalchemy.sql, 'or_')
+ self.mox.StubOutWithMock(self.model.user_id, 'is_')
+
+ self.model.user_id.is_(None).AndReturn('asc_null_filter_1')
+ self.model.user_id.is_(None).AndReturn('asc_null_filter_2')
+ sqlalchemy.sql.or_(mock.ANY, 'asc_null_filter_2').AndReturn('or_1')
+
+ sqlalchemy.sql.and_('or_1').AndReturn('some_crit')
sqlalchemy.sql.or_('some_crit').AndReturn('some_f')
self.query.filter('some_f').AndReturn(self.query)
self.query.limit(5).AndReturn(self.query)
@@ -331,6 +342,53 @@ class TestPaginateQuery(test_base.BaseTestCase):
marker=self.marker,
sort_dirs=['asc-nullslast', 'desc-nullsfirst'])
+ def test_paginate_query_marker_null_with_two_primary_keys(self):
+ self.mox.StubOutWithMock(self.model.user_id, 'isnot')
+ self.model.user_id.isnot(None).AndReturn('asc_null_1')
+ sqlalchemy.desc('asc_null_1').AndReturn('asc_null_2')
+ self.query.order_by('asc_null_2').AndReturn(self.query)
+
+ sqlalchemy.asc(self.model.user_id).AndReturn('asc_1')
+ self.query.order_by('asc_1').AndReturn(self.query)
+
+ self.mox.StubOutWithMock(self.model.updated_at, 'is_')
+ self.model.updated_at.is_(None).AndReturn('desc_null_1')
+ sqlalchemy.desc('desc_null_1').AndReturn('desc_null_2')
+ self.query.order_by('desc_null_2').AndReturn(self.query)
+
+ sqlalchemy.desc(self.model.updated_at).AndReturn('desc_1')
+ self.query.order_by('desc_1').AndReturn(self.query)
+
+ self.mox.StubOutWithMock(self.model.project_id, 'is_')
+ self.model.project_id.is_(None).AndReturn('desc_null_3')
+ sqlalchemy.desc('desc_null_3').AndReturn('desc_null_4')
+ self.query.order_by('desc_null_4').AndReturn(self.query)
+
+ sqlalchemy.desc(self.model.project_id).AndReturn('desc_4')
+ self.query.order_by('desc_4').AndReturn(self.query)
+
+ self.mox.StubOutWithMock(sqlalchemy.sql, 'and_')
+ self.mox.StubOutWithMock(sqlalchemy.sql, 'or_')
+ self.mox.StubOutWithMock(self.model.user_id, 'is_')
+
+ self.model.user_id.is_(None).AndReturn('asc_null_filter_1')
+ self.model.user_id.is_(None).AndReturn('asc_null_filter_2')
+ self.model.project_id.is_(None).AndReturn('desc_null_filter_3')
+
+ sqlalchemy.sql.or_(mock.ANY, 'asc_null_filter_2').AndReturn('or_1')
+ sqlalchemy.sql.or_(mock.ANY, 'desc_null_filter_3').AndReturn('or_2')
+ sqlalchemy.sql.and_('or_1').AndReturn('some_crit')
+ sqlalchemy.sql.and_(mock.ANY, 'or_2').AndReturn('other_crit')
+ sqlalchemy.sql.or_('some_crit', 'other_crit').AndReturn('some_f')
+ self.query.filter('some_f').AndReturn(self.query)
+ self.query.limit(5).AndReturn(self.query)
+ self.mox.ReplayAll()
+ utils.paginate_query(self.query, self.model, 5,
+ ['user_id', 'updated_at', 'project_id'],
+ marker=self.marker,
+ sort_dirs=['asc-nullslast', 'desc-nullsfirst',
+ 'desc-nullsfirst'])
+
def test_paginate_query_value_error(self):
sqlalchemy.asc(self.model.user_id).AndReturn('asc_1')
self.query.order_by('asc_1').AndReturn(self.query)
@@ -768,11 +826,138 @@ class TestMigrationUtils(db_test_base.DbTestCase):
table = Table(table_name, self.meta, autoload=True)
# NOTE(I159): if the CHECK constraint has been dropped (expected
# behavior), any integer value can be inserted, otherwise only 1 or 0.
- self.engine.execute(table.insert({'deleted': 10}))
+ # NOTE(zzzeek): SQLAlchemy 1.2 Boolean type will disallow non 1/0
+ # value here, 1.1 also coerces to "1/0" so use raw SQL to test the
+ # constraint
+ with self.engine.connect() as conn:
+ conn.execute(
+ "INSERT INTO abc (deleted) VALUES (?)",
+ (10, )
+ )
+
+ self.assertEqual(
+ 10,
+ conn.scalar("SELECT deleted FROM abc")
+ )
+
+ def test_get_foreign_key_constraint_name(self):
+ table_1 = Table('table_name_1', self.meta,
+ Column('id', Integer, primary_key=True),
+ Column('deleted', Integer))
+ table_2 = Table('table_name_2', self.meta,
+ Column('id', Integer, primary_key=True),
+ Column('foreign_id', Integer),
+ ForeignKeyConstraint(['foreign_id'],
+ ['table_name_1.id'],
+ name='table_name_2_fk1'),
+ Column('deleted', Integer))
+
+ self.meta.create_all(tables=[table_1, table_2])
+ fkc = utils.get_foreign_key_constraint_name(self.engine,
+ 'table_name_2',
+ 'foreign_id')
+ self.assertEqual(fkc, 'table_name_2_fk1')
+ @db_test_base.backend_specific('mysql', 'postgresql')
+ def test_suspend_fk_constraints_for_col_alter(self):
-class PostgesqlTestMigrations(TestMigrationUtils,
- db_test_base.PostgreSQLOpportunisticTestCase):
+ a = Table(
+ 'a', self.meta,
+ Column('id', Integer, primary_key=True)
+ )
+ b = Table(
+ 'b', self.meta,
+ Column('key', Integer),
+ Column('archive_id', Integer),
+ Column('aid', ForeignKey('a.id')),
+ PrimaryKeyConstraint("key", "archive_id")
+ )
+ c = Table(
+ 'c', self.meta,
+ Column('id', Integer, primary_key=True),
+ Column('aid', ForeignKey('a.id')),
+ Column('key', Integer),
+ Column('archive_id', Integer),
+ ForeignKeyConstraint(
+ ['key', 'archive_id'], ['b.key', 'b.archive_id'],
+ name="some_composite_fk")
+ )
+ self.meta.create_all(tables=[a, b, c])
+
+ def get_fk_entries():
+ inspector = sqlalchemy.inspect(self.engine)
+ return sorted(
+ inspector.get_foreign_keys('b') +
+ inspector.get_foreign_keys('c'),
+ key=lambda fk: fk['referred_table']
+ )
+
+ def normalize_fk_entries(fks):
+ return [{
+ 'name': fk['name'],
+ 'referred_columns': fk['referred_columns'],
+ 'referred_table': fk['referred_table'],
+ } for fk in fks]
+
+ existing_foreign_keys = get_fk_entries()
+ self.assertEqual(
+ [{'name': mock.ANY,
+ 'referred_columns': ['id'], 'referred_table': 'a'},
+ {'name': mock.ANY,
+ 'referred_columns': ['id'], 'referred_table': 'a'},
+ {'name': 'some_composite_fk',
+ 'referred_columns': ['key', 'archive_id'],
+ 'referred_table': 'b'}],
+ normalize_fk_entries(existing_foreign_keys)
+ )
+
+ with mock.patch("oslo_db.sqlalchemy.ndb.ndb_status",
+ mock.Mock(return_value=True)):
+ with utils.suspend_fk_constraints_for_col_alter(
+ self.engine, 'a', 'id', referents=['b', 'c']):
+ no_a_foreign_keys = get_fk_entries()
+ self.assertEqual(
+ [{'name': 'some_composite_fk',
+ 'referred_columns': ['key', 'archive_id'],
+ 'referred_table': 'b'}],
+ normalize_fk_entries(no_a_foreign_keys)
+ )
+
+ self.assertEqual(existing_foreign_keys, get_fk_entries())
+
+ with mock.patch("oslo_db.sqlalchemy.ndb.ndb_status",
+ mock.Mock(return_value=True)):
+ with utils.suspend_fk_constraints_for_col_alter(
+ self.engine, 'b', 'archive_id', referents=['c']):
+ self.assertEqual(
+ [{'name': mock.ANY,
+ 'referred_columns': ['id'], 'referred_table': 'a'},
+ {'name': mock.ANY,
+ 'referred_columns': ['id'], 'referred_table': 'a'}],
+ normalize_fk_entries(get_fk_entries())
+ )
+
+ self.assertEqual(existing_foreign_keys, get_fk_entries())
+
+ with utils.suspend_fk_constraints_for_col_alter(
+ self.engine, 'a', 'id', referents=['b', 'c']):
+ self.assertEqual(existing_foreign_keys, get_fk_entries())
+
+ if self.engine.name == 'mysql':
+ self.engine.dialect._oslodb_enable_ndb_support = True
+
+ self.addCleanup(
+ setattr, self.engine.dialect, "_oslodb_enable_ndb_support",
+ False
+ )
+
+ with utils.suspend_fk_constraints_for_col_alter(
+ self.engine, 'a', 'id', referents=['b', 'c']):
+ self.assertEqual(no_a_foreign_keys, get_fk_entries())
+
+
+class PostgresqlTestMigrations(TestMigrationUtils,
+ db_test_base.PostgreSQLOpportunisticTestCase):
"""Test migrations on PostgreSQL."""
pass
@@ -813,34 +998,6 @@ class TestConnectionUtils(test_utils.BaseTestCase):
patch_onconnect.start()
self.addCleanup(patch_onconnect.stop)
- def test_connect_string(self):
- connect_string = utils.get_connect_string(**self.full_credentials)
- self.assertEqual(self.connect_string, connect_string)
-
- def test_connect_string_sqlite(self):
- sqlite_credentials = {'backend': 'sqlite', 'database': 'test.db'}
- connect_string = utils.get_connect_string(**sqlite_credentials)
- self.assertEqual('sqlite:///test.db', connect_string)
-
- def test_is_backend_avail(self):
- self.mox.StubOutWithMock(sqlalchemy.engine.base.Engine, 'connect')
- fake_connection = self.mox.CreateMockAnything()
- fake_connection.close()
- sqlalchemy.engine.base.Engine.connect().AndReturn(fake_connection)
- self.mox.ReplayAll()
-
- self.assertTrue(utils.is_backend_avail(**self.full_credentials))
-
- def test_is_backend_unavail(self):
- log = self.useFixture(fixtures.FakeLogger())
- err = OperationalError("Can't connect to database", None, None)
- error_msg = "The postgresql backend is unavailable: %s\n" % err
- self.mox.StubOutWithMock(sqlalchemy.engine.base.Engine, 'connect')
- sqlalchemy.engine.base.Engine.connect().AndRaise(err)
- self.mox.ReplayAll()
- self.assertFalse(utils.is_backend_avail(**self.full_credentials))
- self.assertEqual(error_msg, log.output)
-
def test_ensure_backend_available(self):
self.mox.StubOutWithMock(sqlalchemy.engine.base.Engine, 'connect')
fake_connection = self.mox.CreateMockAnything()
@@ -894,11 +1051,6 @@ class TestConnectionUtils(test_utils.BaseTestCase):
self.assertEqual(('dude', 'pass', 'test', 'localhost'),
utils.get_db_connection_info(conn_pieces))
- def test_connect_string_host(self):
- self.full_credentials['host'] = 'myhost'
- connect_string = utils.get_connect_string(**self.full_credentials)
- self.assertEqual('postgresql://dude:pass@myhost/test', connect_string)
-
class MyModelSoftDeletedProjectId(declarative_base(), models.ModelBase,
models.SoftDeleteMixin):
diff --git a/releasenotes/notes/MySQL-python-no-longer-tested-2a6c32cce6b03215.yaml b/releasenotes/notes/MySQL-python-no-longer-tested-2a6c32cce6b03215.yaml
new file mode 100644
index 0000000..547b926
--- /dev/null
+++ b/releasenotes/notes/MySQL-python-no-longer-tested-2a6c32cce6b03215.yaml
@@ -0,0 +1,8 @@
+---
+deprecations:
+ - |
+ PyMySQL is a default MySQL DB API driver for oslo.db, as well as for the whole
+ OpenStack. So far it was possible to use MySQL-python as an alternative DB API driver.
+ This driver is no longer being tested in this release, hence it should be considered
+ unsupported. Please switch to PyMySQL, which is an adequate replacement. Refer to
+ https://wiki.openstack.org/wiki/PyMySQL_evaluation for details.
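As the note above says, deployments still using the C driver should move to PyMySQL by changing the connection URL scheme; a hedged configuration sketch using ``enginefacade.configure()`` (credentials and database name are placeholders)::

    from oslo_db.sqlalchemy import enginefacade

    # assumes the standard oslo.db 'connection' option; the mysql+pymysql://
    # scheme selects the pure-Python PyMySQL driver
    enginefacade.configure(
        connection='mysql+pymysql://user:password@localhost/mydb')
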
diff --git a/releasenotes/source/conf.py b/releasenotes/source/conf.py
index eb70f78..bded8c8 100644
--- a/releasenotes/source/conf.py
+++ b/releasenotes/source/conf.py
@@ -62,16 +62,11 @@ master_doc = 'index'
project = u'oslo.db Release Notes'
copyright = u'2016, oslo.db Developers'
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The short X.Y version.
-# The full version, including alpha/beta/rc tags.
-import pkg_resources
-release = pkg_resources.get_distribution('oslo.db').version
+# Release notes do not need a version in the title, they span
+# multiple versions.
+release = ''
# The short X.Y version.
-version = release
+version = ''
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
diff --git a/releasenotes/source/locale/en_GB/LC_MESSAGES/releasenotes.po b/releasenotes/source/locale/en_GB/LC_MESSAGES/releasenotes.po
index ed16396..e6fcd71 100644
--- a/releasenotes/source/locale/en_GB/LC_MESSAGES/releasenotes.po
+++ b/releasenotes/source/locale/en_GB/LC_MESSAGES/releasenotes.po
@@ -1,37 +1,174 @@
# Andi Chandler <andi@gowling.com>, 2016. #zanata
+# Andi Chandler <andi@gowling.com>, 2017. #zanata
msgid ""
msgstr ""
-"Project-Id-Version: oslo.db Release Notes 4.18.1.dev1\n"
+"Project-Id-Version: oslo.db Release Notes\n"
"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2017-03-14 11:56+0000\n"
+"POT-Creation-Date: 2017-12-05 12:09+0000\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-"PO-Revision-Date: 2016-06-28 05:55+0000\n"
+"PO-Revision-Date: 2017-12-05 10:29+0000\n"
"Last-Translator: Andi Chandler <andi@gowling.com>\n"
"Language-Team: English (United Kingdom)\n"
"Language: en-GB\n"
"X-Generator: Zanata 3.9.6\n"
"Plural-Forms: nplurals=2; plural=(n != 1)\n"
+msgid "2.6.0-9"
+msgstr "2.6.0-9"
+
+msgid "4.12.0"
+msgstr "4.12.0"
+
+msgid "4.15.0"
+msgstr "4.15.0"
+
+msgid "4.19.0"
+msgstr "4.19.0"
+
+msgid "4.22.0"
+msgstr "4.22.0"
+
+msgid "4.26.0"
+msgstr "4.26.0"
+
+msgid "4.30.0"
+msgstr "4.30.0"
+
msgid "4.6.0"
msgstr "4.6.0"
+msgid "4.8.0"
+msgstr "4.8.0"
+
+msgid "4.9.0"
+msgstr "4.9.0"
+
+msgid "Bug Fixes"
+msgstr "Bug Fixes"
+
+msgid ""
+"Decorator ``oslo_db.api.wrap_db_retry`` now defaults to 10 retries. "
+"Previously the number of attempts was 0, and users had to explicitly pass "
+"``max_retry_interval`` value greater than 0 to actually enable retries on "
+"errors."
+msgstr ""
+"Decorator ``oslo_db.api.wrap_db_retry`` now defaults to 10 retries. "
+"Previously the number of attempts was 0, and users had to explicitly pass "
+"``max_retry_interval`` value greater than 0 to actually enable retries on "
+"errors."
+
+msgid "Deprecation Notes"
+msgstr "Deprecation Notes"
+
msgid "For details, please see the following LP:"
msgstr "For details, please see the following LP:"
msgid "Introduce reno for deployer release notes."
msgstr "Introduce reno for deployer release notes."
+msgid ""
+"It is strongly recommended to use the `PyMySQL <https://pypi.python.org/pypi/"
+"PyMySQL>`__ driver when connecting to a MySQL-compatible database to ensure "
+"the best compatibility with the concurrency library eventlet. To use "
+"PyMySQL, ensure the connection URL is specified with ``mysql+pymysql://`` as "
+"the scheme."
+msgstr ""
+"It is strongly recommended to use the `PyMySQL <https://pypi.python.org/pypi/"
+"PyMySQL>`__ driver when connecting to a MySQL-compatible database to ensure "
+"the best compatibility with the concurrency library eventlet. To use "
+"PyMySQL, ensure the connection URL is specified with ``mysql+pymysql://`` as "
+"the scheme."
+
msgid "Liberty Series Release Notes"
msgstr "Liberty Series Release Notes"
msgid "Mitaka Series Release Notes"
msgstr "Mitaka Series Release Notes"
+msgid "New Features"
+msgstr "New Features"
+
+msgid "Newton Series Release Notes"
+msgstr "Newton Series Release Notes"
+
+msgid "Ocata Series Release Notes"
+msgstr "Ocata Series Release Notes"
+
msgid "Other Notes"
msgstr "Other Notes"
+msgid "Pike Series Release Notes"
+msgstr "Pike Series Release Notes"
+
+msgid ""
+"PyMySQL is a default MySQL DB API driver for oslo.db, as well as for the "
+"whole OpenStack. So far it was possible to use MySQL-python as an "
+"alternative DB API driver. This driver is no longer being tested in this "
+"release, hence it should be considered unsupported. Please switch to "
+"PyMySQL, which is an adequate replacement. Refer to https://wiki.openstack."
+"org/wiki/PyMySQL_evaluation for details."
+msgstr ""
+"PyMySQL is a default MySQL DB API driver for oslo.db, as well as for the "
+"whole OpenStack. So far it was possible to use MySQL-python as an "
+"alternative DB API driver. This driver is no longer being tested in this "
+"release, hence it should be considered unsupported. Please switch to "
+"PyMySQL, which is an adequate replacement. Refer to https://wiki.openstack."
+"org/wiki/PyMySQL_evaluation for details."
+
+msgid ""
+"The allowed values for the ``connection_debug`` option are now restricted to "
+"the range between 0 and 100 (inclusive). Previously a number lower than 0 or "
+"higher than 100 could be given without error. But now, a "
+"``ConfigFileValueError`` will be raised when the option value is outside "
+"this range."
+msgstr ""
+"The allowed values for the ``connection_debug`` option are now restricted to "
+"the range between 0 and 100 (inclusive). Previously a number lower than 0 or "
+"higher than 100 could be given without error. But now, a "
+"``ConfigFileValueError`` will be raised when the option value is outside "
+"this range."
+
+msgid ""
+"The configuration option ``idle_timeout`` is now deprecated and has been "
+"renamed to ``connection_recycle_time``, including within the main oslo.db "
+"options, as well as in the keyword arguments to ``engines.create_engine()``, "
+"``enginefacade.configure()`` and ``enginefacade.configure_defaults()``. The "
+"new name more accurately describes what this option does, in that it is not "
+"directly related to the \"idle\" time of the connection itself, nor is the "
+"connection disconnected at any specific time. It refers to a rule stating "
+"that any connection which has been present more than N seconds as a member "
+"of the connection pool will be automatically discarded and replaced the next "
+"time it is checked out from the pool."
+msgstr ""
+"The configuration option ``idle_timeout`` is now deprecated and has been "
+"renamed to ``connection_recycle_time``, including within the main oslo.db "
+"options, as well as in the keyword arguments to ``engines.create_engine()``, "
+"``enginefacade.configure()`` and ``enginefacade.configure_defaults()``. The "
+"new name more accurately describes what this option does, in that it is not "
+"directly related to the \"idle\" time of the connection itself, nor is the "
+"connection disconnected at any specific time. It refers to a rule stating "
+"that any connection which has been present more than N seconds as a member "
+"of the connection pool will be automatically discarded and replaced the next "
+"time it is checked out from the pool."
+
+msgid ""
+"The configuration option ``sqlite_db`` is now deprecated and will be removed "
+"in the future. Please use configuration option ``connection`` or "
+"``slave_connection`` to connect to the database."
+msgstr ""
+"The configuration option ``sqlite_db`` is now deprecated and will be removed "
+"in the future. Please use configuration option ``connection`` or "
+"``slave_connection`` to connect to the database."
+
+msgid ""
+"The configuration option ``sqlite_db`` is removed. Pease use configuration "
+"option ``connection`` or ``slave_connection`` to connect to the database."
+msgstr ""
+"The configuration option ``sqlite_db`` is removed. Please use configuration "
+"option ``connection`` or ``slave_connection`` to connect to the database."
+
msgid ""
"The default value of ``max_overflow`` config option has been increased from "
"10 to 50 in order to allow OpenStack services heavily using DBs to better "
@@ -66,6 +203,37 @@ msgid "and the ML thread:"
msgstr "and the ML thread:"
msgid ""
+"base test classes from ``oslo_db.sqlalchemy.test_base`` are deprecated in "
+"favor of new fixtures introduced in ``oslo_db.sqlalchemy.test_fixtures`` "
+"module"
+msgstr ""
+"base test classes from ``oslo_db.sqlalchemy.test_base`` are deprecated in "
+"flavour of new fixtures introduced in ``oslo_db.sqlalchemy.test_fixtures`` "
+"module"
+
+msgid ""
+"class ``InsertFromSelect`` from module ``oslo_db.sqlalchemy.utils`` is "
+"deprecated in favor of ``sqlalchemy.sql.expression.Insert.from_select()`` "
+"method of Insert expression, that is available in SQLAlchemy versions 1.0.0 "
+"and newer"
+msgstr ""
+"class ``InsertFromSelect`` from module ``oslo_db.sqlalchemy.utils`` is "
+"deprecated in favor of ``sqlalchemy.sql.expression.Insert.from_select()`` "
+"method of Insert expression, that is available in SQLAlchemy versions 1.0.0 "
+"and newer"
+
+msgid ""
+"enginefacade decorators can now be used for class and instance methods, "
+"which implicitly receive the first positional argument. Previously, it was "
+"required that all decorated functions receive a context value as the first "
+"argument."
+msgstr ""
+"enginefacade decorators can now be used for class and instance methods, "
+"which implicitly receive the first positional argument. Previously, it was "
+"required that all decorated functions receive a context value as the first "
+"argument."
+
+msgid ""
"http://dev.mysql.com/doc/refman/5.7/en/server-system-variables."
"html#sysvar_max_connections http://www.postgresql.org/docs/current/static/"
"runtime-config-connection.html#GUC-MAX-CONNECTIONS"
@@ -84,3 +252,12 @@ msgstr "https://bugs.launchpad.net/oslo.db/+bug/1535375"
msgid "oslo.db Release Notes"
msgstr "oslo.db Release Notes"
+
+msgid ""
+"oslo.db now logs a warning when the connection URL does not explicitly "
+"mention a driver. The default driver is still used, but in some cases, such "
+"as MySQL, the default is incompatible with the concurrency library eventlet."
+msgstr ""
+"oslo.db now logs a warning when the connection URL does not explicitly "
+"mention a driver. The default driver is still used, but in some cases, such "
+"as MySQL, the default is incompatible with the concurrency library eventlet."
diff --git a/requirements.txt b/requirements.txt
index b6f7af4..0872ada 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -5,10 +5,10 @@
pbr!=2.1.0,>=2.0.0 # Apache-2.0
alembic>=0.8.10 # MIT
debtcollector>=1.2.0 # Apache-2.0
-oslo.i18n!=3.15.2,>=2.1.0 # Apache-2.0
-oslo.config!=4.3.0,!=4.4.0,>=4.0.0 # Apache-2.0
-oslo.utils>=3.20.0 # Apache-2.0
+oslo.i18n>=3.15.3 # Apache-2.0
+oslo.config>=5.1.0 # Apache-2.0
+oslo.utils>=3.33.0 # Apache-2.0
SQLAlchemy!=1.1.5,!=1.1.6,!=1.1.7,!=1.1.8,>=1.0.10 # MIT
sqlalchemy-migrate>=0.11.0 # Apache-2.0
stevedore>=1.20.0 # Apache-2.0
-six>=1.9.0 # MIT
+six>=1.10.0 # MIT
diff --git a/setup.cfg b/setup.cfg
index 8921213..815a378 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -22,31 +22,26 @@ classifier =
# So e.g. nova can test-depend on oslo.db[mysql]
mysql =
PyMySQL>=0.7.6 # MIT License
-# or oslo.db[mysql-c]
-mysql-c =
- MySQL-python:python_version=='2.7' # GPL with FOSS exception
# or oslo.db[postgresql]
postgresql =
- psycopg2>=2.5 # LGPL/ZPL
+ psycopg2>=2.6.2 # LGPL/ZPL
# Dependencies for testing oslo.db itself.
test =
hacking!=0.13.0,<0.14,>=0.12.0 # Apache-2.0
coverage!=4.4,>=4.0 # Apache-2.0
- doc8 # Apache-2.0
eventlet!=0.18.3,!=0.20.1,<0.21.0,>=0.18.2 # MIT
fixtures>=3.0.0 # Apache-2.0/BSD
mock>=2.0.0 # BSD
- python-subunit>=0.0.18 # Apache-2.0/BSD
- sphinx>=1.6.2 # BSD
- openstackdocstheme>=1.16.0 # Apache-2.0
+ python-subunit>=1.0.0 # Apache-2.0/BSD
oslotest>=1.10.0 # Apache-2.0
- oslo.context>=2.14.0 # Apache-2.0
+ oslo.context>=2.19.2 # Apache-2.0
testrepository>=0.0.18 # Apache-2.0/BSD
- testtools>=1.4.0 # MIT
- os-testr>=0.8.0 # Apache-2.0
- reno>=2.5.0 # Apache-2.0
+ testtools>=2.2.0 # MIT
+ os-testr>=1.0.0 # Apache-2.0
+# Bandit security code scanner
+ bandit>=1.1.0 # Apache-2.0
fixtures =
- testresources>=0.2.4 # Apache-2.0/BSD
+ testresources>=2.0.0 # Apache-2.0/BSD
testscenarios>=0.4 # Apache-2.0/BSD
pifpaf =
pifpaf>=0.10.0 # Apache-2.0
diff --git a/tox.ini b/tox.ini
index 73eb107..3afe728 100644
--- a/tox.ini
+++ b/tox.ini
@@ -33,14 +33,11 @@ commands =
env TEST_EVENTLET=0 bash tools/pretty_tox.sh '{posargs}'
env TEST_EVENTLET=1 bash tools/pretty_tox.sh '{posargs}'
-[testenv:mysql-python]
-deps = .[mysql-c,postgresql,test,fixtures]
-setenv =
- {[testenv]setenv}
- OS_TEST_DBAPI_ADMIN_CONNECTION=mysql://openstack_citest:openstack_citest@localhost/;postgresql://openstack_citest:openstack_citest@localhost/postgres;sqlite://
-
[testenv:pep8]
-commands = flake8
+commands =
+ flake8
+ # Run security linter
+ bandit -r oslo_db -x tests -n5 --skip B105,B311
[testenv:venv]
commands = {posargs}
@@ -49,11 +46,13 @@ commands = {posargs}
commands = python setup.py test --coverage --coverage-package-name=oslo_db --testr-args='{posargs}'
[testenv:docs]
+deps = -r{toxinidir}/doc/requirements.txt
commands =
doc8 -e .rst CONTRIBUTING.rst HACKING.rst README.rst doc/source
- python setup.py build_sphinx
+ sphinx-build -b html doc/source doc/build/html
[testenv:releasenotes]
+deps = -r{toxinidir}/doc/requirements.txt
commands = sphinx-build -a -E -W -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html
[flake8]