-rw-r--r--  .zuul.yaml | 14
-rw-r--r--  README.rst | 2
-rw-r--r--  doc/requirements.txt | 12
-rw-r--r--  lower-constraints.txt | 71
-rw-r--r--  oslo_db/locale/en_GB/LC_MESSAGES/oslo_db.po | 8
-rw-r--r--  oslo_db/options.py | 5
-rw-r--r--  oslo_db/sqlalchemy/enginefacade.py | 26
-rw-r--r--  oslo_db/sqlalchemy/engines.py | 20
-rw-r--r--  oslo_db/sqlalchemy/test_base.py | 13
-rw-r--r--  oslo_db/sqlalchemy/test_fixtures.py | 5
-rw-r--r--  oslo_db/sqlalchemy/test_migrations.py | 8
-rw-r--r--  oslo_db/sqlalchemy/update_match.py | 6
-rw-r--r--  oslo_db/sqlalchemy/utils.py | 10
-rw-r--r--  oslo_db/tests/sqlalchemy/base.py | 35
-rw-r--r--  oslo_db/tests/sqlalchemy/test_async_eventlet.py | 4
-rw-r--r--  oslo_db/tests/sqlalchemy/test_enginefacade.py | 36
-rw-r--r--  oslo_db/tests/sqlalchemy/test_exc_filters.py | 30
-rw-r--r--  oslo_db/tests/sqlalchemy/test_fixtures.py | 3
-rw-r--r--  oslo_db/tests/sqlalchemy/test_migration_common.py | 2
-rw-r--r--  oslo_db/tests/sqlalchemy/test_migrations.py | 12
-rw-r--r--  oslo_db/tests/sqlalchemy/test_models.py | 4
-rw-r--r--  oslo_db/tests/sqlalchemy/test_provision.py | 6
-rw-r--r--  oslo_db/tests/sqlalchemy/test_sqlalchemy.py | 88
-rw-r--r--  oslo_db/tests/sqlalchemy/test_types.py | 2
-rw-r--r--  oslo_db/tests/sqlalchemy/test_update_match.py | 6
-rw-r--r--  oslo_db/tests/sqlalchemy/test_utils.py | 14
-rw-r--r--  playbooks/legacy/oslo.db-tox-mysql-python/post.yaml | 67
-rw-r--r--  playbooks/legacy/oslo.db-tox-mysql-python/run.yaml | 86
-rw-r--r--  releasenotes/notes/add_connection_parameters-231aa7d8b7d2d416.yaml | 7
-rw-r--r--  releasenotes/notes/fix_synchronous_reader-ca442ca9f07470ec.yaml | 8
-rw-r--r--  releasenotes/source/index.rst | 1
-rw-r--r--  releasenotes/source/locale/en_GB/LC_MESSAGES/releasenotes.po | 25
-rw-r--r--  releasenotes/source/queens.rst | 6
-rw-r--r--  requirements.txt | 8
-rw-r--r--  setup.cfg | 22
-rw-r--r--  test-requirements.txt | 18
-rwxr-xr-x  tools/tox_install.sh | 30
-rw-r--r--  tox.ini | 27
38 files changed, 414 insertions(+), 333 deletions(-)
diff --git a/.zuul.yaml b/.zuul.yaml
index 2398cf0..67a39c4 100644
--- a/.zuul.yaml
+++ b/.zuul.yaml
@@ -1,17 +1,7 @@
- project:
- name: openstack/oslo.db
check:
jobs:
- - oslo.db-tox-mysql-python
+ - openstack-tox-lower-constraints
gate:
jobs:
- - oslo.db-tox-mysql-python
-
-- job:
- name: oslo.db-tox-mysql-python
- parent: legacy-base
- run: playbooks/legacy/oslo.db-tox-mysql-python/run.yaml
- post-run: playbooks/legacy/oslo.db-tox-mysql-python/post.yaml
- timeout: 2400
- required-projects:
- - openstack/requirements
+ - openstack-tox-lower-constraints
diff --git a/README.rst b/README.rst
index ee1bf23..2fdee43 100644
--- a/README.rst
+++ b/README.rst
@@ -2,7 +2,7 @@
Team and repository tags
========================
-.. image:: https://governance.openstack.org/badges/oslo.db.svg
+.. image:: https://governance.openstack.org/tc/badges/oslo.db.svg
:target: https://governance.openstack.org/tc/reference/tags/index.html
.. Change things from this point on
diff --git a/doc/requirements.txt b/doc/requirements.txt
index dd46184..dcc6aab 100644
--- a/doc/requirements.txt
+++ b/doc/requirements.txt
@@ -1,5 +1,13 @@
# The order of packages is significant, because pip processes them in the order
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.
-openstackdocstheme>=1.17.0 # Apache-2.0
-sphinx>=1.6.2 # BSD
+openstackdocstheme>=1.18.1 # Apache-2.0
+sphinx!=1.6.6,!=1.6.7,>=1.6.2 # BSD
+doc8>=0.6.0 # Apache-2.0
+reno>=2.5.0 # Apache-2.0
+
+# These modules are needed when generating document
+fixtures>=3.0.0 # Apache-2.0/BSD
+testresources>=2.0.0 # Apache-2.0/BSD
+testscenarios>=0.4 # Apache-2.0/BSD
+oslotest>=3.2.0 # Apache-2.0
diff --git a/lower-constraints.txt b/lower-constraints.txt
new file mode 100644
index 0000000..2aad60b
--- /dev/null
+++ b/lower-constraints.txt
@@ -0,0 +1,71 @@
+alembic==0.8.10
+appdirs==1.3.0
+Babel==2.3.4
+bandit==1.4.0
+cffi==1.7.0
+cliff==2.8.0
+cmd2==0.8.0
+coverage==4.0
+debtcollector==1.2.0
+decorator==3.4.0
+eventlet==0.18.2
+extras==1.0.0
+fixtures==3.0.0
+flake8==2.5.5
+future==0.16.0
+gitdb==0.6.4
+GitPython==1.0.1
+greenlet==0.4.10
+hacking==0.12.0
+iso8601==0.1.11
+keystoneauth1==3.4.0
+linecache2==1.0.0
+Mako==0.4.0
+MarkupSafe==1.0
+mccabe==0.2.1
+mock==2.0.0
+monotonic==0.6
+mox3==0.20.0
+netaddr==0.7.18
+netifaces==0.10.4
+os-client-config==1.28.0
+os-testr==1.0.0
+oslo.config==5.2.0
+oslo.context==2.19.2
+oslo.i18n==3.15.3
+oslo.utils==3.33.0
+oslotest==3.2.0
+pbr==2.0.0
+pep8==1.5.7
+pifpaf==0.10.0
+prettytable==0.7.2
+psycopg2==2.6.2
+pycparser==2.18
+pyflakes==0.8.1
+PyMySQL==0.7.6
+pyparsing==2.1.0
+pyperclip==1.5.27
+python-editor==1.0.3
+python-mimeparse==1.6.0
+python-subunit==1.0.0
+pytz==2013.6
+PyYAML==3.12
+requests==2.14.2
+requestsexceptions==1.2.0
+rfc3986==0.3.1
+six==1.10.0
+smmap==0.9.0
+SQLAlchemy==1.0.10
+sqlalchemy-migrate==0.11.0
+sqlparse==0.2.2
+stestr==1.0.0
+stevedore==1.20.0
+Tempita==0.5.2
+testrepository==0.0.18
+testresources==2.0.0
+testscenarios==0.4
+testtools==2.2.0
+traceback2==1.4.0
+unittest2==1.1.0
+wrapt==1.7.0
+xattr==0.9.2
diff --git a/oslo_db/locale/en_GB/LC_MESSAGES/oslo_db.po b/oslo_db/locale/en_GB/LC_MESSAGES/oslo_db.po
index bc5d79a..c99e59e 100644
--- a/oslo_db/locale/en_GB/LC_MESSAGES/oslo_db.po
+++ b/oslo_db/locale/en_GB/LC_MESSAGES/oslo_db.po
@@ -7,18 +7,18 @@
# Andreas Jaeger <jaegerandi@gmail.com>, 2016. #zanata
msgid ""
msgstr ""
-"Project-Id-Version: oslo.db 4.6.1.dev46\n"
+"Project-Id-Version: oslo.db VERSION\n"
"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n"
-"POT-Creation-Date: 2016-06-15 11:18+0000\n"
+"POT-Creation-Date: 2018-02-20 22:06+0000\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"PO-Revision-Date: 2016-06-20 06:31+0000\n"
"Last-Translator: Andreas Jaeger <jaegerandi@gmail.com>\n"
-"Language: en-GB\n"
+"Language: en_GB\n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
"Generated-By: Babel 2.0\n"
-"X-Generator: Zanata 3.7.3\n"
+"X-Generator: Zanata 4.3.3\n"
"Language-Team: English (United Kingdom)\n"
msgid "Invalid Parameter: Encoding directive wasn't provided."
diff --git a/oslo_db/options.py b/oslo_db/options.py
index 8bd70a2..9a08552 100644
--- a/oslo_db/options.py
+++ b/oslo_db/options.py
@@ -146,6 +146,11 @@ database_opts = [
'error before error is '
'raised. Set to -1 to specify an infinite retry '
'count.'),
+ cfg.StrOpt('connection_parameters',
+ default='',
+ help='Optional URL parameters to append onto the connection '
+ 'URL at connect time; specify as '
+ 'param1=value1&param2=value2&...'),
]
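
The new connection_parameters option above takes a query-string-style value that is appended onto the database URL at connect time. A minimal sketch of how it could be set in a service configuration file, reusing the parameter names that appear in the tests later in this change (the user, host and database names are placeholders):

    [database]
    connection = mysql+pymysql://user:password@dbhost/mydb?charset=utf8
    connection_parameters = plugin=sqlalchemy_collectd&collectd_host=127.0.0.1

With this configuration the effective URL gains plugin and collectd_host in addition to the charset parameter already carried on the connection URL itself.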
diff --git a/oslo_db/sqlalchemy/enginefacade.py b/oslo_db/sqlalchemy/enginefacade.py
index 7871258..2b9f81e 100644
--- a/oslo_db/sqlalchemy/enginefacade.py
+++ b/oslo_db/sqlalchemy/enginefacade.py
@@ -149,7 +149,8 @@ class _TransactionFactory(object):
'thread_checkin': _Default(True),
'json_serializer': _Default(None),
'json_deserializer': _Default(None),
- 'logging_name': _Default(None)
+ 'logging_name': _Default(None),
+ 'connection_parameters': _Default(None)
}
self._maker_cfg = {
'expire_on_commit': _Default(False),
@@ -164,15 +165,15 @@ class _TransactionFactory(object):
'on_engine_create': [],
}
- # other options that are defined in oslo.db.options.database_opts
- # but do not apply to the standard enginefacade arguments
- # (most seem to apply to api.DBAPI).
+ # other options that are defined in oslo_db.options.database_opts
+ # or oslo_db.concurrency.tpool_opts but do not apply to the standard
+ # enginefacade arguments (most seem to apply to api.DBAPI).
self._ignored_cfg = dict(
(k, _Default(None)) for k in [
'db_max_retries', 'db_inc_retry_interval',
'use_db_reconnect',
'db_retry_interval', 'min_pool_size',
- 'db_max_retry_interval', 'backend'])
+ 'db_max_retry_interval', 'backend', 'use_tpool'])
self._started = False
self._legacy_facade = None
@@ -219,6 +220,9 @@ class _TransactionFactory(object):
:param connection_debug: engine logging level, defaults to 0. set to
50 for INFO, 100 for DEBUG.
+ :param connection_parameters: additional parameters to append onto the
+ database URL query string, pass as "param1=value1&param2=value2&..."
+
:param max_pool_size: max size of connection pool, uses CONF for
default
@@ -393,7 +397,8 @@ class _TransactionFactory(object):
self._start()
if mode is _WRITER:
return self._writer_engine.connect()
- elif self.synchronous_reader or mode is _ASYNC_READER:
+ elif mode is _ASYNC_READER or \
+ (mode is _READER and not self.synchronous_reader):
return self._reader_engine.connect()
else:
return self._writer_engine.connect()
@@ -408,7 +413,8 @@ class _TransactionFactory(object):
kw['bind'] = bind
if mode is _WRITER:
return self._writer_maker(**kw)
- elif self.synchronous_reader or mode is _ASYNC_READER:
+ elif mode is _ASYNC_READER or \
+ (mode is _READER and not self.synchronous_reader):
return self._reader_maker(**kw)
else:
return self._writer_maker(**kw)
@@ -843,7 +849,8 @@ class _TransactionContextManager(object):
new = self._clone()
new._root = new
new._root_factory = self._root_factory._create_factory_copy()
- assert not new._factory._started
+ if new._factory._started:
+ raise AssertionError('TransactionFactory is already started')
return new
def patch_factory(self, factory_or_manager):
@@ -869,7 +876,8 @@ class _TransactionContextManager(object):
raise ValueError(
"_TransactionContextManager or "
"_TransactionFactory expected.")
- assert self._root is self
+ if self._root is not self:
+ raise AssertionError('patch_factory only works for root factory.')
existing_factory = self._root_factory
self._root_factory = factory
diff --git a/oslo_db/sqlalchemy/engines.py b/oslo_db/sqlalchemy/engines.py
index 2808ef4..05045ca 100644
--- a/oslo_db/sqlalchemy/engines.py
+++ b/oslo_db/sqlalchemy/engines.py
@@ -104,6 +104,21 @@ def _setup_logging(connection_debug=0):
logger.setLevel(logging.WARNING)
+def _extend_url_parameters(url, connection_parameters):
+ for key, value in six.moves.urllib.parse.parse_qs(
+ connection_parameters).items():
+ if key in url.query:
+ existing = url.query[key]
+ if not isinstance(existing, list):
+ url.query[key] = existing = utils.to_list(existing)
+ existing.extend(value)
+ value = existing
+ else:
+ url.query[key] = value
+ if len(value) == 1:
+ url.query[key] = value[0]
+
+
def _vet_url(url):
if "+" not in url.drivername and not url.drivername.startswith("sqlite"):
if url.drivername.startswith("mysql"):
@@ -132,11 +147,14 @@ def create_engine(sql_connection, sqlite_fk=False, mysql_sql_mode=None,
connection_trace=False, max_retries=10, retry_interval=10,
thread_checkin=True, logging_name=None,
json_serializer=None,
- json_deserializer=None):
+ json_deserializer=None, connection_parameters=None):
"""Return a new SQLAlchemy engine."""
url = sqlalchemy.engine.url.make_url(sql_connection)
+ if connection_parameters:
+ _extend_url_parameters(url, connection_parameters)
+
_vet_url(url)
engine_args = {
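
The _extend_url_parameters() helper added above parses the connection_parameters string with parse_qs() and merges it into the URL's existing query mapping, collecting repeated keys into lists and keeping single values as scalars. A standalone sketch of the same merge semantics, using a plain dict in place of SQLAlchemy's URL.query and Python 3 urllib instead of six (illustrative only):

    from urllib.parse import parse_qs

    def merge_connection_parameters(query, connection_parameters):
        # Mirror of the merge behaviour above, applied to a plain dict.
        for key, value in parse_qs(connection_parameters).items():
            if key in query:
                existing = query[key]
                if not isinstance(existing, list):
                    existing = [existing]
                existing.extend(value)
                value = existing
            # Single values are stored as scalars, repeated keys as lists.
            query[key] = value[0] if len(value) == 1 else value
        return query

    # Same input as the test_combine_multi_params test further down.
    query = {'charset': 'utf8', 'param_file': 'tripleo.cnf', 'plugin': 'connmon'}
    merge_connection_parameters(
        query, "plugin=sqlalchemy_collectd&collectd_host=127.0.0.1")
    # query['plugin'] is now ['connmon', 'sqlalchemy_collectd'] and
    # query['collectd_host'] is '127.0.0.1'; the other keys are unchanged.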
diff --git a/oslo_db/sqlalchemy/test_base.py b/oslo_db/sqlalchemy/test_base.py
index 14e9d02..8c6fdbd 100644
--- a/oslo_db/sqlalchemy/test_base.py
+++ b/oslo_db/sqlalchemy/test_base.py
@@ -32,10 +32,11 @@ from oslo_db import exception
from oslo_db.sqlalchemy import enginefacade
from oslo_db.sqlalchemy import provision
from oslo_db.sqlalchemy import session
-from oslo_db.sqlalchemy.test_fixtures import optimize_package_test_loader
-@debtcollector.removals.removed_class("DbFixture")
+@debtcollector.removals.removed_class(
+ "DbFixture",
+ message="Please use oslo_db.sqlalchemy.test_fixtures directly")
class DbFixture(fixtures.Fixture):
"""Basic database fixture.
@@ -90,7 +91,9 @@ class DbFixture(fixtures.Fixture):
self.addCleanup(self.test.enginefacade.dispose_global)
-@debtcollector.removals.removed_class("DbTestCase")
+@debtcollector.removals.removed_class(
+ "DbTestCase",
+ message="Please use oslo_db.sqlalchemy.test_fixtures directly")
class DbTestCase(test_base.BaseTestCase):
"""Base class for testing of DB code.
@@ -241,7 +244,3 @@ class MySQLOpportunisticTestCase(OpportunisticTestCase):
@debtcollector.removals.removed_class("PostgreSQLOpportunisticTestCase")
class PostgreSQLOpportunisticTestCase(OpportunisticTestCase):
FIXTURE = PostgreSQLOpportunisticFixture
-
-
-optimize_db_test_loader = debtcollector.moves.moved_function(
- optimize_package_test_loader, "optimize_db_test_loader", __name__)
diff --git a/oslo_db/sqlalchemy/test_fixtures.py b/oslo_db/sqlalchemy/test_fixtures.py
index f3b5c1c..6b82f05 100644
--- a/oslo_db/sqlalchemy/test_fixtures.py
+++ b/oslo_db/sqlalchemy/test_fixtures.py
@@ -402,8 +402,9 @@ class OpportunisticDbFixture(BaseDbFixture):
This fixture relies upon the use of the OpportunisticDBTestMixin to supply
a test.resources attribute, and also works much more effectively when
combined the testresources.OptimisingTestSuite. The
- optimize_db_test_loader() function should be used at the module and package
- levels to optimize database provisioning across many tests.
+ optimize_package_test_loader() function should be
+ used at the module and package levels to optimize database
+ provisioning across many tests.
"""
def __init__(self, test, driver=None, ident=None):
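
As the corrected docstring notes, optimize_package_test_loader() is meant to be wired in at module and package level. A minimal sketch of such a hook in a test package's __init__.py, inferred from the loader test later in this change (the package layout itself is hypothetical):

    # tests/sqlalchemy/__init__.py (hypothetical package)
    from oslo_db.sqlalchemy import test_fixtures

    # __file__ resolves to this __init__.py, which is the path the loader expects.
    load_tests = test_fixtures.optimize_package_test_loader(__file__)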
diff --git a/oslo_db/sqlalchemy/test_migrations.py b/oslo_db/sqlalchemy/test_migrations.py
index d650025..4b8cb12 100644
--- a/oslo_db/sqlalchemy/test_migrations.py
+++ b/oslo_db/sqlalchemy/test_migrations.py
@@ -476,9 +476,10 @@ class ModelsMigrationsSync(object):
if isinstance(meta_col.type, sqlalchemy.Boolean):
if meta_def is None or insp_def is None:
return meta_def != insp_def
+ insp_def = insp_def.strip("'")
return not (
- isinstance(meta_def.arg, expr.True_) and insp_def == "'1'" or
- isinstance(meta_def.arg, expr.False_) and insp_def == "'0'"
+ isinstance(meta_def.arg, expr.True_) and insp_def == "1" or
+ isinstance(meta_def.arg, expr.False_) and insp_def == "0"
)
impl_type = meta_col.type
@@ -487,7 +488,8 @@ class ModelsMigrationsSync(object):
if isinstance(impl_type, (sqlalchemy.Integer, sqlalchemy.BigInteger)):
if meta_def is None or insp_def is None:
return meta_def != insp_def
- return meta_def.arg != insp_def.split("'")[1]
+ insp_def = insp_def.strip("'")
+ return meta_def.arg != insp_def
@_compare_server_default.dispatch_for('postgresql')
def _compare_server_default(bind, meta_col, insp_def, meta_def):
diff --git a/oslo_db/sqlalchemy/update_match.py b/oslo_db/sqlalchemy/update_match.py
index 5765817..543101e 100644
--- a/oslo_db/sqlalchemy/update_match.py
+++ b/oslo_db/sqlalchemy/update_match.py
@@ -164,9 +164,9 @@ def update_on_match(
entity = inspect(specimen)
mapper = entity.mapper
- assert \
- [desc['type'] for desc in query.column_descriptions] == \
- [mapper.class_], "Query does not match given specimen"
+ if [desc['type'] for desc in query.column_descriptions] != \
+ [mapper.class_]:
+ raise AssertionError("Query does not match given specimen")
criteria = manufacture_entity_criteria(
specimen, include_only=include_only, exclude=[surrogate_key])
diff --git a/oslo_db/sqlalchemy/utils.py b/oslo_db/sqlalchemy/utils.py
index 7c5f22c..34d26de 100644
--- a/oslo_db/sqlalchemy/utils.py
+++ b/oslo_db/sqlalchemy/utils.py
@@ -181,7 +181,9 @@ def paginate_query(query, model, limit, sort_keys, marker=None,
LOG.warning('Unique keys not in sort_keys. '
'The sorting order may be unstable.')
- assert(not (sort_dir and sort_dirs))
+ if sort_dir and sort_dirs:
+ raise AssertionError('Disallow set sort_dir and '
+ 'sort_dirs at the same time.')
# Default the sort direction to ascending
if sort_dirs is None and sort_dir is None:
@@ -191,7 +193,8 @@ def paginate_query(query, model, limit, sort_keys, marker=None,
if sort_dirs is None:
sort_dirs = [sort_dir for _sort_key in sort_keys]
- assert(len(sort_dirs) == len(sort_keys))
+ if len(sort_dirs) != len(sort_keys):
+ raise AssertionError('sort_dirs and sort_keys must have same length.')
# Add sorting
for current_sort_key, current_sort_dir in zip(sort_keys, sort_dirs):
@@ -1152,7 +1155,8 @@ def suspend_fk_constraints_for_col_alter(
fks = []
for ref_table_name in referents:
for fk in insp.get_foreign_keys(ref_table_name):
- assert fk.get('name')
+ if not fk.get('name'):
+ raise AssertionError("foreign key hasn't a name.")
if fk['referred_table'] == table_name and \
column_name in fk['referred_columns']:
fk['source_table'] = ref_table_name
diff --git a/oslo_db/tests/sqlalchemy/base.py b/oslo_db/tests/sqlalchemy/base.py
index a23f249..e07b45c 100644
--- a/oslo_db/tests/sqlalchemy/base.py
+++ b/oslo_db/tests/sqlalchemy/base.py
@@ -13,6 +13,8 @@
# License for the specific language governing permissions and limitations
# under the License.
+import debtcollector
+
from oslo_db.sqlalchemy import enginefacade
from oslo_db.sqlalchemy.test_base import backend_specific # noqa
from oslo_db.sqlalchemy import test_fixtures as db_fixtures
@@ -26,6 +28,10 @@ class Context(object):
context = Context()
+@debtcollector.removals.removed_class(
+ "DbTestCase",
+ message="Do not import from oslo_db.tests! "
+ "Please use oslo_db.sqlalchemy.test_fixtures directly")
class DbTestCase(db_fixtures.OpportunisticDBTestMixin, test_base.BaseTestCase):
def setUp(self):
@@ -35,9 +41,38 @@ class DbTestCase(db_fixtures.OpportunisticDBTestMixin, test_base.BaseTestCase):
self.sessionmaker = enginefacade.writer.get_sessionmaker()
+@debtcollector.removals.removed_class(
+ "MySQLOpportunisticTestCase",
+ message="Do not import from oslo_db.tests! "
+ "Please use oslo_db.sqlalchemy.test_fixtures directly")
class MySQLOpportunisticTestCase(DbTestCase):
FIXTURE = db_fixtures.MySQLOpportunisticFixture
+@debtcollector.removals.removed_class(
+ "PostgreSQLOpportunisticTestCase",
+ message="Do not import from oslo_db.tests! "
+ "Please use oslo_db.sqlalchemy.test_fixtures directly")
class PostgreSQLOpportunisticTestCase(DbTestCase):
FIXTURE = db_fixtures.PostgresqlOpportunisticFixture
+
+
+# NOTE (zzzeek) These test classes are **private to oslo.db**. Please
+# make use of oslo_db.sqlalchemy.test_fixtures directly.
+
+class _DbTestCase(
+ db_fixtures.OpportunisticDBTestMixin, test_base.BaseTestCase):
+
+ def setUp(self):
+ super(_DbTestCase, self).setUp()
+
+ self.engine = enginefacade.writer.get_engine()
+ self.sessionmaker = enginefacade.writer.get_sessionmaker()
+
+
+class _MySQLOpportunisticTestCase(_DbTestCase):
+ FIXTURE = db_fixtures.MySQLOpportunisticFixture
+
+
+class _PostgreSQLOpportunisticTestCase(_DbTestCase):
+ FIXTURE = db_fixtures.PostgresqlOpportunisticFixture
diff --git a/oslo_db/tests/sqlalchemy/test_async_eventlet.py b/oslo_db/tests/sqlalchemy/test_async_eventlet.py
index 7eebed4..747601b 100644
--- a/oslo_db/tests/sqlalchemy/test_async_eventlet.py
+++ b/oslo_db/tests/sqlalchemy/test_async_eventlet.py
@@ -118,10 +118,10 @@ class EventletTestMixin(object):
# ie: This file performs no tests by default.
class MySQLEventletTestCase(EventletTestMixin,
- test_base.MySQLOpportunisticTestCase):
+ test_base._MySQLOpportunisticTestCase):
pass
class PostgreSQLEventletTestCase(EventletTestMixin,
- test_base.PostgreSQLOpportunisticTestCase):
+ test_base._PostgreSQLOpportunisticTestCase):
pass
diff --git a/oslo_db/tests/sqlalchemy/test_enginefacade.py b/oslo_db/tests/sqlalchemy/test_enginefacade.py
index 8b782a3..ced9483 100644
--- a/oslo_db/tests/sqlalchemy/test_enginefacade.py
+++ b/oslo_db/tests/sqlalchemy/test_enginefacade.py
@@ -135,15 +135,15 @@ class MockFacadeTest(oslo_test_base.BaseTestCase):
async_reader_maker = writer_maker
if self.synchronous_reader:
- reader_conn = async_reader_conn
- reader_engine = async_reader_engine
- reader_session = async_reader_session
- reader_maker = async_reader_maker
- else:
reader_conn = writer_conn
reader_engine = writer_engine
reader_session = writer_session
reader_maker = writer_maker
+ else:
+ reader_conn = async_reader_conn
+ reader_engine = async_reader_engine
+ reader_session = async_reader_session
+ reader_maker = async_reader_maker
self.connections = AssertDataSource(
writer_conn, reader_conn, async_reader_conn
@@ -236,9 +236,9 @@ class MockFacadeTest(oslo_test_base.BaseTestCase):
async_reader_engine = writer_engine
if self.synchronous_reader:
- reader_engine = async_reader_engine
- else:
reader_engine = writer_engine
+ else:
+ reader_engine = async_reader_engine
engines = AssertDataSource(
writer_engine, reader_engine, async_reader_engine)
@@ -333,9 +333,9 @@ class MockFacadeTest(oslo_test_base.BaseTestCase):
async_reader_maker = writer_maker
if self.synchronous_reader:
- reader_maker = async_reader_maker
- else:
reader_maker = writer_maker
+ else:
+ reader_maker = async_reader_maker
makers = AssertDataSource(
writer_maker,
@@ -1335,7 +1335,7 @@ class AsyncReaderWSlaveMockFacadeTest(MockFacadeTest):
slave_uri = 'some_slave_connection'
-class LegacyIntegrationtest(test_base.DbTestCase):
+class LegacyIntegrationtest(test_base._DbTestCase):
def test_legacy_integration(self):
legacy_facade = enginefacade.get_legacy_facade()
@@ -1393,7 +1393,7 @@ class LegacyIntegrationtest(test_base.DbTestCase):
)
-class ThreadingTest(test_base.DbTestCase):
+class ThreadingTest(test_base._DbTestCase):
"""Test copy/pickle on new threads using real connections and sessions."""
def _assert_ctx_connection(self, context, connection):
@@ -1586,7 +1586,7 @@ class ThreadingTest(test_base.DbTestCase):
assert session is not session2
-class LiveFacadeTest(test_base.DbTestCase):
+class LiveFacadeTest(test_base._DbTestCase):
"""test using live SQL with test-provisioned databases.
Several of these tests require that multiple transactions run
@@ -1780,7 +1780,7 @@ class LiveFacadeTest(test_base.DbTestCase):
session = self.sessionmaker(autocommit=True)
- # inner transction + second part of "outer" transaction were committed
+ # inner transaction + second part of "outer" transaction were committed
self.assertEqual(
[("u2",), ("u3",), ("u4", )],
session.query(
@@ -1820,7 +1820,7 @@ class LiveFacadeTest(test_base.DbTestCase):
session = self.sessionmaker(autocommit=True)
- # inner transction + second part of "outer" transaction were committed
+ # inner transaction + second part of "outer" transaction were committed
self.assertEqual(
[("u1",), ("u3",), ("u4", )],
session.query(
@@ -1848,7 +1848,7 @@ class LiveFacadeTest(test_base.DbTestCase):
session = self.sessionmaker(autocommit=True)
- # inner transction + second part of "outer" transaction were committed
+ # inner transaction + second part of "outer" transaction were committed
self.assertEqual(
[("u1",), ("u3",), ("u4", )],
session.query(
@@ -1896,7 +1896,7 @@ class LiveFacadeTest(test_base.DbTestCase):
session = self.sessionmaker(autocommit=True)
- # inner transction + second part of "outer" transaction were committed
+ # inner transaction + second part of "outer" transaction were committed
self.assertEqual(
[("u2",), ("u3",), ("u4", )],
session.query(
@@ -2127,12 +2127,12 @@ class LiveFacadeTest(test_base.DbTestCase):
class MySQLLiveFacadeTest(
- test_base.MySQLOpportunisticTestCase, LiveFacadeTest):
+ test_base._MySQLOpportunisticTestCase, LiveFacadeTest):
pass
class PGLiveFacadeTest(
- test_base.PostgreSQLOpportunisticTestCase, LiveFacadeTest):
+ test_base._PostgreSQLOpportunisticTestCase, LiveFacadeTest):
pass
diff --git a/oslo_db/tests/sqlalchemy/test_exc_filters.py b/oslo_db/tests/sqlalchemy/test_exc_filters.py
index 9c2b417..208a40a 100644
--- a/oslo_db/tests/sqlalchemy/test_exc_filters.py
+++ b/oslo_db/tests/sqlalchemy/test_exc_filters.py
@@ -251,7 +251,7 @@ class TestFallthroughsAndNonDBAPI(TestsExceptionFilter):
class TestNonExistentConstraint(
_SQLAExceptionMatcher,
- test_base.DbTestCase):
+ test_base._DbTestCase):
def setUp(self):
super(TestNonExistentConstraint, self).setUp()
@@ -269,7 +269,7 @@ class TestNonExistentConstraint(
class TestNonExistentConstraintPostgreSQL(
TestNonExistentConstraint,
- test_base.PostgreSQLOpportunisticTestCase):
+ test_base._PostgreSQLOpportunisticTestCase):
def test_raise(self):
matched = self.assertRaises(
@@ -293,7 +293,7 @@ class TestNonExistentConstraintPostgreSQL(
class TestNonExistentConstraintMySQL(
TestNonExistentConstraint,
- test_base.MySQLOpportunisticTestCase):
+ test_base._MySQLOpportunisticTestCase):
def test_raise(self):
matched = self.assertRaises(
@@ -317,7 +317,7 @@ class TestNonExistentConstraintMySQL(
class TestNonExistentTable(
_SQLAExceptionMatcher,
- test_base.DbTestCase):
+ test_base._DbTestCase):
def setUp(self):
super(TestNonExistentTable, self).setUp()
@@ -348,7 +348,7 @@ class TestNonExistentTable(
class TestNonExistentTablePostgreSQL(
TestNonExistentTable,
- test_base.PostgreSQLOpportunisticTestCase):
+ test_base._PostgreSQLOpportunisticTestCase):
def test_raise(self):
matched = self.assertRaises(
@@ -367,7 +367,7 @@ class TestNonExistentTablePostgreSQL(
class TestNonExistentTableMySQL(
TestNonExistentTable,
- test_base.MySQLOpportunisticTestCase):
+ test_base._MySQLOpportunisticTestCase):
def test_raise(self):
matched = self.assertRaises(
@@ -385,7 +385,7 @@ class TestNonExistentTableMySQL(
class TestNonExistentDatabase(
_SQLAExceptionMatcher,
- test_base.DbTestCase):
+ test_base._DbTestCase):
def setUp(self):
super(TestNonExistentDatabase, self).setUp()
@@ -411,7 +411,7 @@ class TestNonExistentDatabase(
class TestNonExistentDatabaseMySQL(
TestNonExistentDatabase,
- test_base.MySQLOpportunisticTestCase):
+ test_base._MySQLOpportunisticTestCase):
def test_raise(self):
matched = self.assertRaises(
@@ -430,7 +430,7 @@ class TestNonExistentDatabaseMySQL(
class TestNonExistentDatabasePostgreSQL(
TestNonExistentDatabase,
- test_base.PostgreSQLOpportunisticTestCase):
+ test_base._PostgreSQLOpportunisticTestCase):
def test_raise(self):
matched = self.assertRaises(
@@ -446,7 +446,7 @@ class TestNonExistentDatabasePostgreSQL(
)
-class TestReferenceErrorSQLite(_SQLAExceptionMatcher, test_base.DbTestCase):
+class TestReferenceErrorSQLite(_SQLAExceptionMatcher, test_base._DbTestCase):
def setUp(self):
super(TestReferenceErrorSQLite, self).setUp()
@@ -520,7 +520,7 @@ class TestReferenceErrorSQLite(_SQLAExceptionMatcher, test_base.DbTestCase):
class TestReferenceErrorPostgreSQL(TestReferenceErrorSQLite,
- test_base.PostgreSQLOpportunisticTestCase):
+ test_base._PostgreSQLOpportunisticTestCase):
def test_raise(self):
params = {'id': 1, 'foo_id': 2}
matched = self.assertRaises(
@@ -571,7 +571,7 @@ class TestReferenceErrorPostgreSQL(TestReferenceErrorSQLite,
class TestReferenceErrorMySQL(TestReferenceErrorSQLite,
- test_base.MySQLOpportunisticTestCase):
+ test_base._MySQLOpportunisticTestCase):
def test_raise(self):
matched = self.assertRaises(
exception.DBReferenceError,
@@ -632,7 +632,7 @@ class TestReferenceErrorMySQL(TestReferenceErrorSQLite,
self.assertEqual("resource_foo", matched.key_table)
-class TestExceptionCauseMySQLSavepoint(test_base.MySQLOpportunisticTestCase):
+class TestExceptionCauseMySQLSavepoint(test_base._MySQLOpportunisticTestCase):
def setUp(self):
super(TestExceptionCauseMySQLSavepoint, self).setUp()
@@ -762,7 +762,7 @@ class TestExceptionCauseMySQLSavepoint(test_base.MySQLOpportunisticTestCase):
assert False, "no exception raised"
-class TestDBDataErrorSQLite(_SQLAExceptionMatcher, test_base.DbTestCase):
+class TestDBDataErrorSQLite(_SQLAExceptionMatcher, test_base._DbTestCase):
def setUp(self):
super(TestDBDataErrorSQLite, self).setUp()
@@ -1084,7 +1084,7 @@ class TestDataError(TestsExceptionFilter):
self.DataError)
-class IntegrationTest(test_base.DbTestCase):
+class IntegrationTest(test_base._DbTestCase):
"""Test an actual error-raising round trips against the database."""
def setUp(self):
diff --git a/oslo_db/tests/sqlalchemy/test_fixtures.py b/oslo_db/tests/sqlalchemy/test_fixtures.py
index df905e8..af6bfa5 100644
--- a/oslo_db/tests/sqlalchemy/test_fixtures.py
+++ b/oslo_db/tests/sqlalchemy/test_fixtures.py
@@ -265,9 +265,6 @@ class TestLoadHook(unittest.TestCase):
def test_package_level(self):
self._test_package_level(test_fixtures.optimize_package_test_loader)
- def test_package_level_legacy(self):
- self._test_package_level(legacy_test_base.optimize_db_test_loader)
-
def _test_package_level(self, fn):
load_tests = fn(
os.path.join(start_dir, "__init__.py"))
diff --git a/oslo_db/tests/sqlalchemy/test_migration_common.py b/oslo_db/tests/sqlalchemy/test_migration_common.py
index 3f5f8ff..377c6a9 100644
--- a/oslo_db/tests/sqlalchemy/test_migration_common.py
+++ b/oslo_db/tests/sqlalchemy/test_migration_common.py
@@ -28,7 +28,7 @@ from oslo_db.tests.sqlalchemy import base as test_base
from oslo_db.tests import utils as test_utils
-class TestMigrationCommon(test_base.DbTestCase):
+class TestMigrationCommon(test_base._DbTestCase):
def setUp(self):
super(TestMigrationCommon, self).setUp()
diff --git a/oslo_db/tests/sqlalchemy/test_migrations.py b/oslo_db/tests/sqlalchemy/test_migrations.py
index d42812d..62cbdfb 100644
--- a/oslo_db/tests/sqlalchemy/test_migrations.py
+++ b/oslo_db/tests/sqlalchemy/test_migrations.py
@@ -180,7 +180,7 @@ class TestWalkVersions(test.BaseTestCase, migrate.WalkVersionsMixin):
self.assertEqual(upgraded, self.migrate_up.call_args_list)
-class ModelsMigrationSyncMixin(test_base.DbTestCase):
+class ModelsMigrationSyncMixin(test_base._DbTestCase):
def setUp(self):
super(ModelsMigrationSyncMixin, self).setUp()
@@ -359,7 +359,7 @@ class ModelsMigrationSyncMixin(test_base.DbTestCase):
class ModelsMigrationsSyncMysql(ModelsMigrationSyncMixin,
migrate.ModelsMigrationsSync,
- test_base.MySQLOpportunisticTestCase):
+ test_base._MySQLOpportunisticTestCase):
def test_models_not_sync(self):
self._test_models_not_sync()
@@ -370,7 +370,7 @@ class ModelsMigrationsSyncMysql(ModelsMigrationSyncMixin,
class ModelsMigrationsSyncPsql(ModelsMigrationSyncMixin,
migrate.ModelsMigrationsSync,
- test_base.PostgreSQLOpportunisticTestCase):
+ test_base._PostgreSQLOpportunisticTestCase):
def test_models_not_sync(self):
self._test_models_not_sync()
@@ -379,7 +379,7 @@ class ModelsMigrationsSyncPsql(ModelsMigrationSyncMixin,
self._test_models_not_sync_filtered()
-class TestOldCheckForeignKeys(test_base.DbTestCase):
+class TestOldCheckForeignKeys(test_base._DbTestCase):
def setUp(self):
super(TestOldCheckForeignKeys, self).setUp()
@@ -557,10 +557,10 @@ class TestOldCheckForeignKeys(test_base.DbTestCase):
class PGTestOldCheckForeignKeys(
- TestOldCheckForeignKeys, test_base.PostgreSQLOpportunisticTestCase):
+ TestOldCheckForeignKeys, test_base._PostgreSQLOpportunisticTestCase):
pass
class MySQLTestOldCheckForeignKeys(
- TestOldCheckForeignKeys, test_base.MySQLOpportunisticTestCase):
+ TestOldCheckForeignKeys, test_base._MySQLOpportunisticTestCase):
pass
diff --git a/oslo_db/tests/sqlalchemy/test_models.py b/oslo_db/tests/sqlalchemy/test_models.py
index 0b0f6d8..540f4f8 100644
--- a/oslo_db/tests/sqlalchemy/test_models.py
+++ b/oslo_db/tests/sqlalchemy/test_models.py
@@ -30,7 +30,7 @@ from oslo_db.tests.sqlalchemy import base as test_base
BASE = declarative_base()
-class ModelBaseTest(test_base.DbTestCase):
+class ModelBaseTest(test_base._DbTestCase):
def setUp(self):
super(ModelBaseTest, self).setUp()
self.mb = models.ModelBase()
@@ -191,7 +191,7 @@ class SoftDeletedModel(BASE, models.ModelBase, models.SoftDeleteMixin):
smth = Column('smth', String(255))
-class SoftDeleteMixinTest(test_base.DbTestCase):
+class SoftDeleteMixinTest(test_base._DbTestCase):
def setUp(self):
super(SoftDeleteMixinTest, self).setUp()
diff --git a/oslo_db/tests/sqlalchemy/test_provision.py b/oslo_db/tests/sqlalchemy/test_provision.py
index 8f0928f..d985595 100644
--- a/oslo_db/tests/sqlalchemy/test_provision.py
+++ b/oslo_db/tests/sqlalchemy/test_provision.py
@@ -27,7 +27,7 @@ from oslo_db.sqlalchemy import utils
from oslo_db.tests.sqlalchemy import base as test_base
-class DropAllObjectsTest(test_base.DbTestCase):
+class DropAllObjectsTest(test_base._DbTestCase):
def setUp(self):
super(DropAllObjectsTest, self).setUp()
@@ -138,12 +138,12 @@ class BackendNotAvailableTest(oslo_test_base.BaseTestCase):
class MySQLDropAllObjectsTest(
- DropAllObjectsTest, test_base.MySQLOpportunisticTestCase):
+ DropAllObjectsTest, test_base._MySQLOpportunisticTestCase):
pass
class PostgreSQLDropAllObjectsTest(
- DropAllObjectsTest, test_base.PostgreSQLOpportunisticTestCase):
+ DropAllObjectsTest, test_base._PostgreSQLOpportunisticTestCase):
pass
diff --git a/oslo_db/tests/sqlalchemy/test_sqlalchemy.py b/oslo_db/tests/sqlalchemy/test_sqlalchemy.py
index 4ae2506..9ebf58c 100644
--- a/oslo_db/tests/sqlalchemy/test_sqlalchemy.py
+++ b/oslo_db/tests/sqlalchemy/test_sqlalchemy.py
@@ -52,7 +52,7 @@ class RegexpTable(BASE, models.ModelBase):
bar = Column(String(255))
-class RegexpFilterTestCase(test_base.DbTestCase):
+class RegexpFilterTestCase(test_base._DbTestCase):
def setUp(self):
super(RegexpFilterTestCase, self).setUp()
@@ -90,7 +90,7 @@ class RegexpFilterTestCase(test_base.DbTestCase):
self._test_regexp_filter(u'♦', [])
-class SQLiteSavepointTest(test_base.DbTestCase):
+class SQLiteSavepointTest(test_base._DbTestCase):
def setUp(self):
super(SQLiteSavepointTest, self).setUp()
meta = MetaData()
@@ -213,7 +213,80 @@ class FakeDB2Engine(object):
pass
-class MySQLDefaultModeTestCase(test_base.MySQLOpportunisticTestCase):
+class QueryParamTest(test_base.DbTestCase):
+ def _fixture(self):
+ from sqlalchemy import create_engine
+
+ def _mock_create_engine(*arg, **kw):
+ return create_engine("sqlite://")
+
+ return mock.patch(
+ "oslo_db.sqlalchemy.engines.sqlalchemy.create_engine",
+ side_effect=_mock_create_engine)
+
+ def test_add_assorted_params(self):
+ with self._fixture() as ce:
+ engines.create_engine(
+ "mysql+pymysql://foo:bar@bat",
+ connection_parameters="foo=bar&bat=hoho&bat=param2")
+
+ self.assertEqual(
+ ce.mock_calls[0][1][0].query,
+ {'bat': ['hoho', 'param2'], 'foo': 'bar'}
+ )
+
+ def test_add_no_params(self):
+ with self._fixture() as ce:
+ engines.create_engine(
+ "mysql+pymysql://foo:bar@bat")
+
+ self.assertEqual(
+ ce.mock_calls[0][1][0].query,
+ {}
+ )
+
+ def test_combine_params(self):
+ with self._fixture() as ce:
+ engines.create_engine(
+ "mysql+pymysql://foo:bar@bat/"
+ "?charset=utf8&param_file=tripleo.cnf",
+ connection_parameters="plugin=sqlalchemy_collectd&"
+ "collectd_host=127.0.0.1&"
+ "bind_host=192.168.1.5")
+
+ self.assertEqual(
+ ce.mock_calls[0][1][0].query,
+ {
+ 'bind_host': '192.168.1.5',
+ 'charset': 'utf8',
+ 'collectd_host': '127.0.0.1',
+ 'param_file': 'tripleo.cnf',
+ 'plugin': 'sqlalchemy_collectd'
+ }
+ )
+
+ def test_combine_multi_params(self):
+ with self._fixture() as ce:
+ engines.create_engine(
+ "mysql+pymysql://foo:bar@bat/"
+ "?charset=utf8&param_file=tripleo.cnf&plugin=connmon",
+ connection_parameters="plugin=sqlalchemy_collectd&"
+ "collectd_host=127.0.0.1&"
+ "bind_host=192.168.1.5")
+
+ self.assertEqual(
+ ce.mock_calls[0][1][0].query,
+ {
+ 'bind_host': '192.168.1.5',
+ 'charset': 'utf8',
+ 'collectd_host': '127.0.0.1',
+ 'param_file': 'tripleo.cnf',
+ 'plugin': ['connmon', 'sqlalchemy_collectd']
+ }
+ )
+
+
+class MySQLDefaultModeTestCase(test_base._MySQLOpportunisticTestCase):
def test_default_is_traditional(self):
with self.engine.connect() as conn:
sql_mode = conn.execute(
@@ -223,7 +296,7 @@ class MySQLDefaultModeTestCase(test_base.MySQLOpportunisticTestCase):
self.assertIn("TRADITIONAL", sql_mode)
-class MySQLModeTestCase(test_base.MySQLOpportunisticTestCase):
+class MySQLModeTestCase(test_base._MySQLOpportunisticTestCase):
def __init__(self, *args, **kwargs):
super(MySQLModeTestCase, self).__init__(*args, **kwargs)
@@ -357,6 +430,7 @@ class EngineFacadeTestCase(oslo_test.BaseTestCase):
thread_checkin=mock.ANY,
json_serializer=None,
json_deserializer=None,
+ connection_parameters='',
logging_name=mock.ANY,
)
get_maker.assert_called_once_with(engine=create_engine(),
@@ -438,7 +512,7 @@ class SQLiteConnectTest(oslo_test.BaseTestCase):
)
-class MysqlConnectTest(test_base.MySQLOpportunisticTestCase):
+class MysqlConnectTest(test_base._MySQLOpportunisticTestCase):
def _fixture(self, sql_mode):
return session.create_engine(self.engine.url, mysql_sql_mode=sql_mode)
@@ -716,7 +790,7 @@ class CreateEngineTest(oslo_test.BaseTestCase):
)
-class ProcessGuardTest(test_base.DbTestCase):
+class ProcessGuardTest(test_base._DbTestCase):
def test_process_guard(self):
self.engine.dispose()
@@ -744,7 +818,7 @@ class ProcessGuardTest(test_base.DbTestCase):
self.assertEqual(new_dbapi_id, newer_dbapi_id)
-class PatchStacktraceTest(test_base.DbTestCase):
+class PatchStacktraceTest(test_base._DbTestCase):
def test_trace(self):
engine = self.engine
diff --git a/oslo_db/tests/sqlalchemy/test_types.py b/oslo_db/tests/sqlalchemy/test_types.py
index 6103ce3..8eeb401 100644
--- a/oslo_db/tests/sqlalchemy/test_types.py
+++ b/oslo_db/tests/sqlalchemy/test_types.py
@@ -33,7 +33,7 @@ class JsonTable(BASE, models.ModelBase):
json = Column(types.JsonEncodedType)
-class JsonTypesTestCase(test_base.DbTestCase):
+class JsonTypesTestCase(test_base._DbTestCase):
def setUp(self):
super(JsonTypesTestCase, self).setUp()
JsonTable.__table__.create(self.engine)
diff --git a/oslo_db/tests/sqlalchemy/test_update_match.py b/oslo_db/tests/sqlalchemy/test_update_match.py
index c876bf3..36ebf73 100644
--- a/oslo_db/tests/sqlalchemy/test_update_match.py
+++ b/oslo_db/tests/sqlalchemy/test_update_match.py
@@ -85,7 +85,7 @@ class ManufactureCriteriaTest(oslo_test_base.BaseTestCase):
)
-class UpdateMatchTest(test_base.DbTestCase):
+class UpdateMatchTest(test_base._DbTestCase):
def setUp(self):
super(UpdateMatchTest, self).setUp()
Base.metadata.create_all(self.engine)
@@ -435,11 +435,11 @@ class UpdateMatchTest(test_base.DbTestCase):
class PGUpdateMatchTest(
UpdateMatchTest,
- test_base.PostgreSQLOpportunisticTestCase):
+ test_base._PostgreSQLOpportunisticTestCase):
pass
class MySQLUpdateMatchTest(
UpdateMatchTest,
- test_base.MySQLOpportunisticTestCase):
+ test_base._MySQLOpportunisticTestCase):
pass
diff --git a/oslo_db/tests/sqlalchemy/test_utils.py b/oslo_db/tests/sqlalchemy/test_utils.py
index d6fb3a3..7f44ef5 100644
--- a/oslo_db/tests/sqlalchemy/test_utils.py
+++ b/oslo_db/tests/sqlalchemy/test_utils.py
@@ -547,7 +547,7 @@ class TestPaginateQueryActualSQL(test_base.BaseTestCase):
)
-class TestMigrationUtils(db_test_base.DbTestCase):
+class TestMigrationUtils(db_test_base._DbTestCase):
"""Class for testing utils that are used in db migrations."""
@@ -957,14 +957,14 @@ class TestMigrationUtils(db_test_base.DbTestCase):
class PostgresqlTestMigrations(TestMigrationUtils,
- db_test_base.PostgreSQLOpportunisticTestCase):
+ db_test_base._PostgreSQLOpportunisticTestCase):
"""Test migrations on PostgreSQL."""
pass
class MySQLTestMigrations(TestMigrationUtils,
- db_test_base.MySQLOpportunisticTestCase):
+ db_test_base._MySQLOpportunisticTestCase):
"""Test migrations on MySQL."""
pass
@@ -1146,7 +1146,7 @@ class TestModelQuery(test_base.BaseTestCase):
self.session.query.assert_called_with(MyModel.id)
-class TestUtils(db_test_base.DbTestCase):
+class TestUtils(db_test_base._DbTestCase):
def setUp(self):
super(TestUtils, self).setUp()
meta = MetaData(bind=self.engine)
@@ -1222,12 +1222,12 @@ class TestUtils(db_test_base.DbTestCase):
class TestUtilsMysqlOpportunistically(
- TestUtils, db_test_base.MySQLOpportunisticTestCase):
+ TestUtils, db_test_base._MySQLOpportunisticTestCase):
pass
class TestUtilsPostgresqlOpportunistically(
- TestUtils, db_test_base.PostgreSQLOpportunisticTestCase):
+ TestUtils, db_test_base._PostgreSQLOpportunisticTestCase):
pass
@@ -1536,7 +1536,7 @@ class TestDialectFunctionDispatcher(test_base.BaseTestCase):
)
-class TestGetInnoDBTables(db_test_base.MySQLOpportunisticTestCase):
+class TestGetInnoDBTables(db_test_base._MySQLOpportunisticTestCase):
def test_all_tables_use_innodb(self):
self.engine.execute("CREATE TABLE customers "
diff --git a/playbooks/legacy/oslo.db-tox-mysql-python/post.yaml b/playbooks/legacy/oslo.db-tox-mysql-python/post.yaml
deleted file mode 100644
index 68fbdf8..0000000
--- a/playbooks/legacy/oslo.db-tox-mysql-python/post.yaml
+++ /dev/null
@@ -1,67 +0,0 @@
-- hosts: primary
- tasks:
-
- - name: Copy files from {{ ansible_user_dir }}/workspace/ on node
- synchronize:
- src: '{{ ansible_user_dir }}/workspace/'
- dest: '{{ zuul.executor.log_root }}'
- mode: pull
- copy_links: true
- verify_host: true
- rsync_opts:
- - --include=**/*nose_results.html
- - --include=*/
- - --exclude=*
- - --prune-empty-dirs
-
- - name: Copy files from {{ ansible_user_dir }}/workspace/ on node
- synchronize:
- src: '{{ ansible_user_dir }}/workspace/'
- dest: '{{ zuul.executor.log_root }}'
- mode: pull
- copy_links: true
- verify_host: true
- rsync_opts:
- - --include=**/*testr_results.html.gz
- - --include=*/
- - --exclude=*
- - --prune-empty-dirs
-
- - name: Copy files from {{ ansible_user_dir }}/workspace/ on node
- synchronize:
- src: '{{ ansible_user_dir }}/workspace/'
- dest: '{{ zuul.executor.log_root }}'
- mode: pull
- copy_links: true
- verify_host: true
- rsync_opts:
- - --include=/.testrepository/tmp*
- - --include=*/
- - --exclude=*
- - --prune-empty-dirs
-
- - name: Copy files from {{ ansible_user_dir }}/workspace/ on node
- synchronize:
- src: '{{ ansible_user_dir }}/workspace/'
- dest: '{{ zuul.executor.log_root }}'
- mode: pull
- copy_links: true
- verify_host: true
- rsync_opts:
- - --include=**/*testrepository.subunit.gz
- - --include=*/
- - --exclude=*
- - --prune-empty-dirs
-
- - name: Copy files from {{ ansible_user_dir }}/workspace/ on node
- synchronize:
- src: '{{ ansible_user_dir }}/workspace/'
- dest: '{{ zuul.executor.log_root }}/tox'
- mode: pull
- copy_links: true
- verify_host: true
- rsync_opts:
- - --include=/.tox/*/log/*
- - --include=*/
- - --exclude=*
- - --prune-empty-dirs
diff --git a/playbooks/legacy/oslo.db-tox-mysql-python/run.yaml b/playbooks/legacy/oslo.db-tox-mysql-python/run.yaml
deleted file mode 100644
index 5bdd811..0000000
--- a/playbooks/legacy/oslo.db-tox-mysql-python/run.yaml
+++ /dev/null
@@ -1,86 +0,0 @@
-- hosts: all
- name: Autoconverted job legacy-oslo.db-tox-mysql-python from old job gate-oslo.db-tox-mysql-python-ubuntu-xenial
- tasks:
-
- - name: Ensure legacy workspace directory
- file:
- path: '{{ ansible_user_dir }}/workspace'
- state: directory
-
- - shell:
- cmd: |
- set -e
- set -x
- CLONEMAP=`mktemp`
- REQS_DIR=`mktemp -d`
- function cleanup {
- mkdir -p $WORKSPACE
- rm -rf $CLONEMAP $REQS_DIR
- }
- trap cleanup EXIT
- cat > $CLONEMAP << EOF
- clonemap:
- - name: $ZUUL_PROJECT
- dest: .
- EOF
- # zuul cloner works poorly if there are 2 names that are the
- # same in here.
- if [[ "$ZUUL_PROJECT" != "openstack/requirements" ]]; then
- cat >> $CLONEMAP << EOF
- - name: openstack/requirements
- dest: $REQS_DIR
- EOF
- fi
- /usr/zuul-env/bin/zuul-cloner -m $CLONEMAP --cache-dir /opt/git \
- git://git.openstack.org $ZUUL_PROJECT openstack/requirements
- # REQS_DIR is not set for openstack/requirements and there is also
- # no need to copy in this case.
- if [[ "$ZUUL_PROJECT" != "openstack/requirements" ]]; then
- cp $REQS_DIR/upper-constraints.txt ./
- fi
- executable: /bin/bash
- chdir: '{{ ansible_user_dir }}/workspace'
- environment: '{{ zuul | zuul_legacy_vars }}'
-
- - shell:
- cmd: /usr/local/jenkins/slave_scripts/install-distro-packages.sh
- chdir: '{{ ansible_user_dir }}/workspace'
- environment: '{{ zuul | zuul_legacy_vars }}'
-
- - shell:
- cmd: |
- if [ -x tools/test-setup.sh ] ; then
- tools/test-setup.sh
- fi
- chdir: '{{ ansible_user_dir }}/workspace'
- environment: '{{ zuul | zuul_legacy_vars }}'
-
- - shell:
- cmd: |
- set -x
- sudo rm -f /etc/sudoers.d/zuul
- # Prove that general sudo access is actually revoked
- ! sudo -n true
- executable: /bin/bash
- chdir: '{{ ansible_user_dir }}/workspace'
- environment: '{{ zuul | zuul_legacy_vars }}'
-
- - shell:
- cmd: /usr/local/jenkins/slave_scripts/run-tox.sh mysql-python
- chdir: '{{ ansible_user_dir }}/workspace'
- environment: '{{ zuul | zuul_legacy_vars }}'
-
- - shell:
- cmd: |
- OUT=`git ls-files --other --exclude-standard --directory`
- if [ -z "$OUT" ]; then
- echo "No extra files created during test."
- exit 0
- else
- echo "The following un-ignored files were created during the test:"
- echo "$OUT"
- exit 0 # TODO: change to 1 to fail tests.
- fi
- executable: /bin/bash
- chdir: '{{ ansible_user_dir }}/workspace'
- environment: '{{ zuul | zuul_legacy_vars }}'
diff --git a/releasenotes/notes/add_connection_parameters-231aa7d8b7d2d416.yaml b/releasenotes/notes/add_connection_parameters-231aa7d8b7d2d416.yaml
new file mode 100644
index 0000000..c4fa1ab
--- /dev/null
+++ b/releasenotes/notes/add_connection_parameters-231aa7d8b7d2d416.yaml
@@ -0,0 +1,7 @@
+---
+features:
+ - |
+ Added new option connection_parameters which allows SQLAlchemy query
+ parameters to be stated separately from the URL itself, to allow
+ URL-persistence schemes like Nova cells to use controller-local
+ query parameters that aren't broadcast to all other servers.
diff --git a/releasenotes/notes/fix_synchronous_reader-ca442ca9f07470ec.yaml b/releasenotes/notes/fix_synchronous_reader-ca442ca9f07470ec.yaml
new file mode 100644
index 0000000..c842b7d
--- /dev/null
+++ b/releasenotes/notes/fix_synchronous_reader-ca442ca9f07470ec.yaml
@@ -0,0 +1,8 @@
+---
+fixes:
+ - |
+ Repaired the "synchronous_reader" modifier of enginefacade so that it
+ refers to the "writer" engine when set to True, thereby allowing
+ "synchronous" behavior with the writer. When set to False, this is
+ "asynchronous", so this should be associated with the async engines.
+ The flag had the reverse behavior previously.
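
The behaviour described in this note corresponds to the two enginefacade branches changed earlier in the diff: a synchronous reader now uses the writer engine, while async readers (and plain readers with synchronous_reader=False) use the reader engine. A standalone sketch of the corrected selection logic, with strings standing in for the real engine objects and mode constants (illustrative only):

    def select_engine(mode, synchronous_reader,
                      writer_engine="writer", reader_engine="async reader"):
        # Writers always get the writer engine.
        if mode == "writer":
            return writer_engine
        # Async readers, and plain readers when synchronous_reader is False,
        # get the (possibly slave) reader engine.
        if mode == "async_reader" or (mode == "reader" and not synchronous_reader):
            return reader_engine
        # Plain readers with synchronous_reader=True fall back to the writer.
        return writer_engine

    assert select_engine("reader", synchronous_reader=True) == "writer"
    assert select_engine("reader", synchronous_reader=False) == "async reader"
    assert select_engine("async_reader", synchronous_reader=True) == "async reader"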
diff --git a/releasenotes/source/index.rst b/releasenotes/source/index.rst
index 9b0e0b6..bb88d33 100644
--- a/releasenotes/source/index.rst
+++ b/releasenotes/source/index.rst
@@ -6,6 +6,7 @@
:maxdepth: 1
unreleased
+ queens
pike
ocata
newton
diff --git a/releasenotes/source/locale/en_GB/LC_MESSAGES/releasenotes.po b/releasenotes/source/locale/en_GB/LC_MESSAGES/releasenotes.po
index e6fcd71..a2ed5a8 100644
--- a/releasenotes/source/locale/en_GB/LC_MESSAGES/releasenotes.po
+++ b/releasenotes/source/locale/en_GB/LC_MESSAGES/releasenotes.po
@@ -1,18 +1,19 @@
# Andi Chandler <andi@gowling.com>, 2016. #zanata
# Andi Chandler <andi@gowling.com>, 2017. #zanata
+# Andi Chandler <andi@gowling.com>, 2018. #zanata
msgid ""
msgstr ""
"Project-Id-Version: oslo.db Release Notes\n"
"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2017-12-05 12:09+0000\n"
+"POT-Creation-Date: 2018-02-20 22:06+0000\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
-"PO-Revision-Date: 2017-12-05 10:29+0000\n"
+"PO-Revision-Date: 2018-02-18 01:24+0000\n"
"Last-Translator: Andi Chandler <andi@gowling.com>\n"
"Language-Team: English (United Kingdom)\n"
-"Language: en-GB\n"
-"X-Generator: Zanata 3.9.6\n"
+"Language: en_GB\n"
+"X-Generator: Zanata 4.3.3\n"
"Plural-Forms: nplurals=2; plural=(n != 1)\n"
msgid "2.6.0-9"
@@ -117,6 +118,22 @@ msgstr ""
"PyMySQL, which is an adequate replacement. Refer to https://wiki.openstack."
"org/wiki/PyMySQL_evaluation for details."
+msgid "Queens Series Release Notes"
+msgstr "Queens Series Release Notes"
+
+msgid ""
+"Repaired the \"synchronous_reader\" modifier of enginefacade so that it "
+"refers to the \"writer\" engine when set to True, thereby allowing "
+"\"synchronous\" behavior with the writer. When set to False, this is "
+"\"asynchronous\", so this should be associated with the async engines. The "
+"flag had the reverse behavior previously."
+msgstr ""
+"Repaired the \"synchronous_reader\" modifier of enginefacade so that it "
+"refers to the \"writer\" engine when set to True, thereby allowing "
+"\"synchronous\" behaviour with the writer. When set to False, this is "
+"\"asynchronous\", so this should be associated with the async engines. The "
+"flag had the reverse behaviour previously."
+
msgid ""
"The allowed values for the ``connection_debug`` option are now restricted to "
"the range between 0 and 100 (inclusive). Previously a number lower than 0 or "
diff --git a/releasenotes/source/queens.rst b/releasenotes/source/queens.rst
new file mode 100644
index 0000000..36ac616
--- /dev/null
+++ b/releasenotes/source/queens.rst
@@ -0,0 +1,6 @@
+===================================
+ Queens Series Release Notes
+===================================
+
+.. release-notes::
+ :branch: stable/queens
diff --git a/requirements.txt b/requirements.txt
index e2f74f6..a52b216 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -6,9 +6,13 @@ pbr!=2.1.0,>=2.0.0 # Apache-2.0
alembic>=0.8.10 # MIT
debtcollector>=1.2.0 # Apache-2.0
oslo.i18n>=3.15.3 # Apache-2.0
-oslo.config>=5.1.0 # Apache-2.0
-oslo.utils>=3.31.0 # Apache-2.0
+oslo.config>=5.2.0 # Apache-2.0
+oslo.utils>=3.33.0 # Apache-2.0
SQLAlchemy!=1.1.5,!=1.1.6,!=1.1.7,!=1.1.8,>=1.0.10 # MIT
sqlalchemy-migrate>=0.11.0 # Apache-2.0
stevedore>=1.20.0 # Apache-2.0
six>=1.10.0 # MIT
+# these are used by downstream libraries that require
+# oslo.db as one of their test requirements - do not remove!
+testresources>=2.0.0 # Apache-2.0/BSD
+testscenarios>=0.4 # Apache-2.0/BSD
diff --git a/setup.cfg b/setup.cfg
index c2c5219..f52322b 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -25,28 +25,6 @@ mysql =
# or oslo.db[postgresql]
postgresql =
psycopg2>=2.6.2 # LGPL/ZPL
-# Dependencies for testing oslo.db itself.
-test =
- hacking!=0.13.0,<0.14,>=0.12.0 # Apache-2.0
- coverage!=4.4,>=4.0 # Apache-2.0
- doc8>=0.6.0 # Apache-2.0
- eventlet!=0.18.3,!=0.20.1,<0.21.0,>=0.18.2 # MIT
- fixtures>=3.0.0 # Apache-2.0/BSD
- mock>=2.0.0 # BSD
- python-subunit>=1.0.0 # Apache-2.0/BSD
- sphinx>=1.6.2 # BSD
- openstackdocstheme>=1.17.0 # Apache-2.0
- oslotest>=1.10.0 # Apache-2.0
- oslo.context>=2.19.2 # Apache-2.0
- testrepository>=0.0.18 # Apache-2.0/BSD
- testtools>=2.2.0 # MIT
- os-testr>=1.0.0 # Apache-2.0
- reno>=2.5.0 # Apache-2.0
-fixtures =
- testresources>=2.0.0 # Apache-2.0/BSD
- testscenarios>=0.4 # Apache-2.0/BSD
-pifpaf =
- pifpaf>=0.10.0 # Apache-2.0
[files]
packages =
diff --git a/test-requirements.txt b/test-requirements.txt
new file mode 100644
index 0000000..3b06eec
--- /dev/null
+++ b/test-requirements.txt
@@ -0,0 +1,18 @@
+# The order of packages is significant, because pip processes them in the order
+# of appearance. Changing the order has an impact on the overall integration
+# process, which may cause wedges in the gate later.
+hacking!=0.13.0,<0.14,>=0.12.0 # Apache-2.0
+coverage!=4.4,>=4.0 # Apache-2.0
+eventlet!=0.18.3,!=0.20.1,>=0.18.2 # MIT
+fixtures>=3.0.0 # Apache-2.0/BSD
+mock>=2.0.0 # BSD
+python-subunit>=1.0.0 # Apache-2.0/BSD
+oslotest>=3.2.0 # Apache-2.0
+oslo.context>=2.19.2 # Apache-2.0
+testrepository>=0.0.18 # Apache-2.0/BSD
+testtools>=2.2.0 # MIT
+os-testr>=1.0.0 # Apache-2.0
+bandit>=1.1.0 # Apache-2.0
+pifpaf>=0.10.0 # Apache-2.0
+PyMySQL>=0.7.6 # MIT License
+psycopg2>=2.6.2 # LGPL/ZPL
diff --git a/tools/tox_install.sh b/tools/tox_install.sh
deleted file mode 100755
index e61b63a..0000000
--- a/tools/tox_install.sh
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env bash
-
-# Client constraint file contains this client version pin that is in conflict
-# with installing the client from source. We should remove the version pin in
-# the constraints file before applying it for from-source installation.
-
-CONSTRAINTS_FILE="$1"
-shift 1
-
-set -e
-
-# NOTE(tonyb): Place this in the tox enviroment's log dir so it will get
-# published to logs.openstack.org for easy debugging.
-localfile="$VIRTUAL_ENV/log/upper-constraints.txt"
-
-if [[ "$CONSTRAINTS_FILE" != http* ]]; then
- CONSTRAINTS_FILE="file://$CONSTRAINTS_FILE"
-fi
-# NOTE(tonyb): need to add curl to bindep.txt if the project supports bindep
-curl "$CONSTRAINTS_FILE" --insecure --progress-bar --output "$localfile"
-
-pip install -c"$localfile" openstack-requirements
-
-# This is the main purpose of the script: Allow local installation of
-# the current repo. It is listed in constraints file and thus any
-# install will be constrained and we need to unconstrain it.
-edit-constraints "$localfile" -- "$CLIENT_NAME"
-
-pip install -c"$localfile" -U "$@"
-exit $?
diff --git a/tox.ini b/tox.ini
index 7df46f5..47f31f1 100644
--- a/tox.ini
+++ b/tox.ini
@@ -3,7 +3,8 @@ minversion = 2.0
envlist = py35,py27,pep8,pip-missing-reqs
[testenv]
-install_command = {toxinidir}/tools/tox_install.sh {env:UPPER_CONSTRAINTS_FILE:https://git.openstack.org/cgit/openstack/requirements/plain/upper-constraints.txt} {opts} {packages}
+basepython = python3
+install_command = pip install {opts} {packages}
whitelist_externals = bash
env
setenv =
@@ -15,10 +16,10 @@ setenv =
{postgresql,all}: PIFPAF_POSTGRESQL=pifpaf -g OS_TEST_DBAPI_ADMIN_CONNECTION run postgresql --
{mysql,all}: PIFPAF_MYSQL=pifpaf -g OS_TEST_DBAPI_ADMIN_CONNECTION run mysql --
{mysql,postgresql,all}: BASECOMMAND={toxinidir}/tools/run-pifpaf-tests.sh
-
-deps = .[test,fixtures,mysql,postgresql]
- {postgresql,mysql,all}: .[pifpaf]
-
+deps =
+ -c{env:UPPER_CONSTRAINTS_FILE:https://git.openstack.org/cgit/openstack/requirements/plain/upper-constraints.txt}
+ -r{toxinidir}/test-requirements.txt
+ -r{toxinidir}/requirements.txt
commands =
{env:PIFPAF_MYSQL:} {env:PIFPAF_POSTGRESQL:} {env:BASECOMMAND:} '{posargs}'
@@ -29,12 +30,16 @@ commands = pip install SQLAlchemy>=0.9.0,!=0.9.5,<1.0.0
python setup.py testr --slowest --testr-args='{posargs}'
[testenv:py27]
+basepython = python2.7
commands =
env TEST_EVENTLET=0 bash tools/pretty_tox.sh '{posargs}'
env TEST_EVENTLET=1 bash tools/pretty_tox.sh '{posargs}'
[testenv:pep8]
-commands = flake8
+commands =
+ flake8
+ # Run security linter
+ bandit -r oslo_db -x tests -n5 --skip B105,B311
[testenv:venv]
commands = {posargs}
@@ -43,11 +48,13 @@ commands = {posargs}
commands = python setup.py test --coverage --coverage-package-name=oslo_db --testr-args='{posargs}'
[testenv:docs]
+deps = -r{toxinidir}/doc/requirements.txt
commands =
doc8 -e .rst CONTRIBUTING.rst HACKING.rst README.rst doc/source
- python setup.py build_sphinx
+ sphinx-build -b html doc/source doc/build/html
[testenv:releasenotes]
+deps = -r{toxinidir}/doc/requirements.txt
commands = sphinx-build -a -E -W -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html
[flake8]
@@ -67,3 +74,9 @@ import_exceptions =
# of the requirements.txt files
deps = pip_missing_reqs
commands = pip-missing-reqs -d --ignore-module=oslo_db* --ignore-module=pkg_resources --ignore-file=oslo_db/tests/* oslo_db
+
+[testenv:lower-constraints]
+deps =
+ -c{toxinidir}/lower-constraints.txt
+ -r{toxinidir}/test-requirements.txt
+ -r{toxinidir}/requirements.txt