Diffstat:
 -rw-r--r--  CONTRIBUTING.rst                          6
 -rw-r--r--  README.rst                                4
 -rwxr-xr-x  doc/source/conf.py                       10
 -rw-r--r--  doc/source/index.rst                     10
 -rw-r--r--  doc/source/readme.rst                     2
 -rw-r--r--  doc/source/usage.rst                      8
 -rw-r--r--  oslo/db/api.py                           27
 -rw-r--r--  oslo/db/concurrency.py                   81
 -rw-r--r--  oslo/db/options.py                       28
 -rw-r--r--  oslo/db/sqlalchemy/provision.py           2
 -rw-r--r--  oslo/db/sqlalchemy/session.py            41
 -rw-r--r--  oslo/db/sqlalchemy/test_migrations.py    13
 -rw-r--r--  oslo/db/sqlalchemy/utils.py               2
 -rw-r--r--  setup.cfg                                 7
 -rw-r--r--  tests/sqlalchemy/test_options.py         15
 -rw-r--r--  tests/sqlalchemy/test_sqlalchemy.py       4
 -rw-r--r--  tests/test_api.py                         8
 -rw-r--r--  tests/test_concurrency.py               108
 -rw-r--r--  tox.ini                                   4
 19 files changed, 317 insertions(+), 63 deletions(-)
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index c458b44..a6a308c 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -1,3 +1,7 @@
+=================
+How to contribute
+=================
+
If you would like to contribute to the development of OpenStack,
you must follow the steps in the "If you're a developer, start here"
section of this page:
@@ -14,4 +18,4 @@ Pull requests submitted through GitHub will be ignored.
Bugs should be filed on Launchpad, not GitHub:
- https://bugs.launchpad.net/oslo.db \ No newline at end of file
+ https://bugs.launchpad.net/oslo.db
diff --git a/README.rst b/README.rst
index b87094b..d5e433c 100644
--- a/README.rst
+++ b/README.rst
@@ -1,5 +1,5 @@
===================================
-oslo.db
+Overview
===================================
oslo.db library
@@ -7,4 +7,4 @@ oslo.db library
* Free software: Apache license
* Documentation: http://docs.openstack.org/developer/oslo.db
* Source: http://git.openstack.org/cgit/openstack/oslo.db
-* Bugs: http://bugs.launchpad.net/oslo \ No newline at end of file
+* Bugs: http://bugs.launchpad.net/oslo
diff --git a/doc/source/conf.py b/doc/source/conf.py
index 510e579..b79e405 100755
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -30,6 +30,14 @@ extensions = [
# text edit cycles.
# execute "export SPHINX_DEBUG=1" in your terminal to disable
+# A list of glob-style patterns that should be excluded when looking for source
+# files.
+exclude_patterns = [
+ 'api/setup.rst', # workaround for https://launchpad.net/bugs/1260495
+ 'api/tests.*', # avoid generating docs from tests
+ 'api/oslo.db.openstack.common.*', # skip common modules
+]
+
# The suffix of source filenames.
source_suffix = '.rst'
@@ -72,4 +80,4 @@ latex_documents = [
]
# Example configuration for intersphinx: refer to the Python standard library.
-#intersphinx_mapping = {'http://docs.python.org/': None} \ No newline at end of file
+#intersphinx_mapping = {'http://docs.python.org/': None}
diff --git a/doc/source/index.rst b/doc/source/index.rst
index 10db04b..037fe74 100644
--- a/doc/source/index.rst
+++ b/doc/source/index.rst
@@ -1,7 +1,11 @@
-Welcome to oslo.db's documentation!
-===================================
+Welcome to oslo.db documentation!
+=================================
+
+The Oslo database handling library. It provides database connectivity
+to different backends and helper utilities.
Contents:
+---------
.. toctree::
:maxdepth: 2
@@ -12,7 +16,7 @@ Contents:
contributing
Indices and tables
-==================
+------------------
* :ref:`genindex`
* :ref:`modindex`
diff --git a/doc/source/readme.rst b/doc/source/readme.rst
index 6b2b3ec..a6210d3 100644
--- a/doc/source/readme.rst
+++ b/doc/source/readme.rst
@@ -1 +1 @@
-.. include:: ../README.rst \ No newline at end of file
+.. include:: ../../README.rst
diff --git a/doc/source/usage.rst b/doc/source/usage.rst
index be9ff69..0e5a2f2 100644
--- a/doc/source/usage.rst
+++ b/doc/source/usage.rst
@@ -8,6 +8,7 @@ To use oslo.db in a project::
.. code:: python
+ from oslo.config import cfg
from oslo.db.sqlalchemy import session as db_session
_FACADE = None
@@ -15,8 +16,7 @@ To use oslo.db in a project::
def _create_facade_lazily():
global _FACADE
if _FACADE is None:
- _FACADE = db_session.EngineFacade.from_config(
- CONF.database.connection, CONF)
+ _FACADE = db_session.EngineFacade.from_config(cfg.CONF)
return _FACADE
def get_engine():
@@ -48,12 +48,10 @@ To use oslo.db in a project::
from oslo.config import cfg
from oslo.db import api as db_api
- CONF = cfg.CONF
- CONF.import_opt('backend', 'oslo.db.options', group='database')
_BACKEND_MAPPING = {'sqlalchemy': 'project.db.sqlalchemy.api'}
- IMPL = db_api.DBAPI(CONF.database.backend, backend_mapping=_BACKEND_MAPPING)
+ IMPL = db_api.DBAPI.from_config(cfg.CONF, backend_mapping=_BACKEND_MAPPING)
def get_engine():
return IMPL.get_engine()
diff --git a/oslo/db/api.py b/oslo/db/api.py
index a2a7d0b..34f7c62 100644
--- a/oslo/db/api.py
+++ b/oslo/db/api.py
@@ -28,6 +28,7 @@ import time
from oslo.db import exception
from oslo.db.openstack.common.gettextutils import _LE
from oslo.db.openstack.common import importutils
+from oslo.db import options
LOG = logging.getLogger(__name__)
@@ -160,3 +161,29 @@ class DBAPI(object):
max_retry_interval=self.max_retry_interval)(attr)
return attr
+
+ @classmethod
+ def from_config(cls, conf, backend_mapping=None, lazy=False):
+ """Initialize DBAPI instance given a config instance.
+
+ :param conf: oslo.config config instance
+ :type conf: oslo.config.cfg.ConfigOpts
+
+ :param backend_mapping: backend name -> module/class to load mapping
+ :type backend_mapping: dict
+
+ :param lazy: load the DB backend lazily on the first DB API method call
+ :type lazy: bool
+
+ """
+
+ conf.register_opts(options.database_opts, 'database')
+
+ return cls(backend_name=conf.database.backend,
+ backend_mapping=backend_mapping,
+ lazy=lazy,
+ use_db_reconnect=conf.database.use_db_reconnect,
+ retry_interval=conf.database.db_retry_interval,
+ inc_retry_interval=conf.database.db_inc_retry_interval,
+ max_retry_interval=conf.database.db_max_retry_interval,
+ max_retries=conf.database.db_max_retries)
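
The new classmethod replaces the manual import_opt('backend', ...) step that
projects used to perform before instantiating DBAPI: it registers oslo.db's
database options on the config object itself and reads the backend name plus
the reconnect/retry settings from the [database] group. A minimal sketch, with
a placeholder backend module path:

    from oslo.config import cfg

    from oslo.db import api as db_api

    _BACKEND_MAPPING = {'sqlalchemy': 'project.db.sqlalchemy.api'}  # placeholder

    # No import_opt()/register_opts() calls are needed here; from_config()
    # registers the [database] options and reads backend, use_db_reconnect,
    # db_retry_interval, db_inc_retry_interval, db_max_retry_interval and
    # db_max_retries from the configuration itself.
    IMPL = db_api.DBAPI.from_config(cfg.CONF, backend_mapping=_BACKEND_MAPPING)
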
diff --git a/oslo/db/concurrency.py b/oslo/db/concurrency.py
new file mode 100644
index 0000000..5134785
--- /dev/null
+++ b/oslo/db/concurrency.py
@@ -0,0 +1,81 @@
+# Copyright 2014 Mirantis.inc
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import copy
+import logging
+import threading
+
+from oslo.config import cfg
+
+from oslo.db import api
+from oslo.db.openstack.common.gettextutils import _LE
+
+
+LOG = logging.getLogger(__name__)
+
+tpool_opts = [
+ cfg.BoolOpt('use_tpool',
+ default=False,
+ deprecated_name='dbapi_use_tpool',
+ deprecated_group='DEFAULT',
+ help='Enable the experimental use of thread pooling for '
+ 'all DB API calls'),
+]
+
+
+class TpoolDbapiWrapper(object):
+ """DB API wrapper class.
+
+ This wraps the oslo DB API with an option to use eventlet's thread
+ pooling. Since the CONF variable may not be loaded at the time this
+ class is instantiated, we must look at it on the first DB API call.
+ """
+
+ def __init__(self, conf, backend_mapping):
+ self._db_api = None
+ self._backend_mapping = backend_mapping
+ self._conf = conf
+ self._conf.register_opts(tpool_opts, 'database')
+ self._lock = threading.Lock()
+
+ @property
+ def _api(self):
+ if not self._db_api:
+ with self._lock:
+ if not self._db_api:
+ db_api = api.DBAPI.from_config(
+ conf=self._conf, backend_mapping=self._backend_mapping)
+ if self._conf.database.use_tpool:
+ try:
+ from eventlet import tpool
+ except ImportError:
+ LOG.exception(_LE("'eventlet' is required for "
+ "TpoolDbapiWrapper."))
+ raise
+ self._db_api = tpool.Proxy(db_api)
+ else:
+ self._db_api = db_api
+ return self._db_api
+
+ def __getattr__(self, key):
+ return getattr(self._api, key)
+
+
+def list_opts():
+ """Returns a list of oslo.config options available in this module.
+
+ :returns: a list of (group_name, opts) tuples
+ """
+ return [('database', copy.deepcopy(tpool_opts))]
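
For projects that want the optional eventlet thread pooling, the wrapper is
used in place of a plain DBAPI instance. A minimal sketch, again with a
placeholder backend module path:

    from oslo.config import cfg

    from oslo.db import concurrency

    _BACKEND_MAPPING = {'sqlalchemy': 'project.db.sqlalchemy.api'}  # placeholder

    # The wrapper registers [database]/use_tpool; the real DBAPI (wrapped in
    # eventlet's tpool.Proxy when use_tpool is enabled) is created lazily,
    # under a lock, on the first attribute access.
    IMPL = concurrency.TpoolDbapiWrapper(cfg.CONF,
                                         backend_mapping=_BACKEND_MAPPING)
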
diff --git a/oslo/db/options.py b/oslo/db/options.py
index 126ec49..19058d0 100644
--- a/oslo/db/options.py
+++ b/oslo/db/options.py
@@ -130,26 +130,24 @@ database_opts = [
'count.'),
]
-CONF = cfg.CONF
-CONF.register_opts(database_opts, 'database')
-
-def set_defaults(sql_connection, sqlite_db, max_pool_size=None,
- max_overflow=None, pool_timeout=None):
+def set_defaults(conf, connection=None, sqlite_db=None,
+ max_pool_size=None, max_overflow=None,
+ pool_timeout=None):
"""Set defaults for configuration variables."""
- cfg.set_defaults(database_opts,
- connection=sql_connection,
- sqlite_db=sqlite_db)
- # Update the QueuePool defaults
+
+ conf.register_opts(database_opts, group='database')
+
+ if connection is not None:
+ conf.set_default('connection', connection, group='database')
+ if sqlite_db is not None:
+ conf.set_default('sqlite_db', sqlite_db, group='database')
if max_pool_size is not None:
- cfg.set_defaults(database_opts,
- max_pool_size=max_pool_size)
+ conf.set_default('max_pool_size', max_pool_size, group='database')
if max_overflow is not None:
- cfg.set_defaults(database_opts,
- max_overflow=max_overflow)
+ conf.set_default('max_overflow', max_overflow, group='database')
if pool_timeout is not None:
- cfg.set_defaults(database_opts,
- pool_timeout=pool_timeout)
+ conf.set_default('pool_timeout', pool_timeout, group='database')
def list_opts():
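
set_defaults() now takes the config object explicitly instead of mutating the
global cfg.CONF, and every keyword is optional. A small sketch mirroring the
new test further down in this change:

    from oslo.config import cfg

    from oslo.db import options

    conf = cfg.ConfigOpts()

    # Registers the [database] options on this conf and overrides only the
    # defaults that were actually passed in.
    options.set_defaults(conf, connection='sqlite:///:memory:',
                         max_pool_size=10)

    assert conf.database.connection == 'sqlite:///:memory:'
    assert conf.database.max_pool_size == 10
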
diff --git a/oslo/db/sqlalchemy/provision.py b/oslo/db/sqlalchemy/provision.py
index 598305b..317d7f9 100644
--- a/oslo/db/sqlalchemy/provision.py
+++ b/oslo/db/sqlalchemy/provision.py
@@ -52,7 +52,7 @@ def _execute_sql(engine, sql, driver):
except sqlalchemy.exc.OperationalError:
msg = ('%s does not match database admin '
'credentials or database does not exist.')
- LOG.exception(msg % engine.url)
+ LOG.exception(msg, engine.url)
raise exc.DBConnectionError(msg % engine.url)
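
The logging change here (and the matching ones in test_migrations.py and
utils.py below) switches from eager %-interpolation to passing the arguments
to the logging call, so the string is only built if the record is actually
emitted and handlers still see the raw format string. A tiny illustration with
placeholder values:

    import logging

    LOG = logging.getLogger(__name__)
    url = 'mysql://user@localhost/test'

    # Eager: the message is formatted even if DEBUG records are filtered out.
    LOG.debug('connecting to %s' % url)

    # Lazy: logging interpolates the argument only when the record is emitted.
    LOG.debug('connecting to %s', url)
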
diff --git a/oslo/db/sqlalchemy/session.py b/oslo/db/sqlalchemy/session.py
index c97f682..3b3a4ca 100644
--- a/oslo/db/sqlalchemy/session.py
+++ b/oslo/db/sqlalchemy/session.py
@@ -291,6 +291,7 @@ from sqlalchemy.pool import NullPool, StaticPool
from sqlalchemy.sql.expression import literal_column
from oslo.db import exception
+from oslo.db import options
from oslo.db.openstack.common.gettextutils import _LE, _LW
from oslo.db.openstack.common import timeutils
@@ -605,7 +606,8 @@ def create_engine(sql_connection, sqlite_fk=False, mysql_sql_mode=None,
idle_timeout=3600,
connection_debug=0, max_pool_size=None, max_overflow=None,
pool_timeout=None, sqlite_synchronous=True,
- connection_trace=False, max_retries=10, retry_interval=10):
+ connection_trace=False, max_retries=10, retry_interval=10,
+ thread_checkin=True):
"""Return a new SQLAlchemy engine."""
connection_dict = sqlalchemy.engine.url.make_url(sql_connection)
@@ -643,7 +645,8 @@ def create_engine(sql_connection, sqlite_fk=False, mysql_sql_mode=None,
engine = sqlalchemy.create_engine(sql_connection, **engine_args)
- sqlalchemy.event.listen(engine, 'checkin', _thread_yield)
+ if thread_checkin:
+ sqlalchemy.event.listen(engine, 'checkin', _thread_yield)
if engine.name in ('ibm_db_sa', 'mysql', 'postgresql'):
ping_callback = functools.partial(_ping_listener, engine)
@@ -671,7 +674,7 @@ def create_engine(sql_connection, sqlite_fk=False, mysql_sql_mode=None,
remaining = 'infinite'
while True:
msg = _LW('SQL connection failed. %s attempts left.')
- LOG.warning(msg % remaining)
+ LOG.warning(msg, remaining)
if remaining != 'infinite':
remaining -= 1
time.sleep(retry_interval)
@@ -828,7 +831,10 @@ class EngineFacade(object):
(defaults to 10)
:keyword retry_interval: interval between retries of opening a sql
connection (defaults to 10)
-
+ :keyword thread_checkin: boolean that indicates that between each
+ engine checkin event a sleep(0) will occur to
+ allow other greenthreads to run (defaults to
+ True)
"""
super(EngineFacade, self).__init__()
@@ -845,7 +851,8 @@ class EngineFacade(object):
sqlite_synchronous=kwargs.get('sqlite_synchronous', True),
connection_trace=kwargs.get('connection_trace', False),
max_retries=kwargs.get('max_retries', 10),
- retry_interval=kwargs.get('retry_interval', 10))
+ retry_interval=kwargs.get('retry_interval', 10),
+ thread_checkin=kwargs.get('thread_checkin', True))
self._session_maker = get_maker(
engine=self._engine,
autocommit=autocommit,
@@ -859,31 +866,19 @@ class EngineFacade(object):
def get_session(self, **kwargs):
"""Get a Session instance.
- If passed, keyword arguments values override the ones used when the
- sessionmaker instance was created.
-
- :keyword autocommit: use autocommit mode for created Session instances
- :type autocommit: bool
-
- :keyword expire_on_commit: expire session objects on commit
- :type expire_on_commit: bool
+ Keyword arguments will be passed to a sessionmaker instance as is (if
+ passed, they will override the ones used when the sessionmaker instance
+ was created). See SQLAlchemy Session docs for details.
"""
- for arg in kwargs:
- if arg not in ('autocommit', 'expire_on_commit'):
- del kwargs[arg]
-
return self._session_maker(**kwargs)
@classmethod
- def from_config(cls, connection_string, conf,
+ def from_config(cls, conf,
sqlite_fk=False, autocommit=True, expire_on_commit=False):
"""Initialize EngineFacade using oslo.config config instance options.
- :param connection_string: SQLAlchemy connection string
- :type connection_string: string
-
:param conf: oslo.config config instance
:type conf: oslo.config.cfg.ConfigOpts
@@ -898,7 +893,9 @@ class EngineFacade(object):
"""
- return cls(sql_connection=connection_string,
+ conf.register_opts(options.database_opts, 'database')
+
+ return cls(sql_connection=conf.database.connection,
sqlite_fk=sqlite_fk,
autocommit=autocommit,
expire_on_commit=expire_on_commit,
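
With this change EngineFacade.from_config() reads the connection string from
the config object itself, and direct construction gains a thread_checkin knob
for deployments that do not want the greenthread yield on pool checkin. A
minimal sketch, assuming [database]/connection is set in the configuration:

    from oslo.config import cfg

    from oslo.db.sqlalchemy import session as db_session

    # The facade registers the [database] options and uses
    # cfg.CONF.database.connection; no separate connection string argument.
    facade = db_session.EngineFacade.from_config(cfg.CONF)
    engine = facade.get_engine()
    session = facade.get_session()

    # Direct construction can opt out of the sleep(0) on engine checkin.
    standalone = db_session.EngineFacade('sqlite:///:memory:',
                                         thread_checkin=False)
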
diff --git a/oslo/db/sqlalchemy/test_migrations.py b/oslo/db/sqlalchemy/test_migrations.py
index 661b0a7..5972d03 100644
--- a/oslo/db/sqlalchemy/test_migrations.py
+++ b/oslo/db/sqlalchemy/test_migrations.py
@@ -60,10 +60,10 @@ def _set_db_lock(lock_path=None, lock_prefix=None):
path = lock_path or os.environ.get("OSLO_LOCK_PATH")
lock = lockfile.FileLock(os.path.join(path, lock_prefix))
with lock:
- LOG.debug('Got lock "%s"' % f.__name__)
+ LOG.debug('Got lock "%s"', f.__name__)
return f(*args, **kwargs)
finally:
- LOG.debug('Lock released "%s"' % f.__name__)
+ LOG.debug('Lock released "%s"', f.__name__)
return wrapper
return decorator
@@ -88,7 +88,7 @@ class BaseMigrationTestCase(test_base.BaseTestCase):
# Load test databases from the config file. Only do this
# once. No need to re-run this on each test...
- LOG.debug('config_path is %s' % self.CONFIG_FILE_PATH)
+ LOG.debug('config_path is %s', self.CONFIG_FILE_PATH)
if os.path.exists(self.CONFIG_FILE_PATH):
cp = moves.configparser.RawConfigParser()
try:
@@ -193,7 +193,7 @@ class WalkVersionsMixin(object):
self.migration_api.db_version(engine,
self.REPOSITORY))
- LOG.debug('latest version is %s' % self.REPOSITORY.latest)
+ LOG.debug('latest version is %s', self.REPOSITORY.latest)
versions = range(self.INIT_VERSION + 1, self.REPOSITORY.latest + 1)
for version in versions:
@@ -264,6 +264,7 @@ class WalkVersionsMixin(object):
if check:
check(engine, data)
except Exception:
- LOG.error(_LE("Failed to migrate to version %s on engine %s") %
- (version, engine))
+ LOG.error(_LE("Failed to migrate to version %(version)s on "
+ "engine %(engine)s"), {'version': version,
+ 'engine': engine})
raise
diff --git a/oslo/db/sqlalchemy/utils.py b/oslo/db/sqlalchemy/utils.py
index 3ecd004..04eb7ce 100644
--- a/oslo/db/sqlalchemy/utils.py
+++ b/oslo/db/sqlalchemy/utils.py
@@ -430,7 +430,7 @@ def drop_old_duplicate_entries_from_table(migrate_engine, table_name,
[table.c.id]).where(delete_condition)
for row in migrate_engine.execute(rows_to_delete_select).fetchall():
LOG.info(_LI("Deleting duplicated row with id: %(id)s from table: "
- "%(table)s") % dict(id=row[0], table=table_name))
+ "%(table)s"), dict(id=row[0], table=table_name))
if use_soft_delete:
delete_statement = table.update().\
diff --git a/setup.cfg b/setup.cfg
index f393ea5..78097e8 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -27,6 +27,7 @@ namespace_packages =
[entry_points]
oslo.config.opts =
oslo.db = oslo.db.options:list_opts
+ oslo.db.concurrency = oslo.db.concurrency:list_opts
oslo.db.migration =
alembic = oslo.db.sqlalchemy.migration_cli.ext_alembic:AlembicExtension
@@ -53,3 +54,9 @@ input_file = oslo.db/locale/oslo.db.pot
keywords = _ gettext ngettext l_ lazy_gettext
mapping_file = babel.cfg
output_file = oslo.db/locale/oslo.db.pot
+
+[pbr]
+# NOTE(viktors): uncomment the ``warnerrors`` line when we want to treat
+# sphinx warnings as errors
+# warnerrors = True
+autodoc_index_modules = True
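
The extra oslo.config.opts entry point just points config tooling at the new
module's list_opts() hook; calling it directly shows the (group, options)
shape it returns:

    from oslo.db import concurrency

    # A single ('database', [use_tpool]) entry, per oslo/db/concurrency.py.
    for group, opts in concurrency.list_opts():
        print(group, [opt.name for opt in opts])
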
diff --git a/tests/sqlalchemy/test_options.py b/tests/sqlalchemy/test_options.py
index 4870ed8..585b67a 100644
--- a/tests/sqlalchemy/test_options.py
+++ b/tests/sqlalchemy/test_options.py
@@ -14,19 +14,17 @@
from oslo.config import cfg
from oslo.db.openstack.common.fixture import config
+from oslo.db import options
from tests import utils as test_utils
-cfg.CONF.import_opt('connection', 'oslo.db.options',
- group='database')
-
-
class DbApiOptionsTestCase(test_utils.BaseTestCase):
def setUp(self):
super(DbApiOptionsTestCase, self).setUp()
config_fixture = self.useFixture(config.Config())
self.conf = config_fixture.conf
+ self.conf.register_opts(options.database_opts, group='database')
self.config = config_fixture.config
def test_deprecated_session_parameters(self):
@@ -118,3 +116,12 @@ pool_timeout=7
self.conf(['--config-file', path])
self.assertEqual(self.conf.database.backend, 'test_123')
+
+ def test_set_defaults(self):
+ conf = cfg.ConfigOpts()
+
+ options.set_defaults(conf,
+ connection='sqlite:///:memory:')
+
+ self.assertTrue(len(conf.database.items()) > 1)
+ self.assertEqual('sqlite:///:memory:', conf.database.connection)
diff --git a/tests/sqlalchemy/test_sqlalchemy.py b/tests/sqlalchemy/test_sqlalchemy.py
index 9cda622..a6f097b 100644
--- a/tests/sqlalchemy/test_sqlalchemy.py
+++ b/tests/sqlalchemy/test_sqlalchemy.py
@@ -352,13 +352,14 @@ class EngineFacadeTestCase(oslo_test.BaseTestCase):
@mock.patch('oslo.db.sqlalchemy.session.create_engine')
def test_creation_from_config(self, create_engine, get_maker):
conf = mock.MagicMock()
+ conf.database.connection = 'sqlite:///:memory:'
conf.database.items.return_value = [
('connection_debug', 100),
('max_pool_size', 10),
('mysql_sql_mode', 'TRADITIONAL'),
]
- session.EngineFacade.from_config('sqlite:///:memory:', conf,
+ session.EngineFacade.from_config(conf,
autocommit=False,
expire_on_commit=True)
@@ -376,6 +377,7 @@ class EngineFacadeTestCase(oslo_test.BaseTestCase):
connection_trace=mock.ANY,
sqlite_synchronous=mock.ANY,
pool_timeout=mock.ANY,
+ thread_checkin=mock.ANY,
)
get_maker.assert_called_once_with(engine=create_engine(),
autocommit=False,
diff --git a/tests/test_api.py b/tests/test_api.py
index 5534757..98c618f 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -16,6 +16,7 @@
"""Unit tests for DB API."""
import mock
+from oslo.config import cfg
from oslo.db import api
from oslo.db import exception
@@ -78,6 +79,13 @@ class DBAPITestCase(test_utils.BaseTestCase):
dbapi.api_class_call1(1, 'abc')
self.assertIsNotNone(dbapi._backend)
+ def test_dbapi_from_config(self):
+ conf = cfg.ConfigOpts()
+
+ dbapi = api.DBAPI.from_config(conf,
+ backend_mapping={'sqlalchemy': __name__})
+ self.assertIsNotNone(dbapi._backend)
+
class DBReconnectTestCase(DBAPITestCase):
def setUp(self):
diff --git a/tests/test_concurrency.py b/tests/test_concurrency.py
new file mode 100644
index 0000000..cf34bba
--- /dev/null
+++ b/tests/test_concurrency.py
@@ -0,0 +1,108 @@
+# Copyright 2014 Mirantis.inc
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import sys
+
+import mock
+
+from oslo.db import concurrency
+from tests import utils as test_utils
+
+FAKE_BACKEND_MAPPING = {'sqlalchemy': 'fake.db.sqlalchemy.api'}
+
+
+class TpoolDbapiWrapperTestCase(test_utils.BaseTestCase):
+
+ def setUp(self):
+ super(TpoolDbapiWrapperTestCase, self).setUp()
+ self.db_api = concurrency.TpoolDbapiWrapper(
+ conf=self.conf, backend_mapping=FAKE_BACKEND_MAPPING)
+
+ # NOTE(akurilin): We are not going to add `eventlet` to `oslo.db`
+ # requirements (`requirements.txt` and `test-requirements.txt`) for
+ # the following reasons:
+ # - support for eventlet's thread pooling is entirely optional;
+ # - we don't need to test `tpool.Proxy` functionality itself,
+ # because it's a tool from a third-party library;
+ # - `eventlet` would prevent us from running unit tests on Python 3.x
+ # versions, because it doesn't support them yet.
+ #
+ # As we don't test `tpool.Proxy`, we can safely mock it in tests.
+
+ self.proxy = mock.MagicMock()
+ self.eventlet = mock.MagicMock()
+ self.eventlet.tpool.Proxy.return_value = self.proxy
+ sys.modules['eventlet'] = self.eventlet
+ self.addCleanup(sys.modules.pop, 'eventlet', None)
+
+ @mock.patch('oslo.db.api.DBAPI')
+ def test_db_api_common(self, mock_db_api):
+ # test context:
+ # CONF.database.use_tpool == False
+ # eventlet is installed
+ # expected result:
+ # TpoolDbapiWrapper should wrap DBAPI
+
+ fake_db_api = mock.MagicMock()
+ mock_db_api.from_config.return_value = fake_db_api
+
+ # get access to some db-api method
+ self.db_api.fake_call_1
+
+ mock_db_api.from_config.assert_called_once_with(
+ conf=self.conf, backend_mapping=FAKE_BACKEND_MAPPING)
+ self.assertEqual(self.db_api._db_api, fake_db_api)
+ self.assertFalse(self.eventlet.tpool.Proxy.called)
+
+ # get access to another db-api method to be sure the api didn't change
+ self.db_api.fake_call_2
+
+ self.assertEqual(self.db_api._db_api, fake_db_api)
+ self.assertFalse(self.eventlet.tpool.Proxy.called)
+ self.assertEqual(1, mock_db_api.from_config.call_count)
+
+ @mock.patch('oslo.db.api.DBAPI')
+ def test_db_api_config_change(self, mock_db_api):
+ # test context:
+ # CONF.database.use_tpool == True
+ # eventlet is installed
+ # expected result:
+ # TpoolDbapiWrapper should wrap tpool proxy
+
+ fake_db_api = mock.MagicMock()
+ mock_db_api.from_config.return_value = fake_db_api
+ self.conf.set_override('use_tpool', True, group='database')
+
+ # get access to some db-api method
+ self.db_api.fake_call
+
+ # CONF.database.use_tpool is True, so we get tpool proxy in this case
+ mock_db_api.from_config.assert_called_once_with(
+ conf=self.conf, backend_mapping=FAKE_BACKEND_MAPPING)
+ self.eventlet.tpool.Proxy.assert_called_once_with(fake_db_api)
+ self.assertEqual(self.db_api._db_api, self.proxy)
+
+ @mock.patch('oslo.db.api.DBAPI')
+ def test_db_api_without_installed_eventlet(self, mock_db_api):
+ # test context:
+ # CONF.database.use_tpool == True
+ # eventlet is not installed
+ # expected result:
+ # raise ImportError
+
+ self.conf.set_override('use_tpool', True, group='database')
+ del sys.modules['eventlet']
+
+ self.assertRaises(ImportError, getattr, self.db_api, 'fake')
diff --git a/tox.ini b/tox.ini
index 1c483a6..fef9a35 100644
--- a/tox.ini
+++ b/tox.ini
@@ -26,6 +26,10 @@ commands = {posargs}
[testenv:cover]
commands = python setup.py testr --coverage --testr-args='{posargs}'
+[testenv:docs]
+commands =
+ python setup.py build_sphinx
+
[flake8]
# H803 skipped on purpose per list discussion.
# E123, E125 skipped as they are invalid PEP-8.