-rw-r--r--  CONTRIBUTING.rst                         6
-rw-r--r--  README.rst                               4
-rwxr-xr-x  doc/source/conf.py                      10
-rw-r--r--  doc/source/index.rst                    10
-rw-r--r--  doc/source/readme.rst                    2
-rw-r--r--  doc/source/usage.rst                     8
-rw-r--r--  openstack-common.conf                    1
-rw-r--r--  oslo/db/api.py                          27
-rw-r--r--  oslo/db/concurrency.py                  81
-rw-r--r--  oslo/db/openstack/common/context.py    111
-rw-r--r--  oslo/db/options.py                      32
-rw-r--r--  oslo/db/sqlalchemy/provision.py          2
-rw-r--r--  oslo/db/sqlalchemy/session.py          125
-rw-r--r--  oslo/db/sqlalchemy/test_migrations.py   13
-rw-r--r--  oslo/db/sqlalchemy/utils.py            143
-rw-r--r--  setup.cfg                                7
-rw-r--r--  tests/sqlalchemy/test_options.py        15
-rw-r--r--  tests/sqlalchemy/test_sqlalchemy.py     42
-rw-r--r--  tests/sqlalchemy/test_utils.py          91
-rw-r--r--  tests/test_api.py                        8
-rw-r--r--  tests/test_concurrency.py              108
-rw-r--r--  tox.ini                                  4
22 files changed, 544 insertions, 306 deletions
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index c458b44..a6a308c 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -1,3 +1,7 @@
+=================
+How to contribute
+=================
+
If you would like to contribute to the development of OpenStack,
you must follow the steps in the "If you're a developer, start here"
section of this page:
@@ -14,4 +18,4 @@ Pull requests submitted through GitHub will be ignored.
Bugs should be filed on Launchpad, not GitHub:
- https://bugs.launchpad.net/oslo.db
\ No newline at end of file
+ https://bugs.launchpad.net/oslo.db
diff --git a/README.rst b/README.rst
index b87094b..d5e433c 100644
--- a/README.rst
+++ b/README.rst
@@ -1,5 +1,5 @@
===================================
-oslo.db
+Overview
===================================
oslo.db library
@@ -7,4 +7,4 @@ oslo.db library
* Free software: Apache license
* Documentation: http://docs.openstack.org/developer/oslo.db
* Source: http://git.openstack.org/cgit/openstack/oslo.db
-* Bugs: http://bugs.launchpad.net/oslo
\ No newline at end of file
+* Bugs: http://bugs.launchpad.net/oslo
diff --git a/doc/source/conf.py b/doc/source/conf.py
index 510e579..b79e405 100755
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -30,6 +30,14 @@ extensions = [
# text edit cycles.
# execute "export SPHINX_DEBUG=1" in your terminal to disable
+# A list of glob-style patterns that should be excluded when looking for source
+# files.
+exclude_patterns = [
+ 'api/setup.rst', # workaround for https://launchpad.net/bugs/1260495
+ 'api/tests.*', # avoid generating docs from tests
+ 'api/oslo.db.openstack.common.*', # skip common modules
+]
+
# The suffix of source filenames.
source_suffix = '.rst'
@@ -72,4 +80,4 @@ latex_documents = [
]
# Example configuration for intersphinx: refer to the Python standard library.
-#intersphinx_mapping = {'http://docs.python.org/': None}
\ No newline at end of file
+#intersphinx_mapping = {'http://docs.python.org/': None}
diff --git a/doc/source/index.rst b/doc/source/index.rst
index 10db04b..037fe74 100644
--- a/doc/source/index.rst
+++ b/doc/source/index.rst
@@ -1,7 +1,11 @@
-Welcome to oslo.db's documentation!
-===================================
+Welcome to oslo.db documentation!
+=================================
+
+The Oslo database handling library. Provides database connectivity
+to different backends and helper utilities.
Contents:
+---------
.. toctree::
:maxdepth: 2
@@ -12,7 +16,7 @@ Contents:
contributing
Indices and tables
-==================
+------------------
* :ref:`genindex`
* :ref:`modindex`
diff --git a/doc/source/readme.rst b/doc/source/readme.rst
index 6b2b3ec..a6210d3 100644
--- a/doc/source/readme.rst
+++ b/doc/source/readme.rst
@@ -1 +1 @@
-.. include:: ../README.rst
\ No newline at end of file
+.. include:: ../../README.rst
diff --git a/doc/source/usage.rst b/doc/source/usage.rst
index be9ff69..0e5a2f2 100644
--- a/doc/source/usage.rst
+++ b/doc/source/usage.rst
@@ -8,6 +8,7 @@ To use oslo.db in a project::
.. code:: python
+ from oslo.config import cfg
from oslo.db.sqlalchemy import session as db_session
_FACADE = None
@@ -15,8 +16,7 @@ To use oslo.db in a project::
def _create_facade_lazily():
global _FACADE
if _FACADE is None:
- _FACADE = db_session.EngineFacade.from_config(
- CONF.database.connection, CONF)
+ _FACADE = db_session.EngineFacade.from_config(cfg.CONF)
return _FACADE
def get_engine():
@@ -48,12 +48,10 @@ To use oslo.db in a project::
from oslo.config import cfg
from oslo.db import api as db_api
- CONF = cfg.CONF
- CONF.import_opt('backend', 'oslo.db.options', group='database')
_BACKEND_MAPPING = {'sqlalchemy': 'project.db.sqlalchemy.api'}
- IMPL = db_api.DBAPI(CONF.database.backend, backend_mapping=_BACKEND_MAPPING)
+ IMPL = db_api.DBAPI.from_config(cfg.CONF, backend_mapping=_BACKEND_MAPPING)
def get_engine():
return IMPL.get_engine()
diff --git a/openstack-common.conf b/openstack-common.conf
index 63e8e40..1df4b2f 100644
--- a/openstack-common.conf
+++ b/openstack-common.conf
@@ -1,7 +1,6 @@
[DEFAULT]
# The list of modules to copy from oslo-incubator.git
-module=context
module=gettextutils
module=fixture.moxstubout
module=importutils
diff --git a/oslo/db/api.py b/oslo/db/api.py
index a2a7d0b..34f7c62 100644
--- a/oslo/db/api.py
+++ b/oslo/db/api.py
@@ -28,6 +28,7 @@ import time
from oslo.db import exception
from oslo.db.openstack.common.gettextutils import _LE
from oslo.db.openstack.common import importutils
+from oslo.db import options
LOG = logging.getLogger(__name__)
@@ -160,3 +161,29 @@ class DBAPI(object):
max_retry_interval=self.max_retry_interval)(attr)
return attr
+
+ @classmethod
+ def from_config(cls, conf, backend_mapping=None, lazy=False):
+ """Initialize DBAPI instance given a config instance.
+
+ :param conf: oslo.config config instance
+ :type conf: oslo.config.cfg.ConfigOpts
+
+ :param backend_mapping: backend name -> module/class to load mapping
+ :type backend_mapping: dict
+
+ :param lazy: load the DB backend lazily on the first DB API method call
+ :type lazy: bool
+
+ """
+
+ conf.register_opts(options.database_opts, 'database')
+
+ return cls(backend_name=conf.database.backend,
+ backend_mapping=backend_mapping,
+ lazy=lazy,
+ use_db_reconnect=conf.database.use_db_reconnect,
+ retry_interval=conf.database.db_retry_interval,
+ inc_retry_interval=conf.database.db_inc_retry_interval,
+ max_retry_interval=conf.database.db_max_retry_interval,
+ max_retries=conf.database.db_max_retries)
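
A minimal sketch of how a consuming project might adopt the new
``DBAPI.from_config()`` classmethod (the backend module path
'myproject.db.sqlalchemy.api' is illustrative):

    from oslo.config import cfg

    from oslo.db import api as db_api

    # Map the configured backend name to the module implementing it.
    _BACKEND_MAPPING = {'sqlalchemy': 'myproject.db.sqlalchemy.api'}

    # from_config() registers the [database] options on the config object
    # itself and picks up backend, use_db_reconnect and the db_retry_*
    # settings from it.
    IMPL = db_api.DBAPI.from_config(cfg.CONF,
                                    backend_mapping=_BACKEND_MAPPING)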
diff --git a/oslo/db/concurrency.py b/oslo/db/concurrency.py
new file mode 100644
index 0000000..5134785
--- /dev/null
+++ b/oslo/db/concurrency.py
@@ -0,0 +1,81 @@
+# Copyright 2014 Mirantis.inc
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import copy
+import logging
+import threading
+
+from oslo.config import cfg
+
+from oslo.db import api
+from oslo.db.openstack.common.gettextutils import _LE
+
+
+LOG = logging.getLogger(__name__)
+
+tpool_opts = [
+ cfg.BoolOpt('use_tpool',
+ default=False,
+ deprecated_name='dbapi_use_tpool',
+ deprecated_group='DEFAULT',
+ help='Enable the experimental use of thread pooling for '
+ 'all DB API calls'),
+]
+
+
+class TpoolDbapiWrapper(object):
+ """DB API wrapper class.
+
+ This wraps the oslo DB API with an option to use eventlet's thread
+ pooling. Since the CONF variable may not be loaded at the time
+ this class is instantiated, we must look at it on the first DB API call.
+ """
+
+ def __init__(self, conf, backend_mapping):
+ self._db_api = None
+ self._backend_mapping = backend_mapping
+ self._conf = conf
+ self._conf.register_opts(tpool_opts, 'database')
+ self._lock = threading.Lock()
+
+ @property
+ def _api(self):
+ if not self._db_api:
+ with self._lock:
+ if not self._db_api:
+ db_api = api.DBAPI.from_config(
+ conf=self._conf, backend_mapping=self._backend_mapping)
+ if self._conf.database.use_tpool:
+ try:
+ from eventlet import tpool
+ except ImportError:
+ LOG.exception(_LE("'eventlet' is required for "
+ "TpoolDbapiWrapper."))
+ raise
+ self._db_api = tpool.Proxy(db_api)
+ else:
+ self._db_api = db_api
+ return self._db_api
+
+ def __getattr__(self, key):
+ return getattr(self._api, key)
+
+
+def list_opts():
+ """Returns a list of oslo.config options available in this module.
+
+ :returns: a list of (group_name, opts) tuples
+ """
+ return [('database', copy.deepcopy(tpool_opts))]
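
A sketch of how a project might put the wrapper in place of a plain DBAPI
instance (the backend mapping and method name are illustrative); whether
calls are proxied through eventlet's tpool is then decided by the
[database] use_tpool option at the first DB API call:

    from oslo.config import cfg

    from oslo.db import concurrency

    _BACKEND_MAPPING = {'sqlalchemy': 'myproject.db.sqlalchemy.api'}

    # Behaves like DBAPI: attribute access is forwarded to the real
    # backend, which is loaded lazily on the first DB API method call.
    IMPL = concurrency.TpoolDbapiWrapper(cfg.CONF,
                                         backend_mapping=_BACKEND_MAPPING)

    def get_instance(context, instance_id):
        # hypothetical backend method, for illustration only
        return IMPL.get_instance(context, instance_id)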
diff --git a/oslo/db/openstack/common/context.py b/oslo/db/openstack/common/context.py
deleted file mode 100644
index 3eeb445..0000000
--- a/oslo/db/openstack/common/context.py
+++ /dev/null
@@ -1,111 +0,0 @@
-# Copyright 2011 OpenStack Foundation.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-Simple class that stores security context information in the web request.
-
-Projects should subclass this class if they wish to enhance the request
-context or provide additional information in their specific WSGI pipeline.
-"""
-
-import itertools
-import uuid
-
-
-def generate_request_id():
- return b'req-' + str(uuid.uuid4()).encode('ascii')
-
-
-class RequestContext(object):
-
- """Helper class to represent useful information about a request context.
-
- Stores information about the security context under which the user
- accesses the system, as well as additional request information.
- """
-
- user_idt_format = '{user} {tenant} {domain} {user_domain} {p_domain}'
-
- def __init__(self, auth_token=None, user=None, tenant=None, domain=None,
- user_domain=None, project_domain=None, is_admin=False,
- read_only=False, show_deleted=False, request_id=None,
- instance_uuid=None):
- self.auth_token = auth_token
- self.user = user
- self.tenant = tenant
- self.domain = domain
- self.user_domain = user_domain
- self.project_domain = project_domain
- self.is_admin = is_admin
- self.read_only = read_only
- self.show_deleted = show_deleted
- self.instance_uuid = instance_uuid
- if not request_id:
- request_id = generate_request_id()
- self.request_id = request_id
-
- def to_dict(self):
- user_idt = (
- self.user_idt_format.format(user=self.user or '-',
- tenant=self.tenant or '-',
- domain=self.domain or '-',
- user_domain=self.user_domain or '-',
- p_domain=self.project_domain or '-'))
-
- return {'user': self.user,
- 'tenant': self.tenant,
- 'domain': self.domain,
- 'user_domain': self.user_domain,
- 'project_domain': self.project_domain,
- 'is_admin': self.is_admin,
- 'read_only': self.read_only,
- 'show_deleted': self.show_deleted,
- 'auth_token': self.auth_token,
- 'request_id': self.request_id,
- 'instance_uuid': self.instance_uuid,
- 'user_identity': user_idt}
-
-
-def get_admin_context(show_deleted=False):
- context = RequestContext(None,
- tenant=None,
- is_admin=True,
- show_deleted=show_deleted)
- return context
-
-
-def get_context_from_function_and_args(function, args, kwargs):
- """Find an arg of type RequestContext and return it.
-
- This is useful in a couple of decorators where we don't
- know much about the function we're wrapping.
- """
-
- for arg in itertools.chain(kwargs.values(), args):
- if isinstance(arg, RequestContext):
- return arg
-
- return None
-
-
-def is_user_context(context):
- """Indicates if the request context is a normal user."""
- if not context:
- return False
- if context.is_admin:
- return False
- if not context.user_id or not context.project_id:
- return False
- return True
diff --git a/oslo/db/options.py b/oslo/db/options.py
index 126ec49..72e626c 100644
--- a/oslo/db/options.py
+++ b/oslo/db/options.py
@@ -39,6 +39,10 @@ database_opts = [
group='DATABASE'),
cfg.DeprecatedOpt('connection',
group='sql'), ]),
+ cfg.StrOpt('slave_connection',
+ secret=True,
+ help='The SQLAlchemy connection string to use to connect to the'
+ ' slave database.'),
cfg.StrOpt('mysql_sql_mode',
default='TRADITIONAL',
help='The SQL mode to be used for MySQL sessions. '
@@ -130,26 +134,24 @@ database_opts = [
'count.'),
]
-CONF = cfg.CONF
-CONF.register_opts(database_opts, 'database')
-
-def set_defaults(sql_connection, sqlite_db, max_pool_size=None,
- max_overflow=None, pool_timeout=None):
+def set_defaults(conf, connection=None, sqlite_db=None,
+ max_pool_size=None, max_overflow=None,
+ pool_timeout=None):
"""Set defaults for configuration variables."""
- cfg.set_defaults(database_opts,
- connection=sql_connection,
- sqlite_db=sqlite_db)
- # Update the QueuePool defaults
+
+ conf.register_opts(database_opts, group='database')
+
+ if connection is not None:
+ conf.set_default('connection', connection, group='database')
+ if sqlite_db is not None:
+ conf.set_default('sqlite_db', sqlite_db, group='database')
if max_pool_size is not None:
- cfg.set_defaults(database_opts,
- max_pool_size=max_pool_size)
+ conf.set_default('max_pool_size', max_pool_size, group='database')
if max_overflow is not None:
- cfg.set_defaults(database_opts,
- max_overflow=max_overflow)
+ conf.set_default('max_overflow', max_overflow, group='database')
if pool_timeout is not None:
- cfg.set_defaults(database_opts,
- pool_timeout=pool_timeout)
+ conf.set_default('pool_timeout', pool_timeout, group='database')
def list_opts():
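
Since the module no longer registers its options on the global CONF at
import time, callers now pass their own config object to set_defaults();
a minimal sketch of the new calling convention (values are illustrative):

    from oslo.config import cfg

    from oslo.db import options

    conf = cfg.ConfigOpts()

    # Registers the [database] options on conf and overrides the chosen
    # defaults; parameters left as None keep their shipped defaults.
    options.set_defaults(conf,
                         connection='sqlite:///:memory:',
                         max_pool_size=10,
                         pool_timeout=30)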
diff --git a/oslo/db/sqlalchemy/provision.py b/oslo/db/sqlalchemy/provision.py
index 598305b..317d7f9 100644
--- a/oslo/db/sqlalchemy/provision.py
+++ b/oslo/db/sqlalchemy/provision.py
@@ -52,7 +52,7 @@ def _execute_sql(engine, sql, driver):
except sqlalchemy.exc.OperationalError:
msg = ('%s does not match database admin '
'credentials or database does not exist.')
- LOG.exception(msg % engine.url)
+ LOG.exception(msg, engine.url)
raise exc.DBConnectionError(msg % engine.url)
diff --git a/oslo/db/sqlalchemy/session.py b/oslo/db/sqlalchemy/session.py
index c97f682..056644f 100644
--- a/oslo/db/sqlalchemy/session.py
+++ b/oslo/db/sqlalchemy/session.py
@@ -291,6 +291,7 @@ from sqlalchemy.pool import NullPool, StaticPool
from sqlalchemy.sql.expression import literal_column
from oslo.db import exception
+from oslo.db import options
from oslo.db.openstack.common.gettextutils import _LE, _LW
from oslo.db.openstack.common import timeutils
@@ -605,7 +606,8 @@ def create_engine(sql_connection, sqlite_fk=False, mysql_sql_mode=None,
idle_timeout=3600,
connection_debug=0, max_pool_size=None, max_overflow=None,
pool_timeout=None, sqlite_synchronous=True,
- connection_trace=False, max_retries=10, retry_interval=10):
+ connection_trace=False, max_retries=10, retry_interval=10,
+ thread_checkin=True):
"""Return a new SQLAlchemy engine."""
connection_dict = sqlalchemy.engine.url.make_url(sql_connection)
@@ -643,7 +645,8 @@ def create_engine(sql_connection, sqlite_fk=False, mysql_sql_mode=None,
engine = sqlalchemy.create_engine(sql_connection, **engine_args)
- sqlalchemy.event.listen(engine, 'checkin', _thread_yield)
+ if thread_checkin:
+ sqlalchemy.event.listen(engine, 'checkin', _thread_yield)
if engine.name in ('ibm_db_sa', 'mysql', 'postgresql'):
ping_callback = functools.partial(_ping_listener, engine)
@@ -671,7 +674,7 @@ def create_engine(sql_connection, sqlite_fk=False, mysql_sql_mode=None,
remaining = 'infinite'
while True:
msg = _LW('SQL connection failed. %s attempts left.')
- LOG.warning(msg % remaining)
+ LOG.warning(msg, remaining)
if remaining != 'infinite':
remaining -= 1
time.sleep(retry_interval)
@@ -790,11 +793,22 @@ class EngineFacade(object):
"""
- def __init__(self, sql_connection,
+ def __init__(self, sql_connection, slave_connection=None,
sqlite_fk=False, autocommit=True,
expire_on_commit=False, **kwargs):
"""Initialize engine and sessionmaker instances.
+ :param sql_connection: the connection string for the database to use
+ :type sql_connection: string
+
+ :param slave_connection: the connection string for the 'slave' database
+ to use. If not provided, the master database
+ will be used for all operations. Note: this
+ is meant to be used for offloading read
+ operations to asynchronously replicated slaves
+ to reduce the load on the master database.
+ :type slave_connection: string
+
:param sqlite_fk: enable foreign keys in SQLite
:type sqlite_fk: bool
@@ -828,62 +842,88 @@ class EngineFacade(object):
(defaults to 10)
:keyword retry_interval: interval between retries of opening a sql
connection (defaults to 10)
-
+ :keyword thread_checkin: boolean that indicates that a sleep(0) will
+ occur on each engine checkin event to allow
+ other greenthreads to run (defaults to True)
"""
super(EngineFacade, self).__init__()
- self._engine = create_engine(
- sql_connection=sql_connection,
- sqlite_fk=sqlite_fk,
- mysql_sql_mode=kwargs.get('mysql_sql_mode', 'TRADITIONAL'),
- idle_timeout=kwargs.get('idle_timeout', 3600),
- connection_debug=kwargs.get('connection_debug', 0),
- max_pool_size=kwargs.get('max_pool_size'),
- max_overflow=kwargs.get('max_overflow'),
- pool_timeout=kwargs.get('pool_timeout'),
- sqlite_synchronous=kwargs.get('sqlite_synchronous', True),
- connection_trace=kwargs.get('connection_trace', False),
- max_retries=kwargs.get('max_retries', 10),
- retry_interval=kwargs.get('retry_interval', 10))
- self._session_maker = get_maker(
- engine=self._engine,
- autocommit=autocommit,
- expire_on_commit=expire_on_commit)
-
- def get_engine(self):
- """Get the engine instance (note, that it's shared)."""
+ engine_kwargs = {
+ 'sqlite_fk': sqlite_fk,
+ 'mysql_sql_mode': kwargs.get('mysql_sql_mode', 'TRADITIONAL'),
+ 'idle_timeout': kwargs.get('idle_timeout', 3600),
+ 'connection_debug': kwargs.get('connection_debug', 0),
+ 'max_pool_size': kwargs.get('max_pool_size'),
+ 'max_overflow': kwargs.get('max_overflow'),
+ 'pool_timeout': kwargs.get('pool_timeout'),
+ 'sqlite_synchronous': kwargs.get('sqlite_synchronous', True),
+ 'connection_trace': kwargs.get('connection_trace', False),
+ 'max_retries': kwargs.get('max_retries', 10),
+ 'retry_interval': kwargs.get('retry_interval', 10),
+ 'thread_checkin': kwargs.get('thread_checkin', True)
+ }
+ maker_kwargs = {
+ 'autocommit': autocommit,
+ 'expire_on_commit': expire_on_commit
+ }
+
+ self._engine = create_engine(sql_connection=sql_connection,
+ **engine_kwargs)
+ self._session_maker = get_maker(engine=self._engine,
+ **maker_kwargs)
+ if slave_connection:
+ self._slave_engine = create_engine(sql_connection=slave_connection,
+ **engine_kwargs)
+ self._slave_session_maker = get_maker(engine=self._slave_engine,
+ **maker_kwargs)
+ else:
+ self._slave_engine = None
+ self._slave_session_maker = None
+
+ def get_engine(self, use_slave=False):
+ """Get the engine instance (note, that it's shared).
+
+ :param use_slave: if possible, use 'slave' database for this engine.
+ If the connection string for the slave database
+ wasn't provided, the 'master' engine will be returned.
+ (defaults to False)
+ :type use_slave: bool
+
+ """
+
+ if use_slave and self._slave_engine:
+ return self._slave_engine
return self._engine
- def get_session(self, **kwargs):
+ def get_session(self, use_slave=False, **kwargs):
"""Get a Session instance.
- If passed, keyword arguments values override the ones used when the
- sessionmaker instance was created.
+ :param use_slave: if possible, use 'slave' database connection for
+ this session. If the connection string for the
+ slave database wasn't provided, a session bound
+ to the 'master' engine will be returned.
+ (defaults to False)
+ :type use_slave: bool
- :keyword autocommit: use autocommit mode for created Session instances
- :type autocommit: bool
-
- :keyword expire_on_commit: expire session objects on commit
- :type expire_on_commit: bool
+ Keyword arguments will be passed to a sessionmaker instance as is (if
+ passed, they will override the ones used when the sessionmaker instance
+ was created). See SQLAlchemy Session docs for details.
"""
- for arg in kwargs:
- if arg not in ('autocommit', 'expire_on_commit'):
- del kwargs[arg]
+ if use_slave and self._slave_session_maker:
+ return self._slave_session_maker(**kwargs)
return self._session_maker(**kwargs)
@classmethod
- def from_config(cls, connection_string, conf,
+ def from_config(cls, conf,
sqlite_fk=False, autocommit=True, expire_on_commit=False):
"""Initialize EngineFacade using oslo.config config instance options.
- :param connection_string: SQLAlchemy connection string
- :type connection_string: string
-
:param conf: oslo.config config instance
:type conf: oslo.config.cfg.ConfigOpts
@@ -898,7 +938,10 @@ class EngineFacade(object):
"""
- return cls(sql_connection=connection_string,
+ conf.register_opts(options.database_opts, 'database')
+
+ return cls(sql_connection=conf.database.connection,
+ slave_connection=conf.database.slave_connection,
sqlite_fk=sqlite_fk,
autocommit=autocommit,
expire_on_commit=expire_on_commit,
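
A sketch of the resulting facade usage, assuming the [database] connection
and slave_connection options are set in the configuration:

    from oslo.config import cfg

    from oslo.db.sqlalchemy import session as db_session

    # from_config() now registers the [database] options itself and reads
    # both connection and slave_connection from the config object.
    facade = db_session.EngineFacade.from_config(cfg.CONF)

    writer = facade.get_session()                # bound to the master
    reader = facade.get_session(use_slave=True)  # bound to the slave if
                                                 # slave_connection is set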
diff --git a/oslo/db/sqlalchemy/test_migrations.py b/oslo/db/sqlalchemy/test_migrations.py
index 661b0a7..5972d03 100644
--- a/oslo/db/sqlalchemy/test_migrations.py
+++ b/oslo/db/sqlalchemy/test_migrations.py
@@ -60,10 +60,10 @@ def _set_db_lock(lock_path=None, lock_prefix=None):
path = lock_path or os.environ.get("OSLO_LOCK_PATH")
lock = lockfile.FileLock(os.path.join(path, lock_prefix))
with lock:
- LOG.debug('Got lock "%s"' % f.__name__)
+ LOG.debug('Got lock "%s"', f.__name__)
return f(*args, **kwargs)
finally:
- LOG.debug('Lock released "%s"' % f.__name__)
+ LOG.debug('Lock released "%s"', f.__name__)
return wrapper
return decorator
@@ -88,7 +88,7 @@ class BaseMigrationTestCase(test_base.BaseTestCase):
# Load test databases from the config file. Only do this
# once. No need to re-run this on each test...
- LOG.debug('config_path is %s' % self.CONFIG_FILE_PATH)
+ LOG.debug('config_path is %s', self.CONFIG_FILE_PATH)
if os.path.exists(self.CONFIG_FILE_PATH):
cp = moves.configparser.RawConfigParser()
try:
@@ -193,7 +193,7 @@ class WalkVersionsMixin(object):
self.migration_api.db_version(engine,
self.REPOSITORY))
- LOG.debug('latest version is %s' % self.REPOSITORY.latest)
+ LOG.debug('latest version is %s', self.REPOSITORY.latest)
versions = range(self.INIT_VERSION + 1, self.REPOSITORY.latest + 1)
for version in versions:
@@ -264,6 +264,7 @@ class WalkVersionsMixin(object):
if check:
check(engine, data)
except Exception:
- LOG.error(_LE("Failed to migrate to version %s on engine %s") %
- (version, engine))
+ LOG.error(_LE("Failed to migrate to version %(version)s on "
+ "engine %(engine)s"), {'version': version,
+ 'engine': engine})
raise
diff --git a/oslo/db/sqlalchemy/utils.py b/oslo/db/sqlalchemy/utils.py
index 4e17fc7..04eb7ce 100644
--- a/oslo/db/sqlalchemy/utils.py
+++ b/oslo/db/sqlalchemy/utils.py
@@ -29,14 +29,12 @@ from sqlalchemy import func
from sqlalchemy import Index
from sqlalchemy import Integer
from sqlalchemy import MetaData
-from sqlalchemy import or_
from sqlalchemy.sql.expression import literal_column
from sqlalchemy.sql.expression import UpdateBase
from sqlalchemy import String
from sqlalchemy import Table
from sqlalchemy.types import NullType
-from oslo.db.openstack.common import context as request_context
from oslo.db.openstack.common.gettextutils import _, _LI, _LW
from oslo.db.openstack.common import timeutils
from oslo.db.sqlalchemy import models
@@ -157,46 +155,37 @@ def paginate_query(query, model, limit, sort_keys, marker=None,
return query
-def _read_deleted_filter(query, db_model, read_deleted):
+def _read_deleted_filter(query, db_model, deleted):
if 'deleted' not in db_model.__table__.columns:
raise ValueError(_("There is no `deleted` column in `%s` table. "
"Project doesn't use soft-deleted feature.")
% db_model.__name__)
default_deleted_value = db_model.__table__.c.deleted.default.arg
- if read_deleted == 'no':
- query = query.filter(db_model.deleted == default_deleted_value)
- elif read_deleted == 'yes':
- pass # omit the filter to include deleted and active
- elif read_deleted == 'only':
+ if deleted:
query = query.filter(db_model.deleted != default_deleted_value)
else:
- raise ValueError(_("Unrecognized read_deleted value '%s'")
- % read_deleted)
+ query = query.filter(db_model.deleted == default_deleted_value)
return query
-def _project_filter(query, db_model, context, project_only):
- if project_only and 'project_id' not in db_model.__table__.columns:
+def _project_filter(query, db_model, project_id):
+ if 'project_id' not in db_model.__table__.columns:
raise ValueError(_("There is no `project_id` column in `%s` table.")
% db_model.__name__)
- if request_context.is_user_context(context) and project_only:
- if project_only == 'allow_none':
- is_none = None
- query = query.filter(or_(db_model.project_id == context.project_id,
- db_model.project_id == is_none))
- else:
- query = query.filter(db_model.project_id == context.project_id)
+ if isinstance(project_id, (list, tuple, set)):
+ query = query.filter(db_model.project_id.in_(project_id))
+ else:
+ query = query.filter(db_model.project_id == project_id)
return query
-def model_query(context, model, session, args=None, project_only=False,
- read_deleted=None):
- """Query helper that accounts for context's `read_deleted` field.
+def model_query(model, session, args=None, **kwargs):
+ """Query helper for db.sqlalchemy api methods.
- :param context: context to query under
+ This accounts for `deleted` and `project_id` fields.
:param model: Model to query. Must be a subclass of ModelBase.
:type model: models.ModelBase
@@ -207,44 +196,100 @@ def model_query(context, model, session, args=None, project_only=False,
:param args: Arguments to query. If None - model is used.
:type args: tuple
- :param project_only: If present and context is user-type, then restrict
- query to match the context's project_id. If set to
- 'allow_none', restriction includes project_id = None.
- :type project_only: bool
+ Keyword arguments:
+
+ :keyword project_id: If present, allows filtering by project_id(s).
+ Can be either a project_id value, or an iterable of
+ project_id values, or None. If an iterable is passed,
+ only rows whose project_id column value is on the
+ `project_id` list will be returned. If None is passed,
+ only rows which are not bound to any project, will be
+ returned.
+ :type project_id: iterable,
+ model.__table__.columns.project_id.type,
+ None type
+
+ :keyword deleted: If present, allows filtering by deleted field.
+ If True is passed, only deleted entries will be
+ returned, if False - only existing entries.
+ :type deleted: bool
- :param read_deleted: If present, overrides context's read_deleted field.
- :type read_deleted: bool
Usage:
- ..code:: python
+ .. code-block:: python
- result = (utils.model_query(context, models.Instance, session=session)
- .filter_by(uuid=instance_uuid)
- .all())
+ from oslo.db.sqlalchemy import utils
- query = utils.model_query(
- context, Node,
- session=session,
- args=(func.count(Node.id), func.sum(Node.ram))
- ).filter_by(project_id=project_id)
- """
+ def get_instance_by_uuid(uuid):
+ session = get_session()
+ with session.begin():
+ return (utils.model_query(models.Instance, session=session)
+ .filter(models.Instance.uuid == uuid)
+ .first())
- if not read_deleted:
- if hasattr(context, 'read_deleted'):
- # NOTE(viktors): some projects use `read_deleted` attribute in
- # their contexts instead of `show_deleted`.
- read_deleted = context.read_deleted
- else:
- read_deleted = context.show_deleted
+ def get_nodes_stat():
+ data = (Node.id, Node.cpu, Node.ram, Node.hdd)
+
+ session = get_session()
+ with session.begin():
+ return utils.model_query(Node, session=session, args=data).all()
+
+ You can also create your own helper based on ``utils.model_query()``.
+ For example, this can be useful if you plan to derive the ``project_id``
+ and ``deleted`` parameters from your project's ``context``:
+
+ .. code-block:: python
+
+ from oslo.db.sqlalchemy import utils
+
+
+ def _model_query(context, model, session=None, args=None,
+ project_id=None, project_only=False,
+ read_deleted=None):
+
+ # We assume that the functions ``_get_project_id()`` and
+ # ``_get_deleted()`` handle the passed parameters and the
+ # context object (for example, they decide whether to restrict a
+ # user to querying only their own entries by project_id, or to
+ # allow only admins to read deleted entries). They are expected
+ # to return ``project_id`` and ``deleted`` values suitable for
+ # the ``model_query()`` signature.
+ kwargs = {}
+ if project_id is not None:
+ kwargs['project_id'] = _get_project_id(context, project_id,
+ project_only)
+ if read_deleted is not None:
+ kwargs['deleted'] = _get_deleted(context, read_deleted)
+ session = session or get_session()
+
+ with session.begin():
+ return utils.model_query(model, session=session,
+ args=args, **kwargs)
+
+ def get_instance_by_uuid(context, uuid):
+ return (_model_query(context, models.Instance, read_deleted='yes')
+ .filter(models.Instance.uuid == uuid)
+ .first())
+
+ def get_nodes_data(context, project_id, project_only='allow_none'):
+ data = (Node.id, Node.cpu, Node.ram, Node.hdd)
+
+ return (_model_query(context, Node, args=data, project_id=project_id,
+ project_only=project_only)
+ .all())
+
+ """
if not issubclass(model, models.ModelBase):
raise TypeError(_("model should be a subclass of ModelBase"))
query = session.query(model) if not args else session.query(*args)
- query = _read_deleted_filter(query, model, read_deleted)
- query = _project_filter(query, model, context, project_only)
+ if 'deleted' in kwargs:
+ query = _read_deleted_filter(query, model, kwargs['deleted'])
+ if 'project_id' in kwargs:
+ query = _project_filter(query, model, kwargs['project_id'])
return query
@@ -385,7 +430,7 @@ def drop_old_duplicate_entries_from_table(migrate_engine, table_name,
[table.c.id]).where(delete_condition)
for row in migrate_engine.execute(rows_to_delete_select).fetchall():
LOG.info(_LI("Deleting duplicated row with id: %(id)s from table: "
- "%(table)s") % dict(id=row[0], table=table_name))
+ "%(table)s"), dict(id=row[0], table=table_name))
if use_soft_delete:
delete_statement = table.update().\
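
For callers migrating from the context-based signature, the old
read_deleted/project_only arguments map onto the new explicit keywords
roughly as in this sketch (the models module is hypothetical;
models.Instance stands in for any soft-deletable model with a
project_id column):

    from oslo.db.sqlalchemy import utils

    from myproject.db.sqlalchemy import models  # hypothetical models module

    def get_deleted_instances(context, session):
        # Previously: utils.model_query(context, models.Instance,
        #                               session=session,
        #                               read_deleted='only',
        #                               project_only=True)
        # read_deleted='yes' now corresponds to simply omitting the
        # `deleted` keyword.
        return (utils.model_query(models.Instance, session=session,
                                  deleted=True,
                                  project_id=context.project_id)
                .all())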
diff --git a/setup.cfg b/setup.cfg
index f393ea5..78097e8 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -27,6 +27,7 @@ namespace_packages =
[entry_points]
oslo.config.opts =
oslo.db = oslo.db.options:list_opts
+ oslo.db.concurrency = oslo.db.concurrency:list_opts
oslo.db.migration =
alembic = oslo.db.sqlalchemy.migration_cli.ext_alembic:AlembicExtension
@@ -53,3 +54,9 @@ input_file = oslo.db/locale/oslo.db.pot
keywords = _ gettext ngettext l_ lazy_gettext
mapping_file = babel.cfg
output_file = oslo.db/locale/oslo.db.pot
+
+[pbr]
+# NOTE(viktors): uncomment the ``warnerrors`` line below when we
+# want to treat sphinx warnings as errors
+# warnerrors = True
+autodoc_index_modules = True
diff --git a/tests/sqlalchemy/test_options.py b/tests/sqlalchemy/test_options.py
index 4870ed8..585b67a 100644
--- a/tests/sqlalchemy/test_options.py
+++ b/tests/sqlalchemy/test_options.py
@@ -14,19 +14,17 @@
from oslo.config import cfg
from oslo.db.openstack.common.fixture import config
+from oslo.db import options
from tests import utils as test_utils
-cfg.CONF.import_opt('connection', 'oslo.db.options',
- group='database')
-
-
class DbApiOptionsTestCase(test_utils.BaseTestCase):
def setUp(self):
super(DbApiOptionsTestCase, self).setUp()
config_fixture = self.useFixture(config.Config())
self.conf = config_fixture.conf
+ self.conf.register_opts(options.database_opts, group='database')
self.config = config_fixture.config
def test_deprecated_session_parameters(self):
@@ -118,3 +116,12 @@ pool_timeout=7
self.conf(['--config-file', path])
self.assertEqual(self.conf.database.backend, 'test_123')
+
+ def test_set_defaults(self):
+ conf = cfg.ConfigOpts()
+
+ options.set_defaults(conf,
+ connection='sqlite:///:memory:')
+
+ self.assertTrue(len(conf.database.items()) > 1)
+ self.assertEqual('sqlite:///:memory:', conf.database.connection)
diff --git a/tests/sqlalchemy/test_sqlalchemy.py b/tests/sqlalchemy/test_sqlalchemy.py
index 9cda622..d526004 100644
--- a/tests/sqlalchemy/test_sqlalchemy.py
+++ b/tests/sqlalchemy/test_sqlalchemy.py
@@ -352,13 +352,15 @@ class EngineFacadeTestCase(oslo_test.BaseTestCase):
@mock.patch('oslo.db.sqlalchemy.session.create_engine')
def test_creation_from_config(self, create_engine, get_maker):
conf = mock.MagicMock()
+ conf.database.connection = 'sqlite:///:memory:'
+ conf.database.slave_connection = None
conf.database.items.return_value = [
('connection_debug', 100),
('max_pool_size', 10),
('mysql_sql_mode', 'TRADITIONAL'),
]
- session.EngineFacade.from_config('sqlite:///:memory:', conf,
+ session.EngineFacade.from_config(conf,
autocommit=False,
expire_on_commit=True)
@@ -376,11 +378,49 @@ class EngineFacadeTestCase(oslo_test.BaseTestCase):
connection_trace=mock.ANY,
sqlite_synchronous=mock.ANY,
pool_timeout=mock.ANY,
+ thread_checkin=mock.ANY,
)
get_maker.assert_called_once_with(engine=create_engine(),
autocommit=False,
expire_on_commit=True)
+ def test_slave_connection(self):
+ paths = self.create_tempfiles([('db.master', ''), ('db.slave', '')],
+ ext='')
+ master_path = 'sqlite:///' + paths[0]
+ slave_path = 'sqlite:///' + paths[1]
+
+ facade = session.EngineFacade(
+ sql_connection=master_path,
+ slave_connection=slave_path
+ )
+
+ master = facade.get_engine()
+ self.assertEqual(master_path, str(master.url))
+ slave = facade.get_engine(use_slave=True)
+ self.assertEqual(slave_path, str(slave.url))
+
+ master_session = facade.get_session()
+ self.assertEqual(master_path, str(master_session.bind.url))
+ slave_session = facade.get_session(use_slave=True)
+ self.assertEqual(slave_path, str(slave_session.bind.url))
+
+ def test_slave_connection_string_not_provided(self):
+ master_path = 'sqlite:///' + self.create_tempfiles(
+ [('db.master', '')], ext='')[0]
+
+ facade = session.EngineFacade(sql_connection=master_path)
+
+ master = facade.get_engine()
+ slave = facade.get_engine(use_slave=True)
+ self.assertIs(master, slave)
+ self.assertEqual(master_path, str(master.url))
+
+ master_session = facade.get_session()
+ self.assertEqual(master_path, str(master_session.bind.url))
+ slave_session = facade.get_session(use_slave=True)
+ self.assertEqual(master_path, str(slave_session.bind.url))
+
class MysqlSetCallbackTest(oslo_test.BaseTestCase):
diff --git a/tests/sqlalchemy/test_utils.py b/tests/sqlalchemy/test_utils.py
index 5ba80a4..36a2b14 100644
--- a/tests/sqlalchemy/test_utils.py
+++ b/tests/sqlalchemy/test_utils.py
@@ -741,109 +741,72 @@ class TestModelQuery(test_base.BaseTestCase):
self.session = mock.MagicMock()
self.session.query.return_value = self.session.query
self.session.query.filter.return_value = self.session.query
- self.user_context = mock.MagicMock(is_admin=False, read_deleted='yes',
- user_id=42, project_id=43)
def test_wrong_model(self):
- self.assertRaises(TypeError, utils.model_query, self.user_context,
+ self.assertRaises(TypeError, utils.model_query,
FakeModel, session=self.session)
def test_no_soft_deleted(self):
- self.assertRaises(ValueError, utils.model_query, self.user_context,
- MyModel, session=self.session)
+ self.assertRaises(ValueError, utils.model_query,
+ MyModel, session=self.session, deleted=True)
- def test_read_deleted_only(self):
+ def test_deleted_false(self):
mock_query = utils.model_query(
- self.user_context, MyModelSoftDeleted,
- session=self.session, read_deleted='only')
+ MyModelSoftDeleted, session=self.session, deleted=False)
deleted_filter = mock_query.filter.call_args[0][0]
self.assertEqual(str(deleted_filter),
- 'soft_deleted_test_model.deleted != :deleted_1')
+ 'soft_deleted_test_model.deleted = :deleted_1')
self.assertEqual(deleted_filter.right.value,
MyModelSoftDeleted.__mapper__.c.deleted.default.arg)
- def test_read_deleted_no(self):
+ def test_deleted_true(self):
mock_query = utils.model_query(
- self.user_context, MyModelSoftDeleted,
- session=self.session, read_deleted='no')
+ MyModelSoftDeleted, session=self.session, deleted=True)
deleted_filter = mock_query.filter.call_args[0][0]
self.assertEqual(str(deleted_filter),
- 'soft_deleted_test_model.deleted = :deleted_1')
+ 'soft_deleted_test_model.deleted != :deleted_1')
self.assertEqual(deleted_filter.right.value,
MyModelSoftDeleted.__mapper__.c.deleted.default.arg)
- def test_read_deleted_yes(self):
- mock_query = utils.model_query(
- self.user_context, MyModelSoftDeleted,
- session=self.session, read_deleted='yes')
-
- self.assertEqual(mock_query.filter.call_count, 0)
+ @mock.patch.object(utils, "_read_deleted_filter")
+ def test_no_deleted_value(self, _read_deleted_filter):
+ utils.model_query(MyModelSoftDeleted, session=self.session)
+ self.assertEqual(_read_deleted_filter.call_count, 0)
- def test_wrong_read_deleted(self):
- self.assertRaises(ValueError, utils.model_query, self.user_context,
- MyModelSoftDeleted, session=self.session,
- read_deleted='ololo')
+ def test_project_filter(self):
+ project_id = 10
- def test_project_only_true(self):
mock_query = utils.model_query(
- self.user_context, MyModelSoftDeletedProjectId,
- session=self.session, project_only=True)
+ MyModelSoftDeletedProjectId, session=self.session,
+ project_only=True, project_id=project_id)
deleted_filter = mock_query.filter.call_args[0][0]
self.assertEqual(
str(deleted_filter),
'soft_deleted_project_id_test_model.project_id = :project_id_1')
- self.assertEqual(deleted_filter.right.value,
- self.user_context.project_id)
+ self.assertEqual(deleted_filter.right.value, project_id)
def test_project_filter_wrong_model(self):
- self.assertRaises(ValueError, utils.model_query, self.user_context,
+ self.assertRaises(ValueError, utils.model_query,
MyModelSoftDeleted, session=self.session,
- project_only=True)
+ project_id=10)
- def test_read_deleted_allow_none(self):
+ def test_project_filter_allow_none(self):
mock_query = utils.model_query(
- self.user_context, MyModelSoftDeletedProjectId,
- session=self.session, project_only='allow_none')
+ MyModelSoftDeletedProjectId,
+ session=self.session, project_id=(10, None))
self.assertEqual(
str(mock_query.filter.call_args[0][0]),
- 'soft_deleted_project_id_test_model.project_id = :project_id_1 OR'
- ' soft_deleted_project_id_test_model.project_id IS NULL'
+ 'soft_deleted_project_id_test_model.project_id'
+ ' IN (:project_id_1, NULL)'
)
- @mock.patch.object(utils, "_read_deleted_filter")
- @mock.patch.object(utils, "_project_filter")
- def test_context_show_deleted(self, _project_filter, _read_deleted_filter):
- user_context = mock.MagicMock(is_admin=False, show_deleted='yes',
- user_id=42, project_id=43)
- delattr(user_context, 'read_deleted')
- _read_deleted_filter.return_value = self.session.query
- _project_filter.return_value = self.session.query
- utils.model_query(user_context, MyModel,
- args=(MyModel.id,), session=self.session)
-
- self.session.query.assert_called_with(MyModel.id)
- _read_deleted_filter.assert_called_with(
- self.session.query, MyModel, user_context.show_deleted)
- _project_filter.assert_called_with(
- self.session.query, MyModel, user_context, False)
-
- @mock.patch.object(utils, "_read_deleted_filter")
- @mock.patch.object(utils, "_project_filter")
- def test_model_query_common(self, _project_filter, _read_deleted_filter):
- _read_deleted_filter.return_value = self.session.query
- _project_filter.return_value = self.session.query
- utils.model_query(self.user_context, MyModel,
- args=(MyModel.id,), session=self.session)
-
+ def test_model_query_common(self):
+ utils.model_query(MyModel, args=(MyModel.id,), session=self.session)
self.session.query.assert_called_with(MyModel.id)
- _read_deleted_filter.assert_called_with(
- self.session.query, MyModel, self.user_context.read_deleted)
- _project_filter.assert_called_with(
- self.session.query, MyModel, self.user_context, False)
class TestUtils(db_test_base.DbTestCase):
diff --git a/tests/test_api.py b/tests/test_api.py
index 5534757..98c618f 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -16,6 +16,7 @@
"""Unit tests for DB API."""
import mock
+from oslo.config import cfg
from oslo.db import api
from oslo.db import exception
@@ -78,6 +79,13 @@ class DBAPITestCase(test_utils.BaseTestCase):
dbapi.api_class_call1(1, 'abc')
self.assertIsNotNone(dbapi._backend)
+ def test_dbapi_from_config(self):
+ conf = cfg.ConfigOpts()
+
+ dbapi = api.DBAPI.from_config(conf,
+ backend_mapping={'sqlalchemy': __name__})
+ self.assertIsNotNone(dbapi._backend)
+
class DBReconnectTestCase(DBAPITestCase):
def setUp(self):
diff --git a/tests/test_concurrency.py b/tests/test_concurrency.py
new file mode 100644
index 0000000..cf34bba
--- /dev/null
+++ b/tests/test_concurrency.py
@@ -0,0 +1,108 @@
+# Copyright 2014 Mirantis.inc
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import sys
+
+import mock
+
+from oslo.db import concurrency
+from tests import utils as test_utils
+
+FAKE_BACKEND_MAPPING = {'sqlalchemy': 'fake.db.sqlalchemy.api'}
+
+
+class TpoolDbapiWrapperTestCase(test_utils.BaseTestCase):
+
+ def setUp(self):
+ super(TpoolDbapiWrapperTestCase, self).setUp()
+ self.db_api = concurrency.TpoolDbapiWrapper(
+ conf=self.conf, backend_mapping=FAKE_BACKEND_MAPPING)
+
+ # NOTE(akurilin): We are not going to add `eventlet` to `oslo.db` in
+ # requirements (`requirements.txt` and `test-requirements.txt`) for
+ # the following reasons:
+ # - support for eventlet's thread pooling is entirely optional;
+ # - we don't need to test `tpool.Proxy` functionality itself,
+ # because it's a tool from a third-party library;
+ # - `eventlet` would prevent us from running unit tests on Python 3.x
+ # versions, because it doesn't support them yet.
+ #
+ # As we don't test `tpool.Proxy`, we can safely mock it in tests.
+
+ self.proxy = mock.MagicMock()
+ self.eventlet = mock.MagicMock()
+ self.eventlet.tpool.Proxy.return_value = self.proxy
+ sys.modules['eventlet'] = self.eventlet
+ self.addCleanup(sys.modules.pop, 'eventlet', None)
+
+ @mock.patch('oslo.db.api.DBAPI')
+ def test_db_api_common(self, mock_db_api):
+ # test context:
+ # CONF.database.use_tpool == False
+ # eventlet is installed
+ # expected result:
+ # TpoolDbapiWrapper should wrap DBAPI
+
+ fake_db_api = mock.MagicMock()
+ mock_db_api.from_config.return_value = fake_db_api
+
+ # get access to some db-api method
+ self.db_api.fake_call_1
+
+ mock_db_api.from_config.assert_called_once_with(
+ conf=self.conf, backend_mapping=FAKE_BACKEND_MAPPING)
+ self.assertEqual(self.db_api._db_api, fake_db_api)
+ self.assertFalse(self.eventlet.tpool.Proxy.called)
+
+ # access another db-api method to be sure that the api hasn't changed
+ self.db_api.fake_call_2
+
+ self.assertEqual(self.db_api._db_api, fake_db_api)
+ self.assertFalse(self.eventlet.tpool.Proxy.called)
+ self.assertEqual(1, mock_db_api.from_config.call_count)
+
+ @mock.patch('oslo.db.api.DBAPI')
+ def test_db_api_config_change(self, mock_db_api):
+ # test context:
+ # CONF.database.use_tpool == True
+ # eventlet is installed
+ # expected result:
+ # TpoolDbapiWrapper should wrap tpool proxy
+
+ fake_db_api = mock.MagicMock()
+ mock_db_api.from_config.return_value = fake_db_api
+ self.conf.set_override('use_tpool', True, group='database')
+
+ # get access to some db-api method
+ self.db_api.fake_call
+
+ # CONF.database.use_tpool is True, so we get tpool proxy in this case
+ mock_db_api.from_config.assert_called_once_with(
+ conf=self.conf, backend_mapping=FAKE_BACKEND_MAPPING)
+ self.eventlet.tpool.Proxy.assert_called_once_with(fake_db_api)
+ self.assertEqual(self.db_api._db_api, self.proxy)
+
+ @mock.patch('oslo.db.api.DBAPI')
+ def test_db_api_without_installed_eventlet(self, mock_db_api):
+ # test context:
+ # CONF.database.use_tpool == True
+ # eventlet is not installed
+ # expected result:
+ # raise ImportError
+
+ self.conf.set_override('use_tpool', True, group='database')
+ del sys.modules['eventlet']
+
+ self.assertRaises(ImportError, getattr, self.db_api, 'fake')
diff --git a/tox.ini b/tox.ini
index 1c483a6..fef9a35 100644
--- a/tox.ini
+++ b/tox.ini
@@ -26,6 +26,10 @@ commands = {posargs}
[testenv:cover]
commands = python setup.py testr --coverage --testr-args='{posargs}'
+[testenv:docs]
+commands =
+ python setup.py build_sphinx
+
[flake8]
# H803 skipped on purpose per list discussion.
# E123, E125 skipped as they are invalid PEP-8.