28 files changed, 560 insertions, 154 deletions
diff --git a/oslo.db/locale/en_GB/LC_MESSAGES/oslo.db-log-error.po b/oslo.db/locale/en_GB/LC_MESSAGES/oslo.db-log-error.po index 8e69aa6..e001a43 100644 --- a/oslo.db/locale/en_GB/LC_MESSAGES/oslo.db-log-error.po +++ b/oslo.db/locale/en_GB/LC_MESSAGES/oslo.db-log-error.po @@ -1,5 +1,5 @@ # Translations template for oslo.db. -# Copyright (C) 2014 ORGANIZATION +# Copyright (C) 2015 ORGANIZATION # This file is distributed under the same license as the oslo.db project. # # Translators: @@ -8,9 +8,9 @@ msgid "" msgstr "" "Project-Id-Version: oslo.db\n" "Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" -"POT-Creation-Date: 2014-09-12 06:00+0000\n" -"PO-Revision-Date: 2014-09-02 09:17+0000\n" -"Last-Translator: Andi Chandler <andi@gowling.com>\n" +"POT-Creation-Date: 2015-01-30 06:20+0000\n" +"PO-Revision-Date: 2015-01-27 15:33+0000\n" +"Last-Translator: openstackjenkins <jenkins@openstack.org>\n" "Language-Team: English (United Kingdom) (http://www.transifex.com/projects/p/" "oslodb/language/en_GB/)\n" "Language: en_GB\n" @@ -20,33 +20,33 @@ msgstr "" "Generated-By: Babel 1.3\n" "Plural-Forms: nplurals=2; plural=(n != 1);\n" -#: oslo/db/api.py:106 +#: oslo_db/api.py:120 msgid "DB exceeded retry limit." msgstr "DB exceeded retry limit." -#: oslo/db/api.py:110 -msgid "DB connection error." -msgstr "DB connection error." +#: oslo_db/api.py:124 +msgid "DB error." +msgstr "" -#: oslo/db/concurrency.py:64 +#: oslo_db/concurrency.py:64 msgid "'eventlet' is required for TpoolDbapiWrapper." msgstr "'eventlet' is required for TpoolDbapiWrapper." -#: oslo/db/sqlalchemy/exc_filters.py:277 +#: oslo_db/sqlalchemy/exc_filters.py:292 #, python-format msgid "DBAPIError exception wrapped from %s" msgstr "DBAPIError exception wrapped from %s" -#: oslo/db/sqlalchemy/exc_filters.py:288 +#: oslo_db/sqlalchemy/exc_filters.py:303 msgid "DB exception wrapped." msgstr "DB exception wrapped." -#: oslo/db/sqlalchemy/test_migrations.py:271 +#: oslo_db/sqlalchemy/test_migrations.py:272 #, python-format msgid "Failed to migrate to version %(ver)s on engine %(eng)s" msgstr "Failed to migrate to version %(ver)s on engine %(eng)s" -#: oslo/db/sqlalchemy/migration_cli/ext_migrate.py:61 +#: oslo_db/sqlalchemy/migration_cli/ext_migrate.py:61 msgid "" "Migration number for migrate plugin must be valid integer or empty, if you " "want to downgrade to initial state" diff --git a/oslo.db/locale/fr/LC_MESSAGES/oslo.db-log-error.po b/oslo.db/locale/fr/LC_MESSAGES/oslo.db-log-error.po index 8b9a10a..61050f5 100644 --- a/oslo.db/locale/fr/LC_MESSAGES/oslo.db-log-error.po +++ b/oslo.db/locale/fr/LC_MESSAGES/oslo.db-log-error.po @@ -1,5 +1,5 @@ # Translations template for oslo.db. -# Copyright (C) 2014 ORGANIZATION +# Copyright (C) 2015 ORGANIZATION # This file is distributed under the same license as the oslo.db project. # # Translators: @@ -8,9 +8,9 @@ msgid "" msgstr "" "Project-Id-Version: oslo.db\n" "Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" -"POT-Creation-Date: 2014-11-28 06:01+0000\n" -"PO-Revision-Date: 2014-11-23 18:37+0000\n" -"Last-Translator: Maxime COQUEREL <max.coquerel@gmail.com>\n" +"POT-Creation-Date: 2015-01-30 06:20+0000\n" +"PO-Revision-Date: 2015-01-27 15:33+0000\n" +"Last-Translator: openstackjenkins <jenkins@openstack.org>\n" "Language-Team: French (http://www.transifex.com/projects/p/oslodb/language/" "fr/)\n" "Language: fr\n" @@ -20,33 +20,33 @@ msgstr "" "Generated-By: Babel 1.3\n" "Plural-Forms: nplurals=2; plural=(n > 1);\n" -#: oslo/db/api.py:106 +#: oslo_db/api.py:120 msgid "DB exceeded retry limit." 
msgstr "DB limite de tentatives dépassé." -#: oslo/db/api.py:110 -msgid "DB connection error." -msgstr "Erreur de connexion DB." +#: oslo_db/api.py:124 +msgid "DB error." +msgstr "" -#: oslo/db/concurrency.py:64 +#: oslo_db/concurrency.py:64 msgid "'eventlet' is required for TpoolDbapiWrapper." msgstr "'eventlet' est requis pour poolDbapiWrapper." -#: oslo/db/sqlalchemy/exc_filters.py:292 +#: oslo_db/sqlalchemy/exc_filters.py:292 #, python-format msgid "DBAPIError exception wrapped from %s" msgstr "Exception DBAPIError enveloppé depuis %s" -#: oslo/db/sqlalchemy/exc_filters.py:303 +#: oslo_db/sqlalchemy/exc_filters.py:303 msgid "DB exception wrapped." msgstr "DB exception enveloppé." -#: oslo/db/sqlalchemy/test_migrations.py:272 +#: oslo_db/sqlalchemy/test_migrations.py:272 #, python-format msgid "Failed to migrate to version %(ver)s on engine %(eng)s" msgstr "Échec de migration de la version %(ver)s sur le moteur %(eng)s" -#: oslo/db/sqlalchemy/migration_cli/ext_migrate.py:61 +#: oslo_db/sqlalchemy/migration_cli/ext_migrate.py:61 msgid "" "Migration number for migrate plugin must be valid integer or empty, if you " "want to downgrade to initial state" diff --git a/oslo.db/locale/oslo.db-log-error.pot b/oslo.db/locale/oslo.db-log-error.pot index 0c5b4d2..85a4617 100644 --- a/oslo.db/locale/oslo.db-log-error.pot +++ b/oslo.db/locale/oslo.db-log-error.pot @@ -1,14 +1,14 @@ # Translations template for oslo.db. -# Copyright (C) 2014 ORGANIZATION +# Copyright (C) 2015 ORGANIZATION # This file is distributed under the same license as the oslo.db project. -# FIRST AUTHOR <EMAIL@ADDRESS>, 2014. +# FIRST AUTHOR <EMAIL@ADDRESS>, 2015. # #, fuzzy msgid "" msgstr "" -"Project-Id-Version: oslo.db 0.3.0.70.g69f16bf\n" +"Project-Id-Version: oslo.db 1.4.1.post9\n" "Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" -"POT-Creation-Date: 2014-08-05 06:03+0000\n" +"POT-Creation-Date: 2015-01-30 06:20+0000\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" "Language-Team: LANGUAGE <LL@li.org>\n" @@ -17,33 +17,33 @@ msgstr "" "Content-Transfer-Encoding: 8bit\n" "Generated-By: Babel 1.3\n" -#: oslo/db/api.py:105 +#: oslo_db/api.py:120 msgid "DB exceeded retry limit." msgstr "" -#: oslo/db/api.py:109 -msgid "DB connection error." +#: oslo_db/api.py:124 +msgid "DB error." msgstr "" -#: oslo/db/concurrency.py:64 +#: oslo_db/concurrency.py:64 msgid "'eventlet' is required for TpoolDbapiWrapper." msgstr "" -#: oslo/db/sqlalchemy/exc_filters.py:277 +#: oslo_db/sqlalchemy/exc_filters.py:292 #, python-format msgid "DBAPIError exception wrapped from %s" msgstr "" -#: oslo/db/sqlalchemy/exc_filters.py:288 +#: oslo_db/sqlalchemy/exc_filters.py:303 msgid "DB exception wrapped." msgstr "" -#: oslo/db/sqlalchemy/test_migrations.py:246 +#: oslo_db/sqlalchemy/test_migrations.py:272 #, python-format msgid "Failed to migrate to version %(ver)s on engine %(eng)s" msgstr "" -#: oslo/db/sqlalchemy/migration_cli/ext_migrate.py:61 +#: oslo_db/sqlalchemy/migration_cli/ext_migrate.py:61 msgid "" "Migration number for migrate plugin must be valid integer or empty, if " "you want to downgrade to initial state" diff --git a/oslo_db/api.py b/oslo_db/api.py index e673b37..98e4d75 100644 --- a/oslo_db/api.py +++ b/oslo_db/api.py @@ -24,10 +24,11 @@ API methods. """ import logging +import sys import threading import time -from oslo.utils import importutils +from oslo_utils import importutils import six from oslo_db._i18n import _LE @@ -55,18 +56,36 @@ def safe_for_db_retry(f): :param f: database api method. 
:type f: function. """ - f.__dict__['enable_retry'] = True + f.enable_retry_on_disconnect = True + return f + + +def retry_on_deadlock(f): + """Retry a DB API call if Deadlock was received. + + wrap_db_entry will be applied to all db.api functions marked with this + decorator. + """ + f.enable_retry_on_deadlock = True + return f + + +def retry_on_request(f): + """Retry a DB API call if RetryRequest exception was received. + + wrap_db_entry will be applied to all db.api functions marked with this + decorator. + """ + f.enable_retry_on_request = True return f class wrap_db_retry(object): - """Decorator class. Retry db.api methods, if DBConnectionError() raised. + """Retry db.api methods, if db_error raised - Retry decorated db.api methods. If we enabled `use_db_reconnect` - in config, this decorator will be applied to all db.api functions, - marked with @safe_for_db_retry decorator. - Decorator catches DBConnectionError() and retries function in a - loop until it succeeds, or until maximum retries count will be reached. + Retry decorated db.api methods. This decorator catches db_error and retries + function in a loop until it succeeds, or until maximum retries count + will be reached. Keyword arguments: @@ -83,10 +102,18 @@ class wrap_db_retry(object): :type max_retry_interval: int """ - def __init__(self, retry_interval, max_retries, inc_retry_interval, - max_retry_interval): + def __init__(self, retry_interval=0, max_retries=0, inc_retry_interval=0, + max_retry_interval=0, retry_on_disconnect=False, + retry_on_deadlock=False, retry_on_request=False): super(wrap_db_retry, self).__init__() + self.db_error = () + if retry_on_disconnect: + self.db_error += (exception.DBConnectionError, ) + if retry_on_deadlock: + self.db_error += (exception.DBDeadlock, ) + if retry_on_request: + self.db_error += (exception.RetryRequest, ) self.retry_interval = retry_interval self.max_retries = max_retries self.inc_retry_interval = inc_retry_interval @@ -97,17 +124,22 @@ class wrap_db_retry(object): def wrapper(*args, **kwargs): next_interval = self.retry_interval remaining = self.max_retries + db_error = self.db_error while True: try: return f(*args, **kwargs) - except exception.DBConnectionError as e: + except db_error as e: if remaining == 0: LOG.exception(_LE('DB exceeded retry limit.')) - raise exception.DBError(e) + if isinstance(e, exception.RetryRequest): + six.reraise(type(e.inner_exc), + e.inner_exc, + sys.exc_info()[2]) + raise e if remaining != -1: remaining -= 1 - LOG.exception(_LE('DB connection error.')) + LOG.exception(_LE('DB error.')) # NOTE(vsergeyev): We are using patched time module, so # this effectively yields the execution # context to another green thread. @@ -193,12 +225,20 @@ class DBAPI(object): # NOTE(vsergeyev): If `use_db_reconnect` option is set to True, retry # DB API methods, decorated with @safe_for_db_retry # on disconnect. 
- if self.use_db_reconnect and hasattr(attr, 'enable_retry'): + retry_on_disconnect = self.use_db_reconnect and getattr( + attr, 'enable_retry_on_disconnect', False) + retry_on_deadlock = getattr(attr, 'enable_retry_on_deadlock', False) + retry_on_request = getattr(attr, 'enable_retry_on_request', False) + + if retry_on_disconnect or retry_on_deadlock or retry_on_request: attr = wrap_db_retry( retry_interval=self.retry_interval, max_retries=self.max_retries, inc_retry_interval=self.inc_retry_interval, - max_retry_interval=self.max_retry_interval)(attr) + max_retry_interval=self.max_retry_interval, + retry_on_disconnect=retry_on_disconnect, + retry_on_deadlock=retry_on_deadlock, + retry_on_request=retry_on_request)(attr) return attr diff --git a/oslo_db/concurrency.py b/oslo_db/concurrency.py index 2c59623..07ede0a 100644 --- a/oslo_db/concurrency.py +++ b/oslo_db/concurrency.py @@ -17,7 +17,7 @@ import copy import logging import threading -from oslo.config import cfg +from oslo_config import cfg from oslo_db._i18n import _LE from oslo_db import api diff --git a/oslo_db/exception.py b/oslo_db/exception.py index 5de7f1e..f950f6a 100644 --- a/oslo_db/exception.py +++ b/oslo_db/exception.py @@ -171,3 +171,12 @@ class BackendNotAvailable(Exception): within a test suite. """ + + +class RetryRequest(Exception): + """Error raised when DB operation needs to be retried. + + That could be intentionally raised by the code without any real DB errors. + """ + def __init__(self, inner_exc): + self.inner_exc = inner_exc diff --git a/oslo_db/options.py b/oslo_db/options.py index b855064..42de948 100644 --- a/oslo_db/options.py +++ b/oslo_db/options.py @@ -12,7 +12,7 @@ import copy -from oslo.config import cfg +from oslo_config import cfg database_opts = [ @@ -119,18 +119,20 @@ database_opts = [ 'on connection lost.'), cfg.IntOpt('db_retry_interval', default=1, - help='Seconds between database connection retries.'), + help='Seconds between retries of a database transaction.'), cfg.BoolOpt('db_inc_retry_interval', default=True, - help='If True, increases the interval between database ' - 'connection retries up to db_max_retry_interval.'), + help='If True, increases the interval between retries ' + 'of a database operation up to db_max_retry_interval.'), cfg.IntOpt('db_max_retry_interval', default=10, help='If db_inc_retry_interval is set, the ' - 'maximum seconds between database connection retries.'), + 'maximum seconds between retries of a ' + 'database operation.'), cfg.IntOpt('db_max_retries', default=20, - help='Maximum database connection retries before error is ' + help='Maximum retries in case of connection error or deadlock ' + 'error before error is ' 'raised. Set to -1 to specify an infinite retry ' 'count.'), ] diff --git a/oslo_db/sqlalchemy/compat/utils.py b/oslo_db/sqlalchemy/compat/utils.py index fa6c3e7..e817718 100644 --- a/oslo_db/sqlalchemy/compat/utils.py +++ b/oslo_db/sqlalchemy/compat/utils.py @@ -24,3 +24,21 @@ sqla_097 = _SQLA_VERSION >= (0, 9, 7) sqla_094 = _SQLA_VERSION >= (0, 9, 4) sqla_090 = _SQLA_VERSION >= (0, 9, 0) sqla_08 = _SQLA_VERSION >= (0, 8) + + +def get_postgresql_enums(conn): + """Return a list of ENUM type names on a Postgresql backend. + + For SQLAlchemy 0.9 and lower, makes use of the semi-private + _load_enums() method of the Postgresql dialect. In SQLAlchemy + 1.0 this feature is supported using get_enums(). + + This function may only be called when the given connection + is against the Postgresql backend. It will fail for other + kinds of backends. 
+ + """ + if sqla_100: + return [e['name'] for e in sqlalchemy.inspect(conn).get_enums()] + else: + return conn.dialect._load_enums(conn).keys() diff --git a/oslo_db/sqlalchemy/migration_cli/ext_alembic.py b/oslo_db/sqlalchemy/migration_cli/ext_alembic.py index 243ae47..1dbf88f 100644 --- a/oslo_db/sqlalchemy/migration_cli/ext_alembic.py +++ b/oslo_db/sqlalchemy/migration_cli/ext_alembic.py @@ -17,7 +17,6 @@ from alembic import config as alembic_config import alembic.migration as alembic_migration from oslo_db.sqlalchemy.migration_cli import ext_base -from oslo_db.sqlalchemy import session as db_session class AlembicExtension(ext_base.MigrationExtensionBase): @@ -28,31 +27,41 @@ class AlembicExtension(ext_base.MigrationExtensionBase): def enabled(self): return os.path.exists(self.alembic_ini_path) - def __init__(self, migration_config): + def __init__(self, engine, migration_config): """Extension to provide alembic features. + :param engine: SQLAlchemy engine instance for a given database + :type engine: sqlalchemy.engine.Engine :param migration_config: Stores specific configuration for migrations :type migration_config: dict """ self.alembic_ini_path = migration_config.get('alembic_ini_path', '') self.config = alembic_config.Config(self.alembic_ini_path) + # TODO(viktors): Remove this, when we will use Alembic 0.7.5 or + # higher, because the ``attributes`` dictionary was + # added to Alembic in version 0.7.5. + if not hasattr(self.config, 'attributes'): + self.config.attributes = {} # option should be used if script is not in default directory repo_path = migration_config.get('alembic_repo_path') if repo_path: self.config.set_main_option('script_location', repo_path) - self.db_url = migration_config['db_url'] + self.engine = engine def upgrade(self, version): - return alembic.command.upgrade(self.config, version or 'head') + with self.engine.begin() as connection: + self.config.attributes['connection'] = connection + return alembic.command.upgrade(self.config, version or 'head') def downgrade(self, version): if isinstance(version, int) or version is None or version.isdigit(): version = 'base' - return alembic.command.downgrade(self.config, version) + with self.engine.begin() as connection: + self.config.attributes['connection'] = connection + return alembic.command.downgrade(self.config, version) def version(self): - engine = db_session.create_engine(self.db_url) - with engine.connect() as conn: + with self.engine.connect() as conn: context = alembic_migration.MigrationContext.configure(conn) return context.get_current_revision() @@ -65,8 +74,10 @@ class AlembicExtension(ext_base.MigrationExtensionBase): state :type autogenerate: bool """ - return alembic.command.revision(self.config, message=message, - autogenerate=autogenerate) + with self.engine.begin() as connection: + self.config.attributes['connection'] = connection + return alembic.command.revision(self.config, message=message, + autogenerate=autogenerate) def stamp(self, revision): """Stamps database with provided revision. 
@@ -75,4 +86,6 @@ class AlembicExtension(ext_base.MigrationExtensionBase): database with most recent revision :type revision: string """ - return alembic.command.stamp(self.config, revision=revision) + with self.engine.begin() as connection: + self.config.attributes['connection'] = connection + return alembic.command.stamp(self.config, revision=revision) diff --git a/oslo_db/sqlalchemy/migration_cli/ext_migrate.py b/oslo_db/sqlalchemy/migration_cli/ext_migrate.py index e31ee3d..eb72818 100644 --- a/oslo_db/sqlalchemy/migration_cli/ext_migrate.py +++ b/oslo_db/sqlalchemy/migration_cli/ext_migrate.py @@ -16,7 +16,6 @@ import os from oslo_db._i18n import _LE from oslo_db.sqlalchemy import migration from oslo_db.sqlalchemy.migration_cli import ext_base -from oslo_db.sqlalchemy import session as db_session LOG = logging.getLogger(__name__) @@ -31,11 +30,10 @@ class MigrateExtension(ext_base.MigrationExtensionBase): order = 1 - def __init__(self, migration_config): + def __init__(self, engine, migration_config): + self.engine = engine self.repository = migration_config.get('migration_repo_path', '') self.init_version = migration_config.get('init_version', 0) - self.db_url = migration_config['db_url'] - self.engine = db_session.create_engine(self.db_url) @property def enabled(self): diff --git a/oslo_db/sqlalchemy/migration_cli/manager.py b/oslo_db/sqlalchemy/migration_cli/manager.py index c8ab30e..9b55887 100644 --- a/oslo_db/sqlalchemy/migration_cli/manager.py +++ b/oslo_db/sqlalchemy/migration_cli/manager.py @@ -10,6 +10,7 @@ # License for the specific language governing permissions and limitations # under the License. +import sqlalchemy from stevedore import enabled @@ -23,11 +24,21 @@ def check_plugin_enabled(ext): class MigrationManager(object): - def __init__(self, migration_config): + def __init__(self, migration_config, engine=None): + if engine is None: + if migration_config.get('db_url'): + engine = sqlalchemy.create_engine( + migration_config['db_url'], + poolclass=sqlalchemy.pool.NullPool, + ) + else: + raise ValueError('Either database url or engine' + ' must be provided.') + self._manager = enabled.EnabledExtensionManager( MIGRATION_NAMESPACE, check_plugin_enabled, - invoke_kwds={'migration_config': migration_config}, + invoke_args=(engine, migration_config), invoke_on_load=True ) if not self._plugins: @@ -57,7 +68,7 @@ class MigrationManager(object): last = None for plugin in self._plugins: version = plugin.version() - if version: + if version is not None: last = version return last diff --git a/oslo_db/sqlalchemy/models.py b/oslo_db/sqlalchemy/models.py index 818c1b4..80fa01d 100644 --- a/oslo_db/sqlalchemy/models.py +++ b/oslo_db/sqlalchemy/models.py @@ -22,7 +22,7 @@ SQLAlchemy models. 
import six -from oslo.utils import timeutils +from oslo_utils import timeutils from sqlalchemy import Column, Integer from sqlalchemy import DateTime from sqlalchemy.orm import object_mapper diff --git a/oslo_db/sqlalchemy/provision.py b/oslo_db/sqlalchemy/provision.py index cce7025..ffb0cca 100644 --- a/oslo_db/sqlalchemy/provision.py +++ b/oslo_db/sqlalchemy/provision.py @@ -27,9 +27,11 @@ import six from six import moves import sqlalchemy from sqlalchemy.engine import url as sa_url +from sqlalchemy import schema from oslo_db._i18n import _LI from oslo_db import exception +from oslo_db.sqlalchemy.compat import utils as compat_utils from oslo_db.sqlalchemy import session from oslo_db.sqlalchemy import utils @@ -56,6 +58,9 @@ class ProvisionedDatabase(object): self.backend.create_named_database(self.db_token) self.engine = self.backend.provisioned_engine(self.db_token) + def drop_all_objects(self): + self.backend.drop_all_objects(self.engine) + def dispose(self): self.engine.dispose() self.backend.drop_named_database(self.db_token) @@ -179,6 +184,15 @@ class Backend(object): self.engine, ident, conditional=conditional) + def drop_all_objects(self, engine): + """Drop all database objects. + + Drops all database objects remaining on the default schema of the + given engine. + + """ + self.impl.drop_all_objects(engine) + def database_exists(self, ident): """Return True if a database of the given name exists.""" @@ -246,6 +260,8 @@ class BackendImpl(object): default_engine_kwargs = {} + supports_drop_fk = True + @classmethod def all_impls(cls): """Return an iterator of all possible BackendImpl objects. @@ -294,6 +310,49 @@ class BackendImpl(object): def drop_named_database(self, engine, ident, conditional=False): """Drop a database with the given name.""" + def drop_all_objects(self, engine): + """Drop all database objects. + + Drops all database objects remaining on the default schema of the + given engine. + + Per-db implementations will also need to drop items specific to those + systems, such as sequences, custom types (e.g. pg ENUM), etc. + + """ + + with engine.begin() as conn: + inspector = sqlalchemy.inspect(engine) + metadata = schema.MetaData() + tbs = [] + all_fks = [] + + for table_name in inspector.get_table_names(): + fks = [] + for fk in inspector.get_foreign_keys(table_name): + # note that SQLite reflection does not have names + # for foreign keys until SQLAlchemy 1.0 + if not fk['name']: + continue + fks.append( + schema.ForeignKeyConstraint((), (), name=fk['name']) + ) + table = schema.Table(table_name, metadata, *fks) + tbs.append(table) + all_fks.extend(fks) + + if self.supports_drop_fk: + for fkc in all_fks: + conn.execute(schema.DropConstraint(fkc)) + + for table in tbs: + conn.execute(schema.DropTable(table)) + + self.drop_additional_objects(conn) + + def drop_additional_objects(self, conn): + pass + def provisioned_engine(self, base_url, ident): """Return a provisioned engine. 
@@ -344,6 +403,9 @@ class MySQLBackendImpl(BackendImpl): @BackendImpl.impl.dispatch_for("sqlite") class SQLiteBackendImpl(BackendImpl): + + supports_drop_fk = False + def create_opportunistic_driver_url(self): return "sqlite://" @@ -394,6 +456,12 @@ class PostgresqlBackendImpl(BackendImpl): else: conn.execute("DROP DATABASE %s" % ident) + def drop_additional_objects(self, conn): + enums = compat_utils.get_postgresql_enums(conn) + + for e in enums: + conn.execute("DROP TYPE %s" % e) + def database_exists(self, engine, ident): return bool( engine.scalar( diff --git a/oslo_db/sqlalchemy/session.py b/oslo_db/sqlalchemy/session.py index ef253b9..7e33075 100644 --- a/oslo_db/sqlalchemy/session.py +++ b/oslo_db/sqlalchemy/session.py @@ -283,7 +283,7 @@ import logging import re import time -from oslo.utils import timeutils +from oslo_utils import timeutils import six import sqlalchemy.orm from sqlalchemy import pool diff --git a/oslo_db/sqlalchemy/test_base.py b/oslo_db/sqlalchemy/test_base.py index aaff621..601a2c8 100644 --- a/oslo_db/sqlalchemy/test_base.py +++ b/oslo_db/sqlalchemy/test_base.py @@ -61,6 +61,7 @@ class DbFixture(fixtures.Fixture): msg = '%s backend is not available.' % self.DRIVER return self.test.skip(msg) else: + self.test.provision = self.provision self.test.engine = self.provision.engine self.addCleanup(setattr, self.test, 'engine', None) self.test.sessionmaker = session.get_maker(self.test.engine) diff --git a/oslo_db/sqlalchemy/test_migrations.py b/oslo_db/sqlalchemy/test_migrations.py index 9b65421..7627d21 100644 --- a/oslo_db/sqlalchemy/test_migrations.py +++ b/oslo_db/sqlalchemy/test_migrations.py @@ -25,9 +25,7 @@ import alembic.migration import pkg_resources as pkg import six import sqlalchemy -from sqlalchemy.engine import reflection import sqlalchemy.exc -from sqlalchemy import schema import sqlalchemy.sql.expression as expr import sqlalchemy.types as types @@ -486,30 +484,7 @@ class ModelsMigrationsSync(object): return insp_def != "'%s'::character varying" % meta_def.arg def _cleanup(self): - engine = self.get_engine() - with engine.begin() as conn: - inspector = reflection.Inspector.from_engine(engine) - metadata = schema.MetaData() - tbs = [] - all_fks = [] - - for table_name in inspector.get_table_names(): - fks = [] - for fk in inspector.get_foreign_keys(table_name): - if not fk['name']: - continue - fks.append( - schema.ForeignKeyConstraint((), (), name=fk['name']) - ) - table = schema.Table(table_name, metadata, *fks) - tbs.append(table) - all_fks.extend(fks) - - for fkc in all_fks: - conn.execute(schema.DropConstraint(fkc)) - - for table in tbs: - conn.execute(schema.DropTable(table)) + self.provision.drop_all_objects() FKInfo = collections.namedtuple('fk_info', ['constrained_columns', 'referred_table', diff --git a/oslo_db/sqlalchemy/utils.py b/oslo_db/sqlalchemy/utils.py index 919ac9e..5505b79 100644 --- a/oslo_db/sqlalchemy/utils.py +++ b/oslo_db/sqlalchemy/utils.py @@ -20,7 +20,7 @@ import collections import logging import re -from oslo.utils import timeutils +from oslo_utils import timeutils import six import sqlalchemy from sqlalchemy import Boolean diff --git a/oslo_db/tests/old_import_api/sqlalchemy/test_migrate_cli.py b/oslo_db/tests/old_import_api/sqlalchemy/test_migrate_cli.py index 135d44e..660a6af 100644 --- a/oslo_db/tests/old_import_api/sqlalchemy/test_migrate_cli.py +++ b/oslo_db/tests/old_import_api/sqlalchemy/test_migrate_cli.py @@ -12,6 +12,7 @@ import mock from oslotest import base as test_base +import sqlalchemy from 
oslo.db.sqlalchemy.migration_cli import ext_alembic from oslo.db.sqlalchemy.migration_cli import ext_migrate @@ -35,7 +36,9 @@ class TestAlembicExtension(test_base.BaseTestCase): def setUp(self): self.migration_config = {'alembic_ini_path': '.', 'db_url': 'sqlite://'} - self.alembic = ext_alembic.AlembicExtension(self.migration_config) + self.engine = sqlalchemy.create_engine(self.migration_config['db_url']) + self.alembic = ext_alembic.AlembicExtension( + self.engine, self.migration_config) super(TestAlembicExtension, self).setUp() def test_check_enabled_true(self, command): @@ -52,7 +55,8 @@ class TestAlembicExtension(test_base.BaseTestCase): Verifies enabled returns False on empty alembic_ini_path variable """ self.migration_config['alembic_ini_path'] = '' - alembic = ext_alembic.AlembicExtension(self.migration_config) + alembic = ext_alembic.AlembicExtension( + self.engine, self.migration_config) self.assertFalse(alembic.enabled) def test_upgrade_none(self, command): @@ -91,14 +95,16 @@ class TestAlembicExtension(test_base.BaseTestCase): self.assertIsNone(version) -@mock.patch(('oslo.db.sqlalchemy.migration_cli.' +@mock.patch(('oslo_db.sqlalchemy.migration_cli.' 'ext_migrate.migration')) class TestMigrateExtension(test_base.BaseTestCase): def setUp(self): self.migration_config = {'migration_repo_path': '.', 'db_url': 'sqlite://'} - self.migrate = ext_migrate.MigrateExtension(self.migration_config) + self.engine = sqlalchemy.create_engine(self.migration_config['db_url']) + self.migrate = ext_migrate.MigrateExtension( + self.engine, self.migration_config) super(TestMigrateExtension, self).setUp() def test_check_enabled_true(self, migration): @@ -106,7 +112,8 @@ class TestMigrateExtension(test_base.BaseTestCase): def test_check_enabled_false(self, migration): self.migration_config['migration_repo_path'] = '' - migrate = ext_migrate.MigrateExtension(self.migration_config) + migrate = ext_migrate.MigrateExtension( + self.engine, self.migration_config) self.assertFalse(migrate.enabled) def test_upgrade_head(self, migration): @@ -143,7 +150,8 @@ class TestMigrateExtension(test_base.BaseTestCase): def test_change_init_version(self, migration): self.migration_config['init_version'] = 101 - migrate = ext_migrate.MigrateExtension(self.migration_config) + migrate = ext_migrate.MigrateExtension( + self.engine, self.migration_config) migrate.downgrade(None) migration.db_sync.assert_called_once_with( migrate.engine, @@ -158,9 +166,11 @@ class TestMigrationManager(test_base.BaseTestCase): self.migration_config = {'alembic_ini_path': '.', 'migrate_repo_path': '.', 'db_url': 'sqlite://'} + engine = sqlalchemy.create_engine(self.migration_config['db_url']) self.migration_manager = manager.MigrationManager( - self.migration_config) + self.migration_config, engine) self.ext = mock.Mock() + self.ext.obj.version = mock.Mock(return_value=0) self.migration_manager._manager.extensions = [self.ext] super(TestMigrationManager, self).setUp() @@ -180,6 +190,10 @@ class TestMigrationManager(test_base.BaseTestCase): self.migration_manager.version() self.ext.obj.version.assert_called_once_with() + def test_version_return_value(self): + version = self.migration_manager.version() + self.assertEqual(0, version) + def test_revision_message_autogenerate(self): self.migration_manager.revision('test', True) self.ext.obj.revision.assert_called_once_with('test', True) @@ -192,6 +206,13 @@ class TestMigrationManager(test_base.BaseTestCase): self.migration_manager.stamp('stamp') 
self.ext.obj.stamp.assert_called_once_with('stamp') + def test_wrong_config(self): + err = self.assertRaises(ValueError, + manager.MigrationManager, + {'wrong_key': 'sqlite://'}) + self.assertEqual('Either database url or engine must be provided.', + err.args[0]) + class TestMigrationRightOrder(test_base.BaseTestCase): @@ -199,8 +220,9 @@ class TestMigrationRightOrder(test_base.BaseTestCase): self.migration_config = {'alembic_ini_path': '.', 'migrate_repo_path': '.', 'db_url': 'sqlite://'} + engine = sqlalchemy.create_engine(self.migration_config['db_url']) self.migration_manager = manager.MigrationManager( - self.migration_config) + self.migration_config, engine) self.first_ext = MockWithCmp() self.first_ext.obj.order = 1 self.first_ext.obj.upgrade.return_value = 100 diff --git a/oslo_db/tests/old_import_api/test_api.py b/oslo_db/tests/old_import_api/test_api.py index aa69d55..1fe3bf3 100644 --- a/oslo_db/tests/old_import_api/test_api.py +++ b/oslo_db/tests/old_import_api/test_api.py @@ -33,32 +33,60 @@ def get_backend(): class DBAPI(object): - def _api_raise(self, *args, **kwargs): - """Simulate raising a database-has-gone-away error + def _api_raise(self, exception_to_raise, *args, **kwargs): + """Simulate raising a database error This method creates a fake OperationalError with an ID matching a valid MySQL "database has gone away" situation. It also decrements the error_counter so that we can artificially keep track of how many times this function is called by the wrapper. When error_counter reaches zero, this function returns True, simulating - the database becoming available again and the query succeeding. + the query succeeding. """ if self.error_counter > 0: self.error_counter -= 1 orig = sqla.exc.DBAPIError(False, False, False) orig.args = [2006, 'Test raise operational error'] - e = exception.DBConnectionError(orig) + exception_type = type(exception_to_raise) + e = exception_type(orig) raise e else: return True - def api_raise_default(self, *args, **kwargs): - return self._api_raise(*args, **kwargs) + def api_raise_conn_err_default(self, *args, **kwargs): + return self._api_raise(exception.DBConnectionError(), *args, **kwargs) @api.safe_for_db_retry - def api_raise_enable_retry(self, *args, **kwargs): - return self._api_raise(*args, **kwargs) + def api_raise_conn_err_enable_retry(self, *args, **kwargs): + return self._api_raise(exception.DBConnectionError(), *args, **kwargs) + + def api_raise_deadlock_err_default(self, *args, **kwargs): + return self._api_raise(exception.DBDeadlock(), *args, **kwargs) + + @api.retry_on_deadlock + def api_raise_deadlock_err_decorated(self, *args, **kwargs): + return self._api_raise(exception.DBDeadlock(), *args, **kwargs) + + @api.safe_for_db_retry + def api_raise_deadlock_safe_db_retry_decorated(self, *args, **kwargs): + return self._api_raise(exception.DBDeadlock(), *args, **kwargs) + + @api.safe_for_db_retry + @api.retry_on_deadlock + def api_raise_deadlock_err_two_decorators(self, *args, **kwargs): + if self.error_counter > 2: + return False + if self.error_counter == 2: + self.error_counter -= 1 + orig = sqla.exc.DBAPIError(False, False, False) + orig.args = [2006, 'Test raise operational error'] + raise exception.DBConnectionError(orig) + if self.error_counter == 1: + self.error_counter -= 1 + raise exception.DBDeadlock() + else: + return True def api_class_call1(_self, *args, **kwargs): return args, kwargs @@ -103,14 +131,15 @@ class DBReconnectTestCase(DBAPITestCase): self.dbapi = api.DBAPI('sqlalchemy', {'sqlalchemy': __name__}) 
self.test_db_api.error_counter = 5 - self.assertRaises(exception.DBConnectionError, self.dbapi._api_raise) + self.assertRaises(exception.DBConnectionError, + self.dbapi.api_raise_conn_err_default) def test_raise_connection_error_decorated(self): self.dbapi = api.DBAPI('sqlalchemy', {'sqlalchemy': __name__}) self.test_db_api.error_counter = 5 self.assertRaises(exception.DBConnectionError, - self.dbapi.api_raise_enable_retry) + self.dbapi.api_raise_conn_err_enable_retry) self.assertEqual(4, self.test_db_api.error_counter, 'Unexpected retry') def test_raise_connection_error_enabled(self): @@ -120,7 +149,7 @@ class DBReconnectTestCase(DBAPITestCase): self.test_db_api.error_counter = 5 self.assertRaises(exception.DBConnectionError, - self.dbapi.api_raise_default) + self.dbapi.api_raise_conn_err_default) self.assertEqual(4, self.test_db_api.error_counter, 'Unexpected retry') def test_retry_one(self): @@ -129,12 +158,9 @@ class DBReconnectTestCase(DBAPITestCase): use_db_reconnect=True, retry_interval=1) - try: - func = self.dbapi.api_raise_enable_retry - self.test_db_api.error_counter = 1 - self.assertTrue(func(), 'Single retry did not succeed.') - except Exception: - self.fail('Single retry raised an un-wrapped error.') + func = self.dbapi.api_raise_conn_err_enable_retry + self.test_db_api.error_counter = 1 + self.assertTrue(func(), 'Single retry did not succeed.') self.assertEqual( 0, self.test_db_api.error_counter, @@ -147,12 +173,9 @@ class DBReconnectTestCase(DBAPITestCase): retry_interval=1, inc_retry_interval=False) - try: - func = self.dbapi.api_raise_enable_retry - self.test_db_api.error_counter = 2 - self.assertTrue(func(), 'Multiple retry did not succeed.') - except Exception: - self.fail('Multiple retry raised an un-wrapped error.') + func = self.dbapi.api_raise_conn_err_enable_retry + self.test_db_api.error_counter = 2 + self.assertTrue(func(), 'Multiple retry did not succeed.') self.assertEqual( 0, self.test_db_api.error_counter, @@ -166,7 +189,7 @@ class DBReconnectTestCase(DBAPITestCase): inc_retry_interval=False, max_retries=3) - func = self.dbapi.api_raise_enable_retry + func = self.dbapi.api_raise_conn_err_enable_retry self.test_db_api.error_counter = 5 self.assertRaises( exception.DBError, func, @@ -175,3 +198,105 @@ class DBReconnectTestCase(DBAPITestCase): self.assertNotEqual( 0, self.test_db_api.error_counter, 'Retry did not stop after sql_max_retries iterations.') + + +class DBDeadlockTestCase(DBAPITestCase): + def setUp(self): + super(DBDeadlockTestCase, self).setUp() + + self.test_db_api = DBAPI() + patcher = mock.patch(__name__ + '.get_backend', + return_value=self.test_db_api) + patcher.start() + self.addCleanup(patcher.stop) + + def test_raise_deadlock_error(self): + self.dbapi = api.DBAPI('sqlalchemy', {'sqlalchemy': __name__}) + + self.test_db_api.error_counter = 5 + self.assertRaises( + exception.DBDeadlock, + self.dbapi.api_raise_deadlock_err_default) + + def test_raise_deadlock_error_db_reconnect_enabled(self): + self.dbapi = api.DBAPI('sqlalchemy', + {'sqlalchemy': __name__}, + use_db_reconnect=True) + + self.test_db_api.error_counter = 5 + self.assertRaises(exception.DBDeadlock, + self.dbapi.api_raise_deadlock_err_default) + self.assertEqual(4, self.test_db_api.error_counter, 'Unexpected retry') + + def test_raise_deadlock_error_connection_error_decorated(self): + self.dbapi = api.DBAPI('sqlalchemy', + {'sqlalchemy': __name__}, + use_db_reconnect=True) + + self.test_db_api.error_counter = 5 + self.assertRaises( + exception.DBDeadlock, + 
self.dbapi.api_raise_deadlock_safe_db_retry_decorated) + + def test_retry_one(self): + self.dbapi = api.DBAPI('sqlalchemy', + {'sqlalchemy': __name__}, + retry_interval=1) + + func = self.dbapi.api_raise_deadlock_err_decorated + self.test_db_api.error_counter = 1 + self.assertTrue(func(), 'Single retry did not succeed.') + + self.assertEqual( + 0, self.test_db_api.error_counter, + 'Counter not decremented, retry logic probably failed.') + + def test_retry_two(self): + self.dbapi = api.DBAPI('sqlalchemy', + {'sqlalchemy': __name__}, + retry_interval=1, + inc_retry_interval=False) + + func = self.dbapi.api_raise_deadlock_err_decorated + self.test_db_api.error_counter = 2 + self.assertTrue(func(), 'Multiple retry did not succeed.') + + self.assertEqual( + 0, self.test_db_api.error_counter, + 'Counter not decremented, retry logic probably failed.') + + def test_retry_two_different_exception(self): + self.dbapi = api.DBAPI('sqlalchemy', + {'sqlalchemy': __name__}, + use_db_reconnect=True, + retry_interval=1, + inc_retry_interval=False) + + func = self.dbapi.api_raise_deadlock_err_two_decorators + self.test_db_api.error_counter = 2 + self.assertTrue(func(), 'Multiple retry did not succeed.') + + self.assertEqual( + 0, self.test_db_api.error_counter, + 'Counter not decremented, retry logic probably failed.') + + +class DBRetryRequestCase(DBAPITestCase): + def test_retry_wrapper_succeeds(self): + @api.wrap_db_retry(max_retries=10, retry_on_request=True) + def some_method(): + pass + + some_method() + + def test_retry_wrapper_reaches_limit(self): + max_retries = 10 + + @api.wrap_db_retry(max_retries=10, retry_on_request=True) + def some_method(res): + res['result'] += 1 + raise exception.RetryRequest(ValueError()) + + res = {'result': 0} + self.assertRaises(ValueError, some_method, res) + self.assertEqual(max_retries + 1, res['result']) diff --git a/oslo_db/tests/sqlalchemy/test_migrate_cli.py b/oslo_db/tests/sqlalchemy/test_migrate_cli.py index c1ab53c..209dfab 100644 --- a/oslo_db/tests/sqlalchemy/test_migrate_cli.py +++ b/oslo_db/tests/sqlalchemy/test_migrate_cli.py @@ -12,6 +12,7 @@ import mock from oslotest import base as test_base +import sqlalchemy from oslo_db.sqlalchemy.migration_cli import ext_alembic from oslo_db.sqlalchemy.migration_cli import ext_migrate @@ -35,7 +36,9 @@ class TestAlembicExtension(test_base.BaseTestCase): def setUp(self): self.migration_config = {'alembic_ini_path': '.', 'db_url': 'sqlite://'} - self.alembic = ext_alembic.AlembicExtension(self.migration_config) + self.engine = sqlalchemy.create_engine(self.migration_config['db_url']) + self.alembic = ext_alembic.AlembicExtension( + self.engine, self.migration_config) super(TestAlembicExtension, self).setUp() def test_check_enabled_true(self, command): @@ -52,7 +55,8 @@ class TestAlembicExtension(test_base.BaseTestCase): Verifies enabled returns False on empty alembic_ini_path variable """ self.migration_config['alembic_ini_path'] = '' - alembic = ext_alembic.AlembicExtension(self.migration_config) + alembic = ext_alembic.AlembicExtension( + self.engine, self.migration_config) self.assertFalse(alembic.enabled) def test_upgrade_none(self, command): @@ -98,7 +102,9 @@ class TestMigrateExtension(test_base.BaseTestCase): def setUp(self): self.migration_config = {'migration_repo_path': '.', 'db_url': 'sqlite://'} - self.migrate = ext_migrate.MigrateExtension(self.migration_config) + self.engine = sqlalchemy.create_engine(self.migration_config['db_url']) + self.migrate = ext_migrate.MigrateExtension( + self.engine, 
self.migration_config) super(TestMigrateExtension, self).setUp() def test_check_enabled_true(self, migration): @@ -106,7 +112,8 @@ class TestMigrateExtension(test_base.BaseTestCase): def test_check_enabled_false(self, migration): self.migration_config['migration_repo_path'] = '' - migrate = ext_migrate.MigrateExtension(self.migration_config) + migrate = ext_migrate.MigrateExtension( + self.engine, self.migration_config) self.assertFalse(migrate.enabled) def test_upgrade_head(self, migration): @@ -143,7 +150,8 @@ class TestMigrateExtension(test_base.BaseTestCase): def test_change_init_version(self, migration): self.migration_config['init_version'] = 101 - migrate = ext_migrate.MigrateExtension(self.migration_config) + migrate = ext_migrate.MigrateExtension( + self.engine, self.migration_config) migrate.downgrade(None) migration.db_sync.assert_called_once_with( migrate.engine, @@ -158,9 +166,11 @@ class TestMigrationManager(test_base.BaseTestCase): self.migration_config = {'alembic_ini_path': '.', 'migrate_repo_path': '.', 'db_url': 'sqlite://'} + engine = sqlalchemy.create_engine(self.migration_config['db_url']) self.migration_manager = manager.MigrationManager( - self.migration_config) + self.migration_config, engine) self.ext = mock.Mock() + self.ext.obj.version = mock.Mock(return_value=0) self.migration_manager._manager.extensions = [self.ext] super(TestMigrationManager, self).setUp() @@ -180,6 +190,10 @@ class TestMigrationManager(test_base.BaseTestCase): self.migration_manager.version() self.ext.obj.version.assert_called_once_with() + def test_version_return_value(self): + version = self.migration_manager.version() + self.assertEqual(0, version) + def test_revision_message_autogenerate(self): self.migration_manager.revision('test', True) self.ext.obj.revision.assert_called_once_with('test', True) @@ -192,6 +206,13 @@ class TestMigrationManager(test_base.BaseTestCase): self.migration_manager.stamp('stamp') self.ext.obj.stamp.assert_called_once_with('stamp') + def test_wrong_config(self): + err = self.assertRaises(ValueError, + manager.MigrationManager, + {'wrong_key': 'sqlite://'}) + self.assertEqual('Either database url or engine must be provided.', + err.args[0]) + class TestMigrationRightOrder(test_base.BaseTestCase): @@ -199,8 +220,9 @@ class TestMigrationRightOrder(test_base.BaseTestCase): self.migration_config = {'alembic_ini_path': '.', 'migrate_repo_path': '.', 'db_url': 'sqlite://'} + engine = sqlalchemy.create_engine(self.migration_config['db_url']) self.migration_manager = manager.MigrationManager( - self.migration_config) + self.migration_config, engine) self.first_ext = MockWithCmp() self.first_ext.obj.order = 1 self.first_ext.obj.upgrade.return_value = 100 diff --git a/oslo_db/tests/sqlalchemy/test_options.py b/oslo_db/tests/sqlalchemy/test_options.py index 22a6e4f..f58c530 100644 --- a/oslo_db/tests/sqlalchemy/test_options.py +++ b/oslo_db/tests/sqlalchemy/test_options.py @@ -11,8 +11,8 @@ # License for the specific language governing permissions and limitations # under the License. 
-from oslo.config import cfg -from oslo.config import fixture as config +from oslo_config import cfg +from oslo_config import fixture as config from oslo_db import options from oslo_db.tests import utils as test_utils diff --git a/oslo_db/tests/sqlalchemy/test_provision.py b/oslo_db/tests/sqlalchemy/test_provision.py new file mode 100644 index 0000000..7c57de3 --- /dev/null +++ b/oslo_db/tests/sqlalchemy/test_provision.py @@ -0,0 +1,81 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + + +from sqlalchemy import inspect +from sqlalchemy import schema +from sqlalchemy import types + +from oslo_db.sqlalchemy import test_base + + +class DropAllObjectsTest(test_base.DbTestCase): + + def setUp(self): + super(DropAllObjectsTest, self).setUp() + + self.metadata = metadata = schema.MetaData() + schema.Table( + 'a', metadata, + schema.Column('id', types.Integer, primary_key=True), + mysql_engine='InnoDB' + ) + schema.Table( + 'b', metadata, + schema.Column('id', types.Integer, primary_key=True), + schema.Column('a_id', types.Integer, schema.ForeignKey('a.id')), + mysql_engine='InnoDB' + ) + schema.Table( + 'c', metadata, + schema.Column('id', types.Integer, primary_key=True), + schema.Column('b_id', types.Integer, schema.ForeignKey('b.id')), + schema.Column( + 'd_id', types.Integer, + schema.ForeignKey('d.id', use_alter=True, name='c_d_fk')), + mysql_engine='InnoDB' + ) + schema.Table( + 'd', metadata, + schema.Column('id', types.Integer, primary_key=True), + schema.Column('c_id', types.Integer, schema.ForeignKey('c.id')), + mysql_engine='InnoDB' + ) + + metadata.create_all(self.engine, checkfirst=False) + # will drop nothing if the test worked + self.addCleanup(metadata.drop_all, self.engine, checkfirst=True) + + def test_drop_all(self): + insp = inspect(self.engine) + self.assertEqual( + set(['a', 'b', 'c', 'd']), + set(insp.get_table_names()) + ) + + self.provision.drop_all_objects() + + insp = inspect(self.engine) + self.assertEqual( + [], + insp.get_table_names() + ) + + +class MySQLRetainSchemaTest( + DropAllObjectsTest, test_base.MySQLOpportunisticTestCase): + pass + + +class PostgresqlRetainSchemaTest( + DropAllObjectsTest, test_base.PostgreSQLOpportunisticTestCase): + pass diff --git a/oslo_db/tests/sqlalchemy/test_sqlalchemy.py b/oslo_db/tests/sqlalchemy/test_sqlalchemy.py index 7f54595..bcc4a1c 100644 --- a/oslo_db/tests/sqlalchemy/test_sqlalchemy.py +++ b/oslo_db/tests/sqlalchemy/test_sqlalchemy.py @@ -21,7 +21,7 @@ import logging import fixtures import mock -from oslo.config import cfg +from oslo_config import cfg from oslotest import base as oslo_test import sqlalchemy from sqlalchemy import Column, MetaData, Table diff --git a/oslo_db/tests/test_api.py b/oslo_db/tests/test_api.py index 5874a01..18dc586 100644 --- a/oslo_db/tests/test_api.py +++ b/oslo_db/tests/test_api.py @@ -16,8 +16,8 @@ """Unit tests for DB API.""" import mock -from oslo.config import cfg -from oslo.utils import importutils +from oslo_config import cfg +from oslo_utils import importutils from oslo_db import api from 
oslo_db import exception @@ -175,3 +175,24 @@ class DBReconnectTestCase(DBAPITestCase): self.assertNotEqual( 0, self.test_db_api.error_counter, 'Retry did not stop after sql_max_retries iterations.') + + +class DBRetryRequestCase(DBAPITestCase): + def test_retry_wrapper_succeeds(self): + @api.wrap_db_retry(max_retries=10, retry_on_request=True) + def some_method(): + pass + + some_method() + + def test_retry_wrapper_reaches_limit(self): + max_retries = 10 + + @api.wrap_db_retry(max_retries=10, retry_on_request=True) + def some_method(res): + res['result'] += 1 + raise exception.RetryRequest(ValueError()) + + res = {'result': 0} + self.assertRaises(ValueError, some_method, res) + self.assertEqual(max_retries + 1, res['result']) diff --git a/oslo_db/tests/utils.py b/oslo_db/tests/utils.py index 44eb1ae..00eb468 100644 --- a/oslo_db/tests/utils.py +++ b/oslo_db/tests/utils.py @@ -15,7 +15,7 @@ import contextlib -from oslo.config import cfg +from oslo_config import cfg from oslotest import base as test_base from oslotest import moxstubout import six diff --git a/requirements.txt b/requirements.txt index 3522b8d..9cacdd6 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,7 +6,7 @@ pbr>=0.6,!=0.7,<1.0 alembic>=0.7.1 Babel>=1.3 iso8601>=0.1.9 -oslo.i18n>=1.0.0 # Apache-2.0 +oslo.i18n>=1.3.0 # Apache-2.0 oslo.config>=1.6.0 # Apache-2.0 oslo.utils>=1.2.0 # Apache-2.0 SQLAlchemy>=0.9.7,<=0.9.99 diff --git a/test-requirements-py2.txt b/test-requirements-py2.txt index 2cb1c75..e9e8455 100644 --- a/test-requirements-py2.txt +++ b/test-requirements-py2.txt @@ -17,4 +17,4 @@ oslotest>=1.2.0 # Apache-2.0 testrepository>=0.0.18 testscenarios>=0.4 testtools>=0.9.36,!=1.2.0 -tempest-lib +tempest-lib>=0.1.0 diff --git a/test-requirements-py3.txt b/test-requirements-py3.txt index d248785..0d1d43b 100644 --- a/test-requirements-py3.txt +++ b/test-requirements-py3.txt @@ -16,7 +16,7 @@ oslotest>=1.2.0 # Apache-2.0 testrepository>=0.0.18 testscenarios>=0.4 testtools>=0.9.36,!=1.2.0 -tempest-lib +tempest-lib>=0.1.0 # TODO(harlowja): add in pymysql when able to... # https://review.openstack.org/#/c/123737 |
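The retry_on_deadlock and retry_on_request behaviour added in oslo_db/api.py can also be used by applying wrap_db_retry directly, as DBRetryRequestCase does above. Below is a minimal, self-contained sketch that is not part of the commit; the function names and the simulated failures are illustrative only.

    from oslo_db import api
    from oslo_db import exception


    @api.wrap_db_retry(max_retries=3, retry_on_deadlock=True)
    def save_record(attempts):
        # The first two calls simulate a deadlock; wrap_db_retry logs, sleeps
        # for retry_interval seconds and calls the function again.
        attempts['count'] += 1
        if attempts['count'] <= 2:
            raise exception.DBDeadlock()
        return 'saved'


    @api.wrap_db_retry(max_retries=3, retry_on_request=True)
    def optimistic_update(attempts):
        # RetryRequest carries the real error; once retries are exhausted the
        # wrapped inner_exc (here a ValueError) is re-raised for the caller.
        attempts['count'] += 1
        raise exception.RetryRequest(ValueError('lost update race'))


    state = {'count': 0}
    print(save_record(state))    # 'saved' after two simulated deadlocks

    state = {'count': 0}
    try:
        optimistic_update(state)
    except ValueError:
        print(state['count'])    # 4: one initial call plus max_retries retries

As the test_retry_wrapper_reaches_limit test above shows, a function wrapped with max_retries=N is invoked at most N + 1 times before the final exception propagates.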
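For the DBAPI loader path, backend functions opt in through the decorators instead of calling wrap_db_retry themselves. The sketch below mirrors the wiring exercised by DBReconnectTestCase and DBDeadlockTestCase; the Backend class and the 'myservice.db.sqlalchemy.api' module name are hypothetical.

    from oslo_db import api


    class Backend(object):
        @api.safe_for_db_retry   # retried on DBConnectionError, only when use_db_reconnect=True
        @api.retry_on_deadlock   # retried on DBDeadlock, independent of use_db_reconnect
        def create_record(self, values):
            return values


    def get_backend():
        # oslo_db.api.DBAPI imports the backend module and calls get_backend().
        return Backend()


    # In the consuming service, assuming this module is importable as
    # 'myservice.db.sqlalchemy.api':
    #
    #     dbapi = api.DBAPI('sqlalchemy',
    #                       {'sqlalchemy': 'myservice.db.sqlalchemy.api'},
    #                       use_db_reconnect=True,
    #                       retry_interval=1,
    #                       inc_retry_interval=False)
    #     dbapi.create_record({'id': 1})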
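MigrationManager now accepts a SQLAlchemy engine and passes it to the migration_cli extensions, with config['db_url'] kept as a fallback. A sketch under assumed paths (the alembic.ini and migrate repository locations are placeholders and must exist for the corresponding plugin to be enabled):

    import sqlalchemy

    from oslo_db.sqlalchemy.migration_cli import manager

    # Placeholder paths: point these at the alembic.ini and sqlalchemy-migrate
    # repository shipped with the consuming project.
    config = {
        'alembic_ini_path': '/etc/myservice/alembic.ini',
        'migration_repo_path': '/var/lib/myservice/migrate_repo',
    }

    engine = sqlalchemy.create_engine('sqlite://')

    # Passing the engine avoids each extension building its own from 'db_url'.
    # Omitting both the engine and config['db_url'] raises ValueError with
    # 'Either database url or engine must be provided.'
    migration_manager = manager.MigrationManager(config, engine)

    print(migration_manager.version())   # None until the database has been stamped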
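The provisioning fixture's drop_all_objects(), exercised by the new test_provision.py above, is also reachable from any DbTestCase through the provision attribute that DbFixture now sets on the test. A small sketch, with an illustrative table and test name:

    from sqlalchemy import Column, Integer, MetaData, Table
    from sqlalchemy import inspect

    from oslo_db.sqlalchemy import test_base


    class CleanupExampleTest(test_base.DbTestCase):
        def test_tables_are_dropped(self):
            metadata = MetaData()
            Table('example', metadata, Column('id', Integer, primary_key=True))
            metadata.create_all(self.engine)

            # Drops tables and foreign keys (and, on PostgreSQL, leftover ENUM
            # types) from the default schema of the provisioned engine.
            self.provision.drop_all_objects()

            self.assertEqual([], inspect(self.engine).get_table_names())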