From 276f7570d7af4a7a62d0e1ffb4edf904cfbf0600 Mon Sep 17 00:00:00 2001
From: Victor Sergeyev
Date: Fri, 18 Apr 2014 12:02:53 +0300
Subject: Make the tests pass

- add missing requirements
- add required modules from openstack.common
- add entry points to setup.cfg
- fix tests location
- fix incorrect imports of common modules
---
 .testr.conf | 4 +-
 openstack-common.conf | 12 +
 oslo/db/api.py | 4 +-
 oslo/db/exception.py | 2 +-
 oslo/db/openstack/__init__.py | 0
 oslo/db/openstack/common/__init__.py | 17 +
 oslo/db/openstack/common/context.py | 111 +++
 oslo/db/openstack/common/fixture/__init__.py | 0
 oslo/db/openstack/common/fixture/config.py | 85 +++
 oslo/db/openstack/common/fixture/moxstubout.py | 43 ++
 oslo/db/openstack/common/gettextutils.py | 449 ++++++++++++
 oslo/db/openstack/common/importutils.py | 73 ++
 oslo/db/openstack/common/timeutils.py | 210 ++++++
 oslo/db/sqlalchemy/migration.py | 2 +-
 oslo/db/sqlalchemy/migration_cli/ext_migrate.py | 2 +-
 oslo/db/sqlalchemy/migration_cli/manager.py | 2 +-
 oslo/db/sqlalchemy/models.py | 2 +-
 oslo/db/sqlalchemy/session.py | 4 +-
 oslo/db/sqlalchemy/test_migrations.py | 2 +-
 oslo/db/sqlalchemy/utils.py | 6 +-
 requirements.txt | 9 +-
 setup.cfg | 10 +-
 test-requirements.txt | 4 +-
 tests/__init__.py | 13 -
 tests/base.py | 3 +-
 tests/sqlalchemy/__init__.py | 0
 tests/sqlalchemy/test_migrate.py | 97 +++
 tests/sqlalchemy/test_migrate_cli.py | 217 ++++++
 tests/sqlalchemy/test_migration_common.py | 211 ++++++
 tests/sqlalchemy/test_migrations.py | 154 ++++
 tests/sqlalchemy/test_models.py | 132 ++++
 tests/sqlalchemy/test_options.py | 120 ++++
 tests/sqlalchemy/test_sqlalchemy.py | 542 ++++++++++++++
 tests/sqlalchemy/test_utils.py | 838 ++++++++++++++++++++++
 tests/test_api.py | 167 +++++
 tests/test_db.py | 28 -
 tests/unit/db/__init__.py | 0
 tests/unit/db/sqlalchemy/__init__.py | 0
 tests/unit/db/sqlalchemy/test_migrate.py | 97 ---
 tests/unit/db/sqlalchemy/test_migrate_cli.py | 217 ------
 tests/unit/db/sqlalchemy/test_migration_common.py | 211 ------
 tests/unit/db/sqlalchemy/test_migrations.py | 154 ----
 tests/unit/db/sqlalchemy/test_models.py | 132 ----
 tests/unit/db/sqlalchemy/test_options.py | 120 ----
 tests/unit/db/sqlalchemy/test_sqlalchemy.py | 542 --------------
 tests/unit/db/sqlalchemy/test_utils.py | 838 ----------------------
 tests/unit/db/test_api.py | 167 -----
 tests/utils.py | 29 +
 tox.ini | 9 +-
 49 files changed, 3549 insertions(+), 2542 deletions(-)
 create mode 100644 openstack-common.conf
 create mode 100644 oslo/db/openstack/__init__.py
 create mode 100644 oslo/db/openstack/common/__init__.py
 create mode 100644 oslo/db/openstack/common/context.py
 create mode 100644 oslo/db/openstack/common/fixture/__init__.py
 create mode 100644 oslo/db/openstack/common/fixture/config.py
 create mode 100644 oslo/db/openstack/common/fixture/moxstubout.py
 create mode 100644 oslo/db/openstack/common/gettextutils.py
 create mode 100644 oslo/db/openstack/common/importutils.py
 create mode 100644 oslo/db/openstack/common/timeutils.py
 create mode 100644 tests/sqlalchemy/__init__.py
 create mode 100644 tests/sqlalchemy/test_migrate.py
 create mode 100644 tests/sqlalchemy/test_migrate_cli.py
 create mode 100644 tests/sqlalchemy/test_migration_common.py
 create mode 100644 tests/sqlalchemy/test_migrations.py
 create mode 100644 tests/sqlalchemy/test_models.py
 create mode 100644 tests/sqlalchemy/test_options.py
 create mode 100644 tests/sqlalchemy/test_sqlalchemy.py
 create mode 100644 tests/sqlalchemy/test_utils.py
 create mode 100644 tests/test_api.py
 delete mode 100644
tests/test_db.py delete mode 100644 tests/unit/db/__init__.py delete mode 100644 tests/unit/db/sqlalchemy/__init__.py delete mode 100644 tests/unit/db/sqlalchemy/test_migrate.py delete mode 100644 tests/unit/db/sqlalchemy/test_migrate_cli.py delete mode 100644 tests/unit/db/sqlalchemy/test_migration_common.py delete mode 100644 tests/unit/db/sqlalchemy/test_migrations.py delete mode 100644 tests/unit/db/sqlalchemy/test_models.py delete mode 100644 tests/unit/db/sqlalchemy/test_options.py delete mode 100644 tests/unit/db/sqlalchemy/test_sqlalchemy.py delete mode 100644 tests/unit/db/sqlalchemy/test_utils.py delete mode 100644 tests/unit/db/test_api.py create mode 100644 tests/utils.py diff --git a/.testr.conf b/.testr.conf index fb62267..35d9ba4 100644 --- a/.testr.conf +++ b/.testr.conf @@ -2,6 +2,6 @@ test_command=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} \ OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} \ OS_TEST_TIMEOUT=${OS_TEST_TIMEOUT:-60} \ - ${PYTHON:-python} -m subunit.run discover -t ./ . $LISTOPT $IDOPTION + ${PYTHON:-python} -m subunit.run discover -t ./ ./tests $LISTOPT $IDOPTION test_id_option=--load-list $IDFILE -test_list_option=--list \ No newline at end of file +test_list_option=--list diff --git a/openstack-common.conf b/openstack-common.conf new file mode 100644 index 0000000..63e8e40 --- /dev/null +++ b/openstack-common.conf @@ -0,0 +1,12 @@ +[DEFAULT] + +# The list of modules to copy from oslo-incubator.git +module=context +module=gettextutils +module=fixture.moxstubout +module=importutils +module=fixture.config +module=timeutils + +# The base module to hold the copy of openstack.common +base=oslo.db diff --git a/oslo/db/api.py b/oslo/db/api.py index 28e9a82..a2a7d0b 100644 --- a/oslo/db/api.py +++ b/oslo/db/api.py @@ -26,8 +26,8 @@ import threading import time from oslo.db import exception -from openstack.common.gettextutils import _LE -from openstack.common import importutils +from oslo.db.openstack.common.gettextutils import _LE +from oslo.db.openstack.common import importutils LOG = logging.getLogger(__name__) diff --git a/oslo/db/exception.py b/oslo/db/exception.py index 601063e..fe9020e 100644 --- a/oslo/db/exception.py +++ b/oslo/db/exception.py @@ -18,7 +18,7 @@ import six -from openstack.common.gettextutils import _ +from oslo.db.openstack.common.gettextutils import _ class DBError(Exception): diff --git a/oslo/db/openstack/__init__.py b/oslo/db/openstack/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/oslo/db/openstack/common/__init__.py b/oslo/db/openstack/common/__init__.py new file mode 100644 index 0000000..d1223ea --- /dev/null +++ b/oslo/db/openstack/common/__init__.py @@ -0,0 +1,17 @@ +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
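The package body that follows registers a six "moved module" alias so that code importing mox through six.moves transparently gets the mox distribution on Python 2 and the mox3 port on Python 3. A minimal sketch of the idiom, outside the patch itself (the consumer import mirrors what moxstubout.py does later in this patch):

    import six

    # Register the alias once, at package import time: afterwards
    # six.moves.mox resolves to 'mox' on Python 2 and 'mox3.mox' on Python 3.
    six.add_move(six.MovedModule('mox', 'mox', 'mox3.mox'))

    # Any later consumer can then write:
    from six.moves import mox
    stubber = mox.Mox()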
+ +import six + + +six.add_move(six.MovedModule('mox', 'mox', 'mox3.mox')) diff --git a/oslo/db/openstack/common/context.py b/oslo/db/openstack/common/context.py new file mode 100644 index 0000000..3eeb445 --- /dev/null +++ b/oslo/db/openstack/common/context.py @@ -0,0 +1,111 @@ +# Copyright 2011 OpenStack Foundation. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +""" +Simple class that stores security context information in the web request. + +Projects should subclass this class if they wish to enhance the request +context or provide additional information in their specific WSGI pipeline. +""" + +import itertools +import uuid + + +def generate_request_id(): + return b'req-' + str(uuid.uuid4()).encode('ascii') + + +class RequestContext(object): + + """Helper class to represent useful information about a request context. + + Stores information about the security context under which the user + accesses the system, as well as additional request information. + """ + + user_idt_format = '{user} {tenant} {domain} {user_domain} {p_domain}' + + def __init__(self, auth_token=None, user=None, tenant=None, domain=None, + user_domain=None, project_domain=None, is_admin=False, + read_only=False, show_deleted=False, request_id=None, + instance_uuid=None): + self.auth_token = auth_token + self.user = user + self.tenant = tenant + self.domain = domain + self.user_domain = user_domain + self.project_domain = project_domain + self.is_admin = is_admin + self.read_only = read_only + self.show_deleted = show_deleted + self.instance_uuid = instance_uuid + if not request_id: + request_id = generate_request_id() + self.request_id = request_id + + def to_dict(self): + user_idt = ( + self.user_idt_format.format(user=self.user or '-', + tenant=self.tenant or '-', + domain=self.domain or '-', + user_domain=self.user_domain or '-', + p_domain=self.project_domain or '-')) + + return {'user': self.user, + 'tenant': self.tenant, + 'domain': self.domain, + 'user_domain': self.user_domain, + 'project_domain': self.project_domain, + 'is_admin': self.is_admin, + 'read_only': self.read_only, + 'show_deleted': self.show_deleted, + 'auth_token': self.auth_token, + 'request_id': self.request_id, + 'instance_uuid': self.instance_uuid, + 'user_identity': user_idt} + + +def get_admin_context(show_deleted=False): + context = RequestContext(None, + tenant=None, + is_admin=True, + show_deleted=show_deleted) + return context + + +def get_context_from_function_and_args(function, args, kwargs): + """Find an arg of type RequestContext and return it. + + This is useful in a couple of decorators where we don't + know much about the function we're wrapping. 
+ """ + + for arg in itertools.chain(kwargs.values(), args): + if isinstance(arg, RequestContext): + return arg + + return None + + +def is_user_context(context): + """Indicates if the request context is a normal user.""" + if not context: + return False + if context.is_admin: + return False + if not context.user_id or not context.project_id: + return False + return True diff --git a/oslo/db/openstack/common/fixture/__init__.py b/oslo/db/openstack/common/fixture/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/oslo/db/openstack/common/fixture/config.py b/oslo/db/openstack/common/fixture/config.py new file mode 100644 index 0000000..9489b85 --- /dev/null +++ b/oslo/db/openstack/common/fixture/config.py @@ -0,0 +1,85 @@ +# +# Copyright 2013 Mirantis, Inc. +# Copyright 2013 OpenStack Foundation +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import fixtures +from oslo.config import cfg +import six + + +class Config(fixtures.Fixture): + """Allows overriding configuration settings for the test. + + `conf` will be reset on cleanup. + + """ + + def __init__(self, conf=cfg.CONF): + self.conf = conf + + def setUp(self): + super(Config, self).setUp() + # NOTE(morganfainberg): unregister must be added to cleanup before + # reset is because cleanup works in reverse order of registered items, + # and a reset must occur before unregistering options can occur. + self.addCleanup(self._unregister_config_opts) + self.addCleanup(self.conf.reset) + self._registered_config_opts = {} + + def config(self, **kw): + """Override configuration values. + + The keyword arguments are the names of configuration options to + override and their values. + + If a `group` argument is supplied, the overrides are applied to + the specified configuration option group, otherwise the overrides + are applied to the ``default`` group. + + """ + + group = kw.pop('group', None) + for k, v in six.iteritems(kw): + self.conf.set_override(k, v, group) + + def _unregister_config_opts(self): + for group in self._registered_config_opts: + self.conf.unregister_opts(self._registered_config_opts[group], + group=group) + + def register_opt(self, opt, group=None): + """Register a single option for the test run. + + Options registered in this manner will automatically be unregistered + during cleanup. + + If a `group` argument is supplied, it will register the new option + to that group, otherwise the option is registered to the ``default`` + group. + """ + self.conf.register_opt(opt, group=group) + self._registered_config_opts.setdefault(group, set()).add(opt) + + def register_opts(self, opts, group=None): + """Register multiple options for the test run. + + This works in the same manner as register_opt() but takes a list of + options as the first argument. All arguments will be registered to the + same group if the ``group`` argument is supplied, otherwise all options + will be registered to the ``default`` group. 
+ """ + for opt in opts: + self.register_opt(opt, group=group) diff --git a/oslo/db/openstack/common/fixture/moxstubout.py b/oslo/db/openstack/common/fixture/moxstubout.py new file mode 100644 index 0000000..e4cd025 --- /dev/null +++ b/oslo/db/openstack/common/fixture/moxstubout.py @@ -0,0 +1,43 @@ +# Copyright 2010 United States Government as represented by the +# Administrator of the National Aeronautics and Space Administration. +# Copyright 2013 Hewlett-Packard Development Company, L.P. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +############################################################################## +############################################################################## +## +## DO NOT MODIFY THIS FILE +## +## This file is being graduated to the oslo.dbtest library. Please make all +## changes there, and only backport critical fixes here. - dhellmann +## +############################################################################## +############################################################################## + +import fixtures +from six.moves import mox + + +class MoxStubout(fixtures.Fixture): + """Deal with code around mox and stubout as a fixture.""" + + def setUp(self): + super(MoxStubout, self).setUp() + # emulate some of the mox stuff, we can't use the metaclass + # because it screws with our generators + self.mox = mox.Mox() + self.stubs = self.mox.stubs + self.addCleanup(self.mox.UnsetStubs) + self.addCleanup(self.mox.VerifyAll) diff --git a/oslo/db/openstack/common/gettextutils.py b/oslo/db/openstack/common/gettextutils.py new file mode 100644 index 0000000..e296d1a --- /dev/null +++ b/oslo/db/openstack/common/gettextutils.py @@ -0,0 +1,449 @@ +# Copyright 2012 Red Hat, Inc. +# Copyright 2013 IBM Corp. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +""" +gettext for openstack-common modules. + +Usual usage in an openstack.common module: + + from oslo.db.openstack.common.gettextutils import _ +""" + +import copy +import functools +import gettext +import locale +from logging import handlers +import os + +from babel import localedata +import six + +_localedir = os.environ.get('oslo.db'.upper() + '_LOCALEDIR') +_t = gettext.translation('oslo.db', localedir=_localedir, fallback=True) + +# We use separate translation catalogs for each log level, so set up a +# mapping between the log level name and the translator. 
The domain +# for the log level is project_name + "-log-" + log_level so messages +# for each level end up in their own catalog. +_t_log_levels = dict( + (level, gettext.translation('oslo.db' + '-log-' + level, + localedir=_localedir, + fallback=True)) + for level in ['info', 'warning', 'error', 'critical'] +) + +_AVAILABLE_LANGUAGES = {} +USE_LAZY = False + + +def enable_lazy(): + """Convenience function for configuring _() to use lazy gettext + + Call this at the start of execution to enable the gettextutils._ + function to use lazy gettext functionality. This is useful if + your project is importing _ directly instead of using the + gettextutils.install() way of importing the _ function. + """ + global USE_LAZY + USE_LAZY = True + + +def _(msg): + if USE_LAZY: + return Message(msg, domain='oslo.db') + else: + if six.PY3: + return _t.gettext(msg) + return _t.ugettext(msg) + + +def _log_translation(msg, level): + """Build a single translation of a log message + """ + if USE_LAZY: + return Message(msg, domain='oslo.db' + '-log-' + level) + else: + translator = _t_log_levels[level] + if six.PY3: + return translator.gettext(msg) + return translator.ugettext(msg) + +# Translators for log levels. +# +# The abbreviated names are meant to reflect the usual use of a short +# name like '_'. The "L" is for "log" and the other letter comes from +# the level. +_LI = functools.partial(_log_translation, level='info') +_LW = functools.partial(_log_translation, level='warning') +_LE = functools.partial(_log_translation, level='error') +_LC = functools.partial(_log_translation, level='critical') + + +def install(domain, lazy=False): + """Install a _() function using the given translation domain. + + Given a translation domain, install a _() function using gettext's + install() function. + + The main difference from gettext.install() is that we allow + overriding the default localedir (e.g. /usr/share/locale) using + a translation-domain-specific environment variable (e.g. + NOVA_LOCALEDIR). + + :param domain: the translation domain + :param lazy: indicates whether or not to install the lazy _() function. + The lazy _() introduces a way to do deferred translation + of messages by installing a _ that builds Message objects, + instead of strings, which can then be lazily translated into + any available locale. + """ + if lazy: + # NOTE(mrodden): Lazy gettext functionality. + # + # The following introduces a deferred way to do translations on + # messages in OpenStack. We override the standard _() function + # and % (format string) operation to build Message objects that can + # later be translated when we have more information. + def _lazy_gettext(msg): + """Create and return a Message object. + + Lazy gettext function for a given domain, it is a factory method + for a project/module to get a lazy gettext function for its own + translation domain (i.e. nova, glance, cinder, etc.) + + Message encapsulates a string so that we can translate + it later when needed. + """ + return Message(msg, domain=domain) + + from six import moves + moves.builtins.__dict__['_'] = _lazy_gettext + else: + localedir = '%s_LOCALEDIR' % domain.upper() + if six.PY3: + gettext.install(domain, + localedir=os.environ.get(localedir)) + else: + gettext.install(domain, + localedir=os.environ.get(localedir), + unicode=True) + + +class Message(six.text_type): + """A Message object is a unicode object that can be translated. + + Translation of Message is done explicitly using the translate() method. 
+ For all non-translation intents and purposes, a Message is simply unicode, + and can be treated as such. + """ + + def __new__(cls, msgid, msgtext=None, params=None, + domain='oslo.db', *args): + """Create a new Message object. + + In order for translation to work gettext requires a message ID, this + msgid will be used as the base unicode text. It is also possible + for the msgid and the base unicode text to be different by passing + the msgtext parameter. + """ + # If the base msgtext is not given, we use the default translation + # of the msgid (which is in English) just in case the system locale is + # not English, so that the base text will be in that locale by default. + if not msgtext: + msgtext = Message._translate_msgid(msgid, domain) + # We want to initialize the parent unicode with the actual object that + # would have been plain unicode if 'Message' was not enabled. + msg = super(Message, cls).__new__(cls, msgtext) + msg.msgid = msgid + msg.domain = domain + msg.params = params + return msg + + def translate(self, desired_locale=None): + """Translate this message to the desired locale. + + :param desired_locale: The desired locale to translate the message to, + if no locale is provided the message will be + translated to the system's default locale. + + :returns: the translated message in unicode + """ + + translated_message = Message._translate_msgid(self.msgid, + self.domain, + desired_locale) + if self.params is None: + # No need for more translation + return translated_message + + # This Message object may have been formatted with one or more + # Message objects as substitution arguments, given either as a single + # argument, part of a tuple, or as one or more values in a dictionary. + # When translating this Message we need to translate those Messages too + translated_params = _translate_args(self.params, desired_locale) + + translated_message = translated_message % translated_params + + return translated_message + + @staticmethod + def _translate_msgid(msgid, domain, desired_locale=None): + if not desired_locale: + system_locale = locale.getdefaultlocale() + # If the system locale is not available to the runtime use English + if not system_locale[0]: + desired_locale = 'en_US' + else: + desired_locale = system_locale[0] + + locale_dir = os.environ.get(domain.upper() + '_LOCALEDIR') + lang = gettext.translation(domain, + localedir=locale_dir, + languages=[desired_locale], + fallback=True) + if six.PY3: + translator = lang.gettext + else: + translator = lang.ugettext + + translated_message = translator(msgid) + return translated_message + + def __mod__(self, other): + # When we mod a Message we want the actual operation to be performed + # by the parent class (i.e. unicode()), the only thing we do here is + # save the original msgid and the parameters in case of a translation + params = self._sanitize_mod_params(other) + unicode_mod = super(Message, self).__mod__(params) + modded = Message(self.msgid, + msgtext=unicode_mod, + params=params, + domain=self.domain) + return modded + + def _sanitize_mod_params(self, other): + """Sanitize the object being modded with this Message. 
+ + - Add support for modding 'None' so translation supports it + - Trim the modded object, which can be a large dictionary, to only + those keys that would actually be used in a translation + - Snapshot the object being modded, in case the message is + translated, it will be used as it was when the Message was created + """ + if other is None: + params = (other,) + elif isinstance(other, dict): + # Merge the dictionaries + # Copy each item in case one does not support deep copy. + params = {} + if isinstance(self.params, dict): + for key, val in self.params.items(): + params[key] = self._copy_param(val) + for key, val in other.items(): + params[key] = self._copy_param(val) + else: + params = self._copy_param(other) + return params + + def _copy_param(self, param): + try: + return copy.deepcopy(param) + except Exception: + # Fallback to casting to unicode this will handle the + # python code-like objects that can't be deep-copied + return six.text_type(param) + + def __add__(self, other): + msg = _('Message objects do not support addition.') + raise TypeError(msg) + + def __radd__(self, other): + return self.__add__(other) + + if six.PY2: + def __str__(self): + # NOTE(luisg): Logging in python 2.6 tries to str() log records, + # and it expects specifically a UnicodeError in order to proceed. + msg = _('Message objects do not support str() because they may ' + 'contain non-ascii characters. ' + 'Please use unicode() or translate() instead.') + raise UnicodeError(msg) + + +def get_available_languages(domain): + """Lists the available languages for the given translation domain. + + :param domain: the domain to get languages for + """ + if domain in _AVAILABLE_LANGUAGES: + return copy.copy(_AVAILABLE_LANGUAGES[domain]) + + localedir = '%s_LOCALEDIR' % domain.upper() + find = lambda x: gettext.find(domain, + localedir=os.environ.get(localedir), + languages=[x]) + + # NOTE(mrodden): en_US should always be available (and first in case + # order matters) since our in-line message strings are en_US + language_list = ['en_US'] + # NOTE(luisg): Babel <1.0 used a function called list(), which was + # renamed to locale_identifiers() in >=1.0, the requirements master list + # requires >=0.9.6, uncapped, so defensively work with both. We can remove + # this check when the master list updates to >=1.0, and update all projects + list_identifiers = (getattr(localedata, 'list', None) or + getattr(localedata, 'locale_identifiers')) + locale_identifiers = list_identifiers() + + for i in locale_identifiers: + if find(i) is not None: + language_list.append(i) + + # NOTE(luisg): Babel>=1.0,<1.3 has a bug where some OpenStack supported + # locales (e.g. 'zh_CN', and 'zh_TW') aren't supported even though they + # are perfectly legitimate locales: + # https://github.com/mitsuhiko/babel/issues/37 + # In Babel 1.3 they fixed the bug and they support these locales, but + # they are still not explicitly "listed" by locale_identifiers(). + # That is why we add the locales here explicitly if necessary so that + # they are listed as supported. + aliases = {'zh': 'zh_CN', + 'zh_Hant_HK': 'zh_HK', + 'zh_Hant': 'zh_TW', + 'fil': 'tl_PH'} + for (locale, alias) in six.iteritems(aliases): + if locale in language_list and alias not in language_list: + language_list.append(alias) + + _AVAILABLE_LANGUAGES[domain] = language_list + return copy.copy(language_list) + + +def translate(obj, desired_locale=None): + """Gets the translated unicode representation of the given object. 
+ + If the object is not translatable it is returned as-is. + If the locale is None the object is translated to the system locale. + + :param obj: the object to translate + :param desired_locale: the locale to translate the message to, if None the + default system locale will be used + :returns: the translated object in unicode, or the original object if + it could not be translated + """ + message = obj + if not isinstance(message, Message): + # If the object to translate is not already translatable, + # let's first get its unicode representation + message = six.text_type(obj) + if isinstance(message, Message): + # Even after unicoding() we still need to check if we are + # running with translatable unicode before translating + return message.translate(desired_locale) + return obj + + +def _translate_args(args, desired_locale=None): + """Translates all the translatable elements of the given arguments object. + + This method is used for translating the translatable values in method + arguments which include values of tuples or dictionaries. + If the object is not a tuple or a dictionary the object itself is + translated if it is translatable. + + If the locale is None the object is translated to the system locale. + + :param args: the args to translate + :param desired_locale: the locale to translate the args to, if None the + default system locale will be used + :returns: a new args object with the translated contents of the original + """ + if isinstance(args, tuple): + return tuple(translate(v, desired_locale) for v in args) + if isinstance(args, dict): + translated_dict = {} + for (k, v) in six.iteritems(args): + translated_v = translate(v, desired_locale) + translated_dict[k] = translated_v + return translated_dict + return translate(args, desired_locale) + + +class TranslationHandler(handlers.MemoryHandler): + """Handler that translates records before logging them. + + The TranslationHandler takes a locale and a target logging.Handler object + to forward LogRecord objects to after translating them. This handler + depends on Message objects being logged, instead of regular strings. + + The handler can be configured declaratively in the logging.conf as follows: + + [handlers] + keys = translatedlog, translator + + [handler_translatedlog] + class = handlers.WatchedFileHandler + args = ('/var/log/api-localized.log',) + formatter = context + + [handler_translator] + class = openstack.common.log.TranslationHandler + target = translatedlog + args = ('zh_CN',) + + If the specified locale is not available in the system, the handler will + log in the default locale. + """ + + def __init__(self, locale=None, target=None): + """Initialize a TranslationHandler + + :param locale: locale to use for translating messages + :param target: logging.Handler object to forward + LogRecord objects to after translation + """ + # NOTE(luisg): In order to allow this handler to be a wrapper for + # other handlers, such as a FileHandler, and still be able to + # configure it using logging.conf, this handler has to extend + # MemoryHandler because only the MemoryHandlers' logging.conf + # parsing is implemented such that it accepts a target handler. 
+ handlers.MemoryHandler.__init__(self, capacity=0, target=target) + self.locale = locale + + def setFormatter(self, fmt): + self.target.setFormatter(fmt) + + def emit(self, record): + # We save the message from the original record to restore it + # after translation, so other handlers are not affected by this + original_msg = record.msg + original_args = record.args + + try: + self._translate_and_log_record(record) + finally: + record.msg = original_msg + record.args = original_args + + def _translate_and_log_record(self, record): + record.msg = translate(record.msg, self.locale) + + # In addition to translating the message, we also need to translate + # arguments that were passed to the log method that were not part + # of the main message e.g., log.info(_('Some message %s'), this_one)) + record.args = _translate_args(record.args, self.locale) + + self.target.emit(record) diff --git a/oslo/db/openstack/common/importutils.py b/oslo/db/openstack/common/importutils.py new file mode 100644 index 0000000..f40a843 --- /dev/null +++ b/oslo/db/openstack/common/importutils.py @@ -0,0 +1,73 @@ +# Copyright 2011 OpenStack Foundation. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +""" +Import related utilities and helper functions. +""" + +import sys +import traceback + + +def import_class(import_str): + """Returns a class from a string including module and class.""" + mod_str, _sep, class_str = import_str.rpartition('.') + try: + __import__(mod_str) + return getattr(sys.modules[mod_str], class_str) + except (ValueError, AttributeError): + raise ImportError('Class %s cannot be found (%s)' % + (class_str, + traceback.format_exception(*sys.exc_info()))) + + +def import_object(import_str, *args, **kwargs): + """Import a class and return an instance of it.""" + return import_class(import_str)(*args, **kwargs) + + +def import_object_ns(name_space, import_str, *args, **kwargs): + """Tries to import object from default namespace. + + Imports a class and return an instance of it, first by trying + to find the class in a default namespace, then failing back to + a full path if not found in the default namespace. 
+ """ + import_value = "%s.%s" % (name_space, import_str) + try: + return import_class(import_value)(*args, **kwargs) + except ImportError: + return import_class(import_str)(*args, **kwargs) + + +def import_module(import_str): + """Import a module.""" + __import__(import_str) + return sys.modules[import_str] + + +def import_versioned_module(version, submodule=None): + module = 'oslo.db.v%s' % version + if submodule: + module = '.'.join((module, submodule)) + return import_module(module) + + +def try_import(import_str, default=None): + """Try to import a module and if it fails return default.""" + try: + return import_module(import_str) + except ImportError: + return default diff --git a/oslo/db/openstack/common/timeutils.py b/oslo/db/openstack/common/timeutils.py new file mode 100644 index 0000000..52688a0 --- /dev/null +++ b/oslo/db/openstack/common/timeutils.py @@ -0,0 +1,210 @@ +# Copyright 2011 OpenStack Foundation. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +""" +Time related utilities and helper functions. +""" + +import calendar +import datetime +import time + +import iso8601 +import six + + +# ISO 8601 extended time format with microseconds +_ISO8601_TIME_FORMAT_SUBSECOND = '%Y-%m-%dT%H:%M:%S.%f' +_ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S' +PERFECT_TIME_FORMAT = _ISO8601_TIME_FORMAT_SUBSECOND + + +def isotime(at=None, subsecond=False): + """Stringify time in ISO 8601 format.""" + if not at: + at = utcnow() + st = at.strftime(_ISO8601_TIME_FORMAT + if not subsecond + else _ISO8601_TIME_FORMAT_SUBSECOND) + tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC' + st += ('Z' if tz == 'UTC' else tz) + return st + + +def parse_isotime(timestr): + """Parse time from ISO 8601 format.""" + try: + return iso8601.parse_date(timestr) + except iso8601.ParseError as e: + raise ValueError(six.text_type(e)) + except TypeError as e: + raise ValueError(six.text_type(e)) + + +def strtime(at=None, fmt=PERFECT_TIME_FORMAT): + """Returns formatted utcnow.""" + if not at: + at = utcnow() + return at.strftime(fmt) + + +def parse_strtime(timestr, fmt=PERFECT_TIME_FORMAT): + """Turn a formatted time back into a datetime.""" + return datetime.datetime.strptime(timestr, fmt) + + +def normalize_time(timestamp): + """Normalize time in arbitrary timezone to UTC naive object.""" + offset = timestamp.utcoffset() + if offset is None: + return timestamp + return timestamp.replace(tzinfo=None) - offset + + +def is_older_than(before, seconds): + """Return True if before is older than seconds.""" + if isinstance(before, six.string_types): + before = parse_strtime(before).replace(tzinfo=None) + else: + before = before.replace(tzinfo=None) + + return utcnow() - before > datetime.timedelta(seconds=seconds) + + +def is_newer_than(after, seconds): + """Return True if after is newer than seconds.""" + if isinstance(after, six.string_types): + after = parse_strtime(after).replace(tzinfo=None) + else: + after = after.replace(tzinfo=None) + + return after - utcnow() > datetime.timedelta(seconds=seconds) 
+
+
+def utcnow_ts():
+    """Timestamp version of our utcnow function."""
+    if utcnow.override_time is None:
+        # NOTE(kgriffs): This is several times faster
+        # than going through calendar.timegm(...)
+        return int(time.time())
+
+    return calendar.timegm(utcnow().timetuple())
+
+
+def utcnow():
+    """Overridable version of utils.utcnow."""
+    if utcnow.override_time:
+        try:
+            return utcnow.override_time.pop(0)
+        except AttributeError:
+            return utcnow.override_time
+    return datetime.datetime.utcnow()
+
+
+def iso8601_from_timestamp(timestamp):
+    """Returns an ISO 8601 formatted date from a timestamp."""
+    return isotime(datetime.datetime.utcfromtimestamp(timestamp))
+
+
+utcnow.override_time = None
+
+
+def set_time_override(override_time=None):
+    """Overrides utils.utcnow.
+
+    Make it return a constant time or a list thereof, one at a time.
+
+    :param override_time: datetime instance or list thereof. If not
+                          given, defaults to the current UTC time.
+    """
+    utcnow.override_time = override_time or datetime.datetime.utcnow()
+
+
+def advance_time_delta(timedelta):
+    """Advance overridden time using a datetime.timedelta."""
+    assert utcnow.override_time is not None
+    if isinstance(utcnow.override_time, list):
+        # datetime objects are immutable, so rebuild the list; adding the
+        # delta to the loop variable would leave the stored values unchanged.
+        utcnow.override_time = [dt + timedelta
+                                for dt in utcnow.override_time]
+    else:
+        utcnow.override_time += timedelta
+
+
+def advance_time_seconds(seconds):
+    """Advance overridden time by seconds."""
+    advance_time_delta(datetime.timedelta(0, seconds))
+
+
+def clear_time_override():
+    """Remove the overridden time."""
+    utcnow.override_time = None
+
+
+def marshall_now(now=None):
+    """Make an rpc-safe datetime with microseconds.
+
+    Note: tzinfo is stripped, but not required for relative times.
+    """
+    if not now:
+        now = utcnow()
+    return dict(day=now.day, month=now.month, year=now.year, hour=now.hour,
+                minute=now.minute, second=now.second,
+                microsecond=now.microsecond)
+
+
+def unmarshall_time(tyme):
+    """Unmarshall a datetime dict."""
+    return datetime.datetime(day=tyme['day'],
+                             month=tyme['month'],
+                             year=tyme['year'],
+                             hour=tyme['hour'],
+                             minute=tyme['minute'],
+                             second=tyme['second'],
+                             microsecond=tyme['microsecond'])
+
+
+def delta_seconds(before, after):
+    """Return the difference between two timing objects.
+
+    Compute the difference in seconds between two date, time, or
+    datetime objects (as a float, to microsecond resolution).
+    """
+    delta = after - before
+    return total_seconds(delta)
+
+
+def total_seconds(delta):
+    """Return the total seconds of a datetime.timedelta object.
+
+    datetime.timedelta does not have a total_seconds() method in
+    Python 2.6, so fall back to computing it manually.
+    """
+    try:
+        return delta.total_seconds()
+    except AttributeError:
+        return ((delta.days * 24 * 3600) + delta.seconds +
+                float(delta.microseconds) / (10 ** 6))
+
+
+def is_soon(dt, window):
+    """Determines if time is going to happen in the next window seconds.
+ + :param dt: the time + :param window: minimum seconds to remain to consider the time not soon + + :return: True if expiration is within the given duration + """ + soon = (utcnow() + datetime.timedelta(seconds=window)) + return normalize_time(dt) <= soon diff --git a/oslo/db/sqlalchemy/migration.py b/oslo/db/sqlalchemy/migration.py index 5c69d27..72e1869 100644 --- a/oslo/db/sqlalchemy/migration.py +++ b/oslo/db/sqlalchemy/migration.py @@ -51,7 +51,7 @@ import sqlalchemy from sqlalchemy.schema import UniqueConstraint from oslo.db import exception -from openstack.common.gettextutils import _ +from oslo.db.openstack.common.gettextutils import _ def _get_unique_constraints(self, table): diff --git a/oslo/db/sqlalchemy/migration_cli/ext_migrate.py b/oslo/db/sqlalchemy/migration_cli/ext_migrate.py index cf5280b..bdd86c3 100644 --- a/oslo/db/sqlalchemy/migration_cli/ext_migrate.py +++ b/oslo/db/sqlalchemy/migration_cli/ext_migrate.py @@ -13,10 +13,10 @@ import logging import os +from oslo.db.openstack.common.gettextutils import _LE from oslo.db.sqlalchemy import migration from oslo.db.sqlalchemy.migration_cli import ext_base from oslo.db.sqlalchemy import session as db_session -from openstack.common.gettextutils import _LE LOG = logging.getLogger(__name__) diff --git a/oslo/db/sqlalchemy/migration_cli/manager.py b/oslo/db/sqlalchemy/migration_cli/manager.py index ccc2712..ee0cb5b 100644 --- a/oslo/db/sqlalchemy/migration_cli/manager.py +++ b/oslo/db/sqlalchemy/migration_cli/manager.py @@ -13,7 +13,7 @@ from stevedore import enabled -MIGRATION_NAMESPACE = 'openstack.common.migration' +MIGRATION_NAMESPACE = 'oslo.db.migration' def check_plugin_enabled(ext): diff --git a/oslo/db/sqlalchemy/models.py b/oslo/db/sqlalchemy/models.py index d52edcd..f30e9e3 100644 --- a/oslo/db/sqlalchemy/models.py +++ b/oslo/db/sqlalchemy/models.py @@ -26,7 +26,7 @@ from sqlalchemy import Column, Integer from sqlalchemy import DateTime from sqlalchemy.orm import object_mapper -from openstack.common import timeutils +from oslo.db.openstack.common import timeutils class ModelBase(six.Iterator): diff --git a/oslo/db/sqlalchemy/session.py b/oslo/db/sqlalchemy/session.py index 90ff934..9bf4b6d 100644 --- a/oslo/db/sqlalchemy/session.py +++ b/oslo/db/sqlalchemy/session.py @@ -291,8 +291,8 @@ from sqlalchemy.pool import NullPool, StaticPool from sqlalchemy.sql.expression import literal_column from oslo.db import exception -from openstack.common.gettextutils import _LE, _LW -from openstack.common import timeutils +from oslo.db.openstack.common.gettextutils import _LE, _LW +from oslo.db.openstack.common import timeutils LOG = logging.getLogger(__name__) diff --git a/oslo/db/sqlalchemy/test_migrations.py b/oslo/db/sqlalchemy/test_migrations.py index 886bb04..661b0a7 100644 --- a/oslo/db/sqlalchemy/test_migrations.py +++ b/oslo/db/sqlalchemy/test_migrations.py @@ -26,8 +26,8 @@ from six.moves.urllib import parse import sqlalchemy import sqlalchemy.exc +from oslo.db.openstack.common.gettextutils import _LE from oslo.db.sqlalchemy import utils -from openstack.common.gettextutils import _LE LOG = logging.getLogger(__name__) diff --git a/oslo/db/sqlalchemy/utils.py b/oslo/db/sqlalchemy/utils.py index 02daea5..a71305d 100644 --- a/oslo/db/sqlalchemy/utils.py +++ b/oslo/db/sqlalchemy/utils.py @@ -36,10 +36,10 @@ from sqlalchemy import String from sqlalchemy import Table from sqlalchemy.types import NullType -from openstack.common import context as request_context +from oslo.db.openstack.common import context as request_context +from 
oslo.db.openstack.common.gettextutils import _, _LI, _LW +from oslo.db.openstack.common import timeutils from oslo.db.sqlalchemy import models -from openstack.common.gettextutils import _, _LI, _LW -from openstack.common import timeutils LOG = logging.getLogger(__name__) diff --git a/requirements.txt b/requirements.txt index dbb4dd1..0fde1f8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1 +1,8 @@ -Babel>=0.9.6 \ No newline at end of file +alembic>=0.4.1 +Babel>=0.9.6 +iso8601>=0.1.9 +lockfile>=0.8 +oslo.config>=1.2.0 +SQLAlchemy>=0.7.8,<=0.9.99 +sqlalchemy-migrate>=0.8.2,!=0.8.4 +stevedore>=0.14 diff --git a/setup.cfg b/setup.cfg index efd771c..efc991c 100644 --- a/setup.cfg +++ b/setup.cfg @@ -26,6 +26,14 @@ packages = namespace_packages = oslo +[entry_points] +oslo.config.opts = + oslo.db = oslo.db.options:list_opts + +oslo.db.migration = + alembic = oslo.db.sqlalchemy.migration_cli.ext_alembic:AlembicExtension + migrate = oslo.db.sqlalchemy.migration_cli.ext_migrate:MigrateExtension + [build_sphinx] source-dir = doc/source build-dir = doc/build @@ -46,4 +54,4 @@ input_file = oslo.db/locale/oslo.db.pot [extract_messages] keywords = _ gettext ngettext l_ lazy_gettext mapping_file = babel.cfg -output_file = oslo.db/locale/oslo.db.pot \ No newline at end of file +output_file = oslo.db/locale/oslo.db.pot diff --git a/test-requirements.txt b/test-requirements.txt index 2a46bd8..d7a1da2 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -3,9 +3,11 @@ hacking>=0.5.6,<0.8 coverage>=3.6 discover fixtures>=0.3.14 +MySQL-python python-subunit sphinx>=1.1.2 oslosphinx +oslotest testrepository>=0.0.17 testscenarios>=0.4,<0.5 -testtools>=0.9.32 \ No newline at end of file +testtools>=0.9.32 diff --git a/tests/__init__.py b/tests/__init__.py index f88664e..e69de29 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,13 +0,0 @@ -# -*- coding: utf-8 -*- - -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. \ No newline at end of file diff --git a/tests/base.py b/tests/base.py index f9a09a8..a3069ed 100644 --- a/tests/base.py +++ b/tests/base.py @@ -24,6 +24,7 @@ _TRUE_VALUES = ('true', '1', 'yes') # FIXME(dhellmann) Update this to use oslo.test library + class TestCase(testtools.TestCase): """Test case base class for all unit tests.""" @@ -51,4 +52,4 @@ class TestCase(testtools.TestCase): stderr = self.useFixture(fixtures.StringStream('stderr')).stream self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr)) - self.log_fixture = self.useFixture(fixtures.FakeLogger()) \ No newline at end of file + self.log_fixture = self.useFixture(fixtures.FakeLogger()) diff --git a/tests/sqlalchemy/__init__.py b/tests/sqlalchemy/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/sqlalchemy/test_migrate.py b/tests/sqlalchemy/test_migrate.py new file mode 100644 index 0000000..23833e1 --- /dev/null +++ b/tests/sqlalchemy/test_migrate.py @@ -0,0 +1,97 @@ +# Copyright (c) 2013 OpenStack Foundation +# All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from migrate.changeset.constraint import UniqueConstraint +from migrate.changeset.databases import sqlite +import sqlalchemy as sa + +from oslo.db.sqlalchemy import migration +from oslo.db.sqlalchemy import test_base + + +def uniques(*constraints): + """Make a sequence of UniqueConstraint instances easily comparable + + Convert a sequence of UniqueConstraint instances into a set of + tuples of form (constraint_name, (constraint_columns)) so that + assertEqual() will be able to compare sets of unique constraints + + """ + + return set((uc.name, tuple(uc.columns.keys())) for uc in constraints) + + +class TestSqliteUniqueConstraints(test_base.DbTestCase): + def setUp(self): + super(TestSqliteUniqueConstraints, self).setUp() + + migration.patch_migrate() + + self.helper = sqlite.SQLiteHelper() + + test_table = sa.Table( + 'test_table', + sa.schema.MetaData(bind=self.engine), + sa.Column('a', sa.Integer), + sa.Column('b', sa.String(10)), + sa.Column('c', sa.Integer), + sa.UniqueConstraint('a', 'b', name='unique_a_b'), + sa.UniqueConstraint('b', 'c', name='unique_b_c') + ) + test_table.create() + self.addCleanup(test_table.drop) + # NOTE(rpodolyaka): it's important to use the reflected table here + # rather than original one because this is what + # we actually do in db migrations code + self.reflected_table = sa.Table( + 'test_table', + sa.schema.MetaData(bind=self.engine), + autoload=True + ) + + @test_base.backend_specific('sqlite') + def test_get_unique_constraints(self): + table = self.reflected_table + + existing = uniques(*self.helper._get_unique_constraints(table)) + should_be = uniques( + sa.UniqueConstraint(table.c.a, table.c.b, name='unique_a_b'), + sa.UniqueConstraint(table.c.b, table.c.c, name='unique_b_c'), + ) + self.assertEqual(should_be, existing) + + @test_base.backend_specific('sqlite') + def test_add_unique_constraint(self): + table = self.reflected_table + UniqueConstraint(table.c.a, table.c.c, name='unique_a_c').create() + + existing = uniques(*self.helper._get_unique_constraints(table)) + should_be = uniques( + sa.UniqueConstraint(table.c.a, table.c.b, name='unique_a_b'), + sa.UniqueConstraint(table.c.b, table.c.c, name='unique_b_c'), + sa.UniqueConstraint(table.c.a, table.c.c, name='unique_a_c'), + ) + self.assertEqual(should_be, existing) + + @test_base.backend_specific('sqlite') + def test_drop_unique_constraint(self): + table = self.reflected_table + UniqueConstraint(table.c.a, table.c.b, name='unique_a_b').drop() + + existing = uniques(*self.helper._get_unique_constraints(table)) + should_be = uniques( + sa.UniqueConstraint(table.c.b, table.c.c, name='unique_b_c'), + ) + self.assertEqual(should_be, existing) diff --git a/tests/sqlalchemy/test_migrate_cli.py b/tests/sqlalchemy/test_migrate_cli.py new file mode 100644 index 0000000..f23ee87 --- /dev/null +++ b/tests/sqlalchemy/test_migrate_cli.py @@ -0,0 +1,217 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in 
compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import mock +from oslotest import base as test_base + +from oslo.db.sqlalchemy.migration_cli import ext_alembic +from oslo.db.sqlalchemy.migration_cli import ext_migrate +from oslo.db.sqlalchemy.migration_cli import manager + + +class MockWithCmp(mock.MagicMock): + + order = 0 + + def __cmp__(self, other): + return self.order > other.order + + +@mock.patch(('oslo.db.sqlalchemy.migration_cli.' + 'ext_alembic.alembic.command')) +class TestAlembicExtension(test_base.BaseTestCase): + + def setUp(self): + self.migration_config = {'alembic_ini_path': '.', + 'db_url': 'sqlite://'} + self.alembic = ext_alembic.AlembicExtension(self.migration_config) + super(TestAlembicExtension, self).setUp() + + def test_check_enabled_true(self, command): + """Verifies that enabled returns True on non empty + alembic_ini_path conf variable + """ + self.assertTrue(self.alembic.enabled) + + def test_check_enabled_false(self, command): + """Verifies enabled returns False on empty + alembic_ini_path variable + """ + self.migration_config['alembic_ini_path'] = '' + alembic = ext_alembic.AlembicExtension(self.migration_config) + self.assertFalse(alembic.enabled) + + def test_upgrade_none(self, command): + self.alembic.upgrade(None) + command.upgrade.assert_called_once_with(self.alembic.config, 'head') + + def test_upgrade_normal(self, command): + self.alembic.upgrade('131daa') + command.upgrade.assert_called_once_with(self.alembic.config, '131daa') + + def test_downgrade_none(self, command): + self.alembic.downgrade(None) + command.downgrade.assert_called_once_with(self.alembic.config, 'base') + + def test_downgrade_int(self, command): + self.alembic.downgrade(111) + command.downgrade.assert_called_once_with(self.alembic.config, 'base') + + def test_downgrade_normal(self, command): + self.alembic.downgrade('131daa') + command.downgrade.assert_called_once_with( + self.alembic.config, '131daa') + + def test_revision(self, command): + self.alembic.revision(message='test', autogenerate=True) + command.revision.assert_called_once_with( + self.alembic.config, message='test', autogenerate=True) + + def test_stamp(self, command): + self.alembic.stamp('stamp') + command.stamp.assert_called_once_with( + self.alembic.config, revision='stamp') + + def test_version(self, command): + version = self.alembic.version() + self.assertIsNone(version) + + +@mock.patch(('oslo.db.sqlalchemy.migration_cli.' 
+ 'ext_migrate.migration')) +class TestMigrateExtension(test_base.BaseTestCase): + + def setUp(self): + self.migration_config = {'migration_repo_path': '.', + 'db_url': 'sqlite://'} + self.migrate = ext_migrate.MigrateExtension(self.migration_config) + super(TestMigrateExtension, self).setUp() + + def test_check_enabled_true(self, migration): + self.assertTrue(self.migrate.enabled) + + def test_check_enabled_false(self, migration): + self.migration_config['migration_repo_path'] = '' + migrate = ext_migrate.MigrateExtension(self.migration_config) + self.assertFalse(migrate.enabled) + + def test_upgrade_head(self, migration): + self.migrate.upgrade('head') + migration.db_sync.assert_called_once_with( + self.migrate.engine, self.migrate.repository, None, init_version=0) + + def test_upgrade_normal(self, migration): + self.migrate.upgrade(111) + migration.db_sync.assert_called_once_with( + mock.ANY, self.migrate.repository, 111, init_version=0) + + def test_downgrade_init_version_from_base(self, migration): + self.migrate.downgrade('base') + migration.db_sync.assert_called_once_with( + self.migrate.engine, self.migrate.repository, mock.ANY, + init_version=mock.ANY) + + def test_downgrade_init_version_from_none(self, migration): + self.migrate.downgrade(None) + migration.db_sync.assert_called_once_with( + self.migrate.engine, self.migrate.repository, mock.ANY, + init_version=mock.ANY) + + def test_downgrade_normal(self, migration): + self.migrate.downgrade(101) + migration.db_sync.assert_called_once_with( + self.migrate.engine, self.migrate.repository, 101, init_version=0) + + def test_version(self, migration): + self.migrate.version() + migration.db_version.assert_called_once_with( + self.migrate.engine, self.migrate.repository, init_version=0) + + def test_change_init_version(self, migration): + self.migration_config['init_version'] = 101 + migrate = ext_migrate.MigrateExtension(self.migration_config) + migrate.downgrade(None) + migration.db_sync.assert_called_once_with( + migrate.engine, + self.migrate.repository, + self.migration_config['init_version'], + init_version=self.migration_config['init_version']) + + +class TestMigrationManager(test_base.BaseTestCase): + + def setUp(self): + self.migration_config = {'alembic_ini_path': '.', + 'migrate_repo_path': '.', + 'db_url': 'sqlite://'} + self.migration_manager = manager.MigrationManager( + self.migration_config) + self.ext = mock.Mock() + self.migration_manager._manager.extensions = [self.ext] + super(TestMigrationManager, self).setUp() + + def test_manager_update(self): + self.migration_manager.upgrade('head') + self.ext.obj.upgrade.assert_called_once_with('head') + + def test_manager_update_revision_none(self): + self.migration_manager.upgrade(None) + self.ext.obj.upgrade.assert_called_once_with(None) + + def test_downgrade_normal_revision(self): + self.migration_manager.downgrade('111abcd') + self.ext.obj.downgrade.assert_called_once_with('111abcd') + + def test_version(self): + self.migration_manager.version() + self.ext.obj.version.assert_called_once_with() + + def test_revision_message_autogenerate(self): + self.migration_manager.revision('test', True) + self.ext.obj.revision.assert_called_once_with('test', True) + + def test_revision_only_message(self): + self.migration_manager.revision('test', False) + self.ext.obj.revision.assert_called_once_with('test', False) + + def test_stamp(self): + self.migration_manager.stamp('stamp') + self.ext.obj.stamp.assert_called_once_with('stamp') + + +class 
TestMigrationRightOrder(test_base.BaseTestCase): + + def setUp(self): + self.migration_config = {'alembic_ini_path': '.', + 'migrate_repo_path': '.', + 'db_url': 'sqlite://'} + self.migration_manager = manager.MigrationManager( + self.migration_config) + self.first_ext = MockWithCmp() + self.first_ext.obj.order = 1 + self.first_ext.obj.upgrade.return_value = 100 + self.first_ext.obj.downgrade.return_value = 0 + self.second_ext = MockWithCmp() + self.second_ext.obj.order = 2 + self.second_ext.obj.upgrade.return_value = 200 + self.second_ext.obj.downgrade.return_value = 100 + self.migration_manager._manager.extensions = [self.first_ext, + self.second_ext] + super(TestMigrationRightOrder, self).setUp() + + def test_upgrade_right_order(self): + results = self.migration_manager.upgrade(None) + self.assertEqual(results, [100, 200]) + + def test_downgrade_right_order(self): + results = self.migration_manager.downgrade(None) + self.assertEqual(results, [100, 0]) diff --git a/tests/sqlalchemy/test_migration_common.py b/tests/sqlalchemy/test_migration_common.py new file mode 100644 index 0000000..9f05aff --- /dev/null +++ b/tests/sqlalchemy/test_migration_common.py @@ -0,0 +1,211 @@ +# Copyright 2013 Mirantis Inc. +# All Rights Reserved +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# + +import contextlib +import os +import tempfile + +from migrate import exceptions as migrate_exception +from migrate.versioning import api as versioning_api +import mock +import sqlalchemy + +from oslo.db import exception as db_exception +from oslo.db.sqlalchemy import migration +from oslo.db.sqlalchemy import test_base + + +class TestMigrationCommon(test_base.DbTestCase): + def setUp(self): + super(TestMigrationCommon, self).setUp() + + migration._REPOSITORY = None + self.path = tempfile.mkdtemp('test_migration') + self.path1 = tempfile.mkdtemp('test_migration') + self.return_value = '/home/openstack/migrations' + self.return_value1 = '/home/extension/migrations' + self.init_version = 1 + self.test_version = 123 + + self.patcher_repo = mock.patch.object(migration, 'Repository') + self.repository = self.patcher_repo.start() + self.repository.side_effect = [self.return_value, self.return_value1] + + self.mock_api_db = mock.patch.object(versioning_api, 'db_version') + self.mock_api_db_version = self.mock_api_db.start() + self.mock_api_db_version.return_value = self.test_version + + def tearDown(self): + os.rmdir(self.path) + self.mock_api_db.stop() + self.patcher_repo.stop() + super(TestMigrationCommon, self).tearDown() + + def test_find_migrate_repo_path_not_found(self): + self.assertRaises( + db_exception.DbMigrationError, + migration._find_migrate_repo, + "/foo/bar/", + ) + self.assertIsNone(migration._REPOSITORY) + + def test_find_migrate_repo_called_once(self): + my_repository = migration._find_migrate_repo(self.path) + self.repository.assert_called_once_with(self.path) + self.assertEqual(my_repository, self.return_value) + + def test_find_migrate_repo_called_few_times(self): + repo1 = migration._find_migrate_repo(self.path) + repo2 = migration._find_migrate_repo(self.path1) + self.assertNotEqual(repo1, repo2) + + def test_db_version_control(self): + with contextlib.nested( + mock.patch.object(migration, '_find_migrate_repo'), + mock.patch.object(versioning_api, 'version_control'), + ) as (mock_find_repo, mock_version_control): + mock_find_repo.return_value = self.return_value + + version = migration.db_version_control( + self.engine, self.path, self.test_version) + + self.assertEqual(version, self.test_version) + mock_version_control.assert_called_once_with( + self.engine, self.return_value, self.test_version) + + def test_db_version_return(self): + ret_val = migration.db_version(self.engine, self.path, + self.init_version) + self.assertEqual(ret_val, self.test_version) + + def test_db_version_raise_not_controlled_error_first(self): + with mock.patch.object(migration, 'db_version_control') as mock_ver: + + self.mock_api_db_version.side_effect = [ + migrate_exception.DatabaseNotControlledError('oups'), + self.test_version] + + ret_val = migration.db_version(self.engine, self.path, + self.init_version) + self.assertEqual(ret_val, self.test_version) + mock_ver.assert_called_once_with(self.engine, self.path, + version=self.init_version) + + def test_db_version_raise_not_controlled_error_tables(self): + with mock.patch.object(sqlalchemy, 'MetaData') as mock_meta: + self.mock_api_db_version.side_effect = \ + migrate_exception.DatabaseNotControlledError('oups') + my_meta = mock.MagicMock() + my_meta.tables = {'a': 1, 'b': 2} + mock_meta.return_value = my_meta + + self.assertRaises( + db_exception.DbMigrationError, migration.db_version, + self.engine, self.path, self.init_version) + + @mock.patch.object(versioning_api, 'version_control') + def 
test_db_version_raise_not_controlled_error_no_tables(self, mock_vc):
+        with mock.patch.object(sqlalchemy, 'MetaData') as mock_meta:
+            self.mock_api_db_version.side_effect = (
+                migrate_exception.DatabaseNotControlledError('oups'),
+                self.init_version)
+            my_meta = mock.MagicMock()
+            my_meta.tables = {}
+            mock_meta.return_value = my_meta
+            migration.db_version(self.engine, self.path, self.init_version)
+
+        mock_vc.assert_called_once_with(self.engine, self.return_value1,
+                                        self.init_version)
+
+    def test_db_sync_wrong_version(self):
+        self.assertRaises(db_exception.DbMigrationError,
+                          migration.db_sync, self.engine, self.path, 'foo')
+
+    def test_db_sync_upgrade(self):
+        init_ver = 55
+        with contextlib.nested(
+            mock.patch.object(migration, '_find_migrate_repo'),
+            mock.patch.object(versioning_api, 'upgrade')
+        ) as (mock_find_repo, mock_upgrade):
+
+            mock_find_repo.return_value = self.return_value
+            self.mock_api_db_version.return_value = self.test_version - 1
+
+            migration.db_sync(self.engine, self.path, self.test_version,
+                              init_ver)
+
+            mock_upgrade.assert_called_once_with(
+                self.engine, self.return_value, self.test_version)
+
+    def test_db_sync_downgrade(self):
+        with contextlib.nested(
+            mock.patch.object(migration, '_find_migrate_repo'),
+            mock.patch.object(versioning_api, 'downgrade')
+        ) as (mock_find_repo, mock_downgrade):
+
+            mock_find_repo.return_value = self.return_value
+            self.mock_api_db_version.return_value = self.test_version + 1
+
+            migration.db_sync(self.engine, self.path, self.test_version)
+
+            mock_downgrade.assert_called_once_with(
+                self.engine, self.return_value, self.test_version)
+
+    def test_db_sync_sanity_called(self):
+        with contextlib.nested(
+            mock.patch.object(migration, '_find_migrate_repo'),
+            mock.patch.object(migration, '_db_schema_sanity_check'),
+            mock.patch.object(versioning_api, 'downgrade')
+        ) as (mock_find_repo, mock_sanity, mock_downgrade):
+
+            mock_find_repo.return_value = self.return_value
+            migration.db_sync(self.engine, self.path, self.test_version)
+
+            self.assertTrue(mock_sanity.called)
+
+    def test_db_sync_sanity_skipped(self):
+        with contextlib.nested(
+            mock.patch.object(migration, '_find_migrate_repo'),
+            mock.patch.object(migration, '_db_schema_sanity_check'),
+            mock.patch.object(versioning_api, 'downgrade')
+        ) as (mock_find_repo, mock_sanity, mock_downgrade):
+
+            mock_find_repo.return_value = self.return_value
+            migration.db_sync(self.engine, self.path, self.test_version,
+                              sanity_check=False)
+
+            self.assertFalse(mock_sanity.called)
+
+    def test_db_sanity_table_not_utf8(self):
+        with mock.patch.object(self, 'engine') as mock_eng:
+            type(mock_eng).name = mock.PropertyMock(return_value='mysql')
+            mock_eng.execute.return_value = [['table_A', 'latin1'],
+                                             ['table_B', 'latin1']]
+
+            self.assertRaises(ValueError, migration._db_schema_sanity_check,
+                              mock_eng)
+
+    def test_db_sanity_table_not_utf8_exclude_migrate_tables(self):
+        with mock.patch.object(self, 'engine') as mock_eng:
+            type(mock_eng).name = mock.PropertyMock(return_value='mysql')
+            # NOTE(morganfainberg): Check both lower and upper case versions
+            # of the migration table names (validate case insensitivity in
+            # the sanity check).
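+            # Each fake row stands in for a (table_name, collation) pair
+            # such as the sanity check's information_schema query returns
+            # (an assumption about that helper's internals).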
+ mock_eng.execute.return_value = [['migrate_version', 'latin1'], + ['alembic_version', 'latin1'], + ['MIGRATE_VERSION', 'latin1'], + ['ALEMBIC_VERSION', 'latin1']] + + migration._db_schema_sanity_check(mock_eng) diff --git a/tests/sqlalchemy/test_migrations.py b/tests/sqlalchemy/test_migrations.py new file mode 100644 index 0000000..f39bb73 --- /dev/null +++ b/tests/sqlalchemy/test_migrations.py @@ -0,0 +1,154 @@ +# Copyright 2010-2011 OpenStack Foundation +# Copyright 2012-2013 IBM Corp. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import mock +from oslotest import base as test_base + +from oslo.db.sqlalchemy import test_migrations as migrate + + +class TestWalkVersions(test_base.BaseTestCase, migrate.WalkVersionsMixin): + def setUp(self): + super(TestWalkVersions, self).setUp() + self.migration_api = mock.MagicMock() + self.engine = mock.MagicMock() + self.REPOSITORY = mock.MagicMock() + self.INIT_VERSION = 4 + + def test_migrate_up(self): + self.migration_api.db_version.return_value = 141 + + self._migrate_up(self.engine, 141) + + self.migration_api.upgrade.assert_called_with( + self.engine, self.REPOSITORY, 141) + self.migration_api.db_version.assert_called_with( + self.engine, self.REPOSITORY) + + def test_migrate_up_with_data(self): + test_value = {"a": 1, "b": 2} + self.migration_api.db_version.return_value = 141 + self._pre_upgrade_141 = mock.MagicMock() + self._pre_upgrade_141.return_value = test_value + self._check_141 = mock.MagicMock() + + self._migrate_up(self.engine, 141, True) + + self._pre_upgrade_141.assert_called_with(self.engine) + self._check_141.assert_called_with(self.engine, test_value) + + def test_migrate_down(self): + self.migration_api.db_version.return_value = 42 + + self.assertTrue(self._migrate_down(self.engine, 42)) + self.migration_api.db_version.assert_called_with( + self.engine, self.REPOSITORY) + + def test_migrate_down_not_implemented(self): + self.migration_api.downgrade.side_effect = NotImplementedError + self.assertFalse(self._migrate_down(self.engine, 42)) + + def test_migrate_down_with_data(self): + self._post_downgrade_043 = mock.MagicMock() + self.migration_api.db_version.return_value = 42 + + self._migrate_down(self.engine, 42, True) + + self._post_downgrade_043.assert_called_with(self.engine) + + @mock.patch.object(migrate.WalkVersionsMixin, '_migrate_up') + @mock.patch.object(migrate.WalkVersionsMixin, '_migrate_down') + def test_walk_versions_all_default(self, _migrate_up, _migrate_down): + self.REPOSITORY.latest = 20 + self.migration_api.db_version.return_value = self.INIT_VERSION + + self._walk_versions() + + self.migration_api.version_control.assert_called_with( + None, self.REPOSITORY, self.INIT_VERSION) + self.migration_api.db_version.assert_called_with( + None, self.REPOSITORY) + + versions = range(self.INIT_VERSION + 1, self.REPOSITORY.latest + 1) + upgraded = [mock.call(None, v, with_data=True) for v in versions] + self.assertEqual(self._migrate_up.call_args_list, upgraded) + + downgraded = [mock.call(None, v 
- 1) for v in reversed(versions)] + self.assertEqual(self._migrate_down.call_args_list, downgraded) + + @mock.patch.object(migrate.WalkVersionsMixin, '_migrate_up') + @mock.patch.object(migrate.WalkVersionsMixin, '_migrate_down') + def test_walk_versions_all_true(self, _migrate_up, _migrate_down): + self.REPOSITORY.latest = 20 + self.migration_api.db_version.return_value = self.INIT_VERSION + + self._walk_versions(self.engine, snake_walk=True, downgrade=True) + + versions = range(self.INIT_VERSION + 1, self.REPOSITORY.latest + 1) + upgraded = [] + for v in versions: + upgraded.append(mock.call(self.engine, v, with_data=True)) + upgraded.append(mock.call(self.engine, v)) + upgraded.extend( + [mock.call(self.engine, v) for v in reversed(versions)] + ) + self.assertEqual(upgraded, self._migrate_up.call_args_list) + + downgraded_1 = [ + mock.call(self.engine, v - 1, with_data=True) for v in versions + ] + downgraded_2 = [] + for v in reversed(versions): + downgraded_2.append(mock.call(self.engine, v - 1)) + downgraded_2.append(mock.call(self.engine, v - 1)) + downgraded = downgraded_1 + downgraded_2 + self.assertEqual(self._migrate_down.call_args_list, downgraded) + + @mock.patch.object(migrate.WalkVersionsMixin, '_migrate_up') + @mock.patch.object(migrate.WalkVersionsMixin, '_migrate_down') + def test_walk_versions_true_false(self, _migrate_up, _migrate_down): + self.REPOSITORY.latest = 20 + self.migration_api.db_version.return_value = self.INIT_VERSION + + self._walk_versions(self.engine, snake_walk=True, downgrade=False) + + versions = range(self.INIT_VERSION + 1, self.REPOSITORY.latest + 1) + + upgraded = [] + for v in versions: + upgraded.append(mock.call(self.engine, v, with_data=True)) + upgraded.append(mock.call(self.engine, v)) + self.assertEqual(upgraded, self._migrate_up.call_args_list) + + downgraded = [ + mock.call(self.engine, v - 1, with_data=True) for v in versions + ] + self.assertEqual(self._migrate_down.call_args_list, downgraded) + + @mock.patch.object(migrate.WalkVersionsMixin, '_migrate_up') + @mock.patch.object(migrate.WalkVersionsMixin, '_migrate_down') + def test_walk_versions_all_false(self, _migrate_up, _migrate_down): + self.REPOSITORY.latest = 20 + self.migration_api.db_version.return_value = self.INIT_VERSION + + self._walk_versions(self.engine, snake_walk=False, downgrade=False) + + versions = range(self.INIT_VERSION + 1, self.REPOSITORY.latest + 1) + + upgraded = [ + mock.call(self.engine, v, with_data=True) for v in versions + ] + self.assertEqual(upgraded, self._migrate_up.call_args_list) diff --git a/tests/sqlalchemy/test_models.py b/tests/sqlalchemy/test_models.py new file mode 100644 index 0000000..8c23da8 --- /dev/null +++ b/tests/sqlalchemy/test_models.py @@ -0,0 +1,132 @@ +# Copyright 2012 Cloudscaling Group, Inc. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
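The tests that follow exercise the dict-like interface that models.ModelBase
exposes. For orientation, a minimal sketch of that interface in use
(illustrative only, not part of this patch):

    from oslo.db.sqlalchemy import models

    mb = models.ModelBase()
    mb['hostname'] = 'node-1'      # __setitem__ maps onto setattr()
    mb.update({'vcpus': 4})        # dict-style bulk assignment
    assert mb.get('vcpus') == 4    # get() with an optional default
    assert 'hostname' in dict(mb.iteritems())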
+
+from oslotest import base as oslo_test
+from sqlalchemy import Column
+from sqlalchemy import Integer, String
+from sqlalchemy.ext.declarative import declarative_base
+
+from oslo.db.sqlalchemy import models
+from oslo.db.sqlalchemy import test_base
+
+
+BASE = declarative_base()
+
+
+class ModelBaseTest(test_base.DbTestCase):
+
+    def test_modelbase_has_dict_methods(self):
+        dict_methods = ('__getitem__',
+                        '__setitem__',
+                        '__iter__',
+                        'get',
+                        'next',
+                        'update',
+                        'save',
+                        'iteritems')
+        for method in dict_methods:
+            self.assertTrue(hasattr(models.ModelBase, method))
+
+    def test_modelbase_set(self):
+        mb = models.ModelBase()
+        mb['world'] = 'hello'
+        self.assertEqual(mb['world'], 'hello')
+
+    def test_modelbase_update(self):
+        mb = models.ModelBase()
+        h = {'a': '1', 'b': '2'}
+        mb.update(h)
+        for key in h.keys():
+            self.assertEqual(mb[key], h[key])
+
+    def test_modelbase_iteritems(self):
+        mb = models.ModelBase()
+        h = {'a': '1', 'b': '2'}
+        mb.update(h)
+        for key, value in mb.iteritems():
+            self.assertEqual(h[key], value)
+
+    def test_modelbase_iter(self):
+        mb = models.ModelBase()
+        h = {'a': '1', 'b': '2'}
+        mb.update(h)
+        i = iter(mb)
+
+        min_items = len(h)
+        found_items = 0
+        while True:
+            r = next(i, None)
+            if r is None:
+                break
+
+            self.assertTrue(r in h)
+            found_items += 1
+
+        self.assertEqual(min_items, found_items)
+
+    def test_extra_keys_empty(self):
+        """Test verifies that by default extra_keys return empty list."""
+        mb = models.ModelBase()
+        self.assertEqual(mb._extra_keys, [])
+
+    def test_extra_keys_defined(self):
+        """Property _extra_keys will return list with attributes names."""
+        ekm = ExtraKeysModel()
+        self.assertEqual(ekm._extra_keys, ['name'])
+
+    def test_model_with_extra_keys(self):
+        item = ExtraKeysModel()
+        data = dict(item)
+        self.assertEqual(data, {'smth': None,
+                                'id': None,
+                                'name': 'NAME'})
+
+
+class ExtraKeysModel(BASE, models.ModelBase):
+    __tablename__ = 'test_model'
+
+    id = Column(Integer, primary_key=True)
+    smth = Column(String(255))
+
+    @property
+    def name(self):
+        return 'NAME'
+
+    @property
+    def _extra_keys(self):
+        return ['name']
+
+
+class TimestampMixinTest(oslo_test.BaseTestCase):
+
+    def test_timestampmixin_attr(self):
+
+        class TestModel(models.ModelBase, models.TimestampMixin):
+            pass
+
+        dict_methods = ('__getitem__',
+                        '__setitem__',
+                        '__iter__',
+                        'get',
+                        'next',
+                        'update',
+                        'save',
+                        'iteritems',
+                        'created_at',
+                        'updated_at')
+        for method in dict_methods:
+            self.assertTrue(hasattr(TestModel, method))
diff --git a/tests/sqlalchemy/test_options.py b/tests/sqlalchemy/test_options.py
new file mode 100644
index 0000000..4870ed8
--- /dev/null
+++ b/tests/sqlalchemy/test_options.py
@@ -0,0 +1,120 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
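The option tests below check that the legacy [DEFAULT]/[DATABASE]/[sql]
option names are still honored. oslo.config handles this through
deprecated_opts; roughly how such an option is declared (a sketch, the
authoritative list lives in oslo.db.options):

    from oslo.config import cfg

    connection_opt = cfg.StrOpt(
        'connection',
        secret=True,
        deprecated_opts=[
            cfg.DeprecatedOpt('sql_connection', group='DEFAULT'),
            cfg.DeprecatedOpt('sql_connection', group='DATABASE'),
            cfg.DeprecatedOpt('connection', group='sql'),
        ])
    cfg.CONF.register_opts([connection_opt], group='database')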
+ +from oslo.config import cfg + +from oslo.db.openstack.common.fixture import config +from tests import utils as test_utils + + +cfg.CONF.import_opt('connection', 'oslo.db.options', + group='database') + + +class DbApiOptionsTestCase(test_utils.BaseTestCase): + def setUp(self): + super(DbApiOptionsTestCase, self).setUp() + + config_fixture = self.useFixture(config.Config()) + self.conf = config_fixture.conf + self.config = config_fixture.config + + def test_deprecated_session_parameters(self): + path = self.create_tempfiles([["tmp", b"""[DEFAULT] +sql_connection=x://y.z +sql_min_pool_size=10 +sql_max_pool_size=20 +sql_max_retries=30 +sql_retry_interval=40 +sql_max_overflow=50 +sql_connection_debug=60 +sql_connection_trace=True +"""]])[0] + self.conf(['--config-file', path]) + self.assertEqual(self.conf.database.connection, 'x://y.z') + self.assertEqual(self.conf.database.min_pool_size, 10) + self.assertEqual(self.conf.database.max_pool_size, 20) + self.assertEqual(self.conf.database.max_retries, 30) + self.assertEqual(self.conf.database.retry_interval, 40) + self.assertEqual(self.conf.database.max_overflow, 50) + self.assertEqual(self.conf.database.connection_debug, 60) + self.assertEqual(self.conf.database.connection_trace, True) + + def test_session_parameters(self): + path = self.create_tempfiles([["tmp", b"""[database] +connection=x://y.z +min_pool_size=10 +max_pool_size=20 +max_retries=30 +retry_interval=40 +max_overflow=50 +connection_debug=60 +connection_trace=True +pool_timeout=7 +"""]])[0] + self.conf(['--config-file', path]) + self.assertEqual(self.conf.database.connection, 'x://y.z') + self.assertEqual(self.conf.database.min_pool_size, 10) + self.assertEqual(self.conf.database.max_pool_size, 20) + self.assertEqual(self.conf.database.max_retries, 30) + self.assertEqual(self.conf.database.retry_interval, 40) + self.assertEqual(self.conf.database.max_overflow, 50) + self.assertEqual(self.conf.database.connection_debug, 60) + self.assertEqual(self.conf.database.connection_trace, True) + self.assertEqual(self.conf.database.pool_timeout, 7) + + def test_dbapi_database_deprecated_parameters(self): + path = self.create_tempfiles([['tmp', b'[DATABASE]\n' + b'sql_connection=fake_connection\n' + b'sql_idle_timeout=100\n' + b'sql_min_pool_size=99\n' + b'sql_max_pool_size=199\n' + b'sql_max_retries=22\n' + b'reconnect_interval=17\n' + b'sqlalchemy_max_overflow=101\n' + b'sqlalchemy_pool_timeout=5\n' + ]])[0] + self.conf(['--config-file', path]) + self.assertEqual(self.conf.database.connection, 'fake_connection') + self.assertEqual(self.conf.database.idle_timeout, 100) + self.assertEqual(self.conf.database.min_pool_size, 99) + self.assertEqual(self.conf.database.max_pool_size, 199) + self.assertEqual(self.conf.database.max_retries, 22) + self.assertEqual(self.conf.database.retry_interval, 17) + self.assertEqual(self.conf.database.max_overflow, 101) + self.assertEqual(self.conf.database.pool_timeout, 5) + + def test_dbapi_database_deprecated_parameters_sql(self): + path = self.create_tempfiles([['tmp', b'[sql]\n' + b'connection=test_sql_connection\n' + b'idle_timeout=99\n' + ]])[0] + self.conf(['--config-file', path]) + self.assertEqual(self.conf.database.connection, 'test_sql_connection') + self.assertEqual(self.conf.database.idle_timeout, 99) + + def test_deprecated_dbapi_parameters(self): + path = self.create_tempfiles([['tmp', b'[DEFAULT]\n' + b'db_backend=test_123\n' + ]])[0] + + self.conf(['--config-file', path]) + self.assertEqual(self.conf.database.backend, 'test_123') + + def 
test_dbapi_parameters(self): + path = self.create_tempfiles([['tmp', b'[database]\n' + b'backend=test_123\n' + ]])[0] + + self.conf(['--config-file', path]) + self.assertEqual(self.conf.database.backend, 'test_123') diff --git a/tests/sqlalchemy/test_sqlalchemy.py b/tests/sqlalchemy/test_sqlalchemy.py new file mode 100644 index 0000000..9cda622 --- /dev/null +++ b/tests/sqlalchemy/test_sqlalchemy.py @@ -0,0 +1,542 @@ +# encoding=UTF8 + +# Copyright (c) 2012 Rackspace Hosting +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Unit tests for SQLAlchemy specific code.""" +import logging + +import _mysql_exceptions +import fixtures +import mock +from oslotest import base as oslo_test +import sqlalchemy +from sqlalchemy import Column, MetaData, Table, UniqueConstraint +from sqlalchemy import DateTime, Integer, String +from sqlalchemy import exc as sqla_exc +from sqlalchemy.exc import DataError +from sqlalchemy.ext.declarative import declarative_base + +from oslo.db import exception as db_exc +from oslo.db.sqlalchemy import models +from oslo.db.sqlalchemy import session +from oslo.db.sqlalchemy import test_base + + +BASE = declarative_base() +_TABLE_NAME = '__tmp__test__tmp__' + + +class TmpTable(BASE, models.ModelBase): + __tablename__ = _TABLE_NAME + id = Column(Integer, primary_key=True) + foo = Column(Integer) + + +class SessionErrorWrapperTestCase(test_base.DbTestCase): + def setUp(self): + super(SessionErrorWrapperTestCase, self).setUp() + meta = MetaData() + meta.bind = self.engine + test_table = Table(_TABLE_NAME, meta, + Column('id', Integer, primary_key=True, + nullable=False), + Column('deleted', Integer, default=0), + Column('deleted_at', DateTime), + Column('updated_at', DateTime), + Column('created_at', DateTime), + Column('foo', Integer), + UniqueConstraint('foo', name='uniq_foo')) + test_table.create() + self.addCleanup(test_table.drop) + + def test_flush_wrapper(self): + _session = self.sessionmaker() + + tbl = TmpTable() + tbl.update({'foo': 10}) + tbl.save(_session) + + tbl2 = TmpTable() + tbl2.update({'foo': 10}) + self.assertRaises(db_exc.DBDuplicateEntry, tbl2.save, _session) + + def test_execute_wrapper(self): + _session = self.sessionmaker() + with _session.begin(): + for i in [10, 20]: + tbl = TmpTable() + tbl.update({'foo': i}) + tbl.save(session=_session) + + method = _session.query(TmpTable).\ + filter_by(foo=10).\ + update + self.assertRaises(db_exc.DBDuplicateEntry, + method, {'foo': 20}) + + def test_ibm_db_sa_raise_if_duplicate_entry_error_duplicate(self): + # Tests that the session._raise_if_duplicate_entry_error method + # translates the duplicate entry integrity error for the DB2 engine. 
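+        # DB2 reports the duplicate through an SQL0803N message rather than
+        # a dedicated error code, so the translation has to match on the
+        # text of the wrapped IntegrityError.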
+ statement = ('INSERT INTO key_pairs (created_at, updated_at, ' + 'deleted_at, deleted, name, user_id, fingerprint) VALUES ' + '(?, ?, ?, ?, ?, ?, ?)') + params = ['20130918001123627099', None, None, 0, 'keypair-23474772', + '974a7c9ffde6419f9811fcf94a917f47', + '7d:2c:58:7f:97:66:14:3f:27:c7:09:3c:26:95:66:4d'] + orig = sqla_exc.SQLAlchemyError( + 'SQL0803N One or more values in the INSERT statement, UPDATE ' + 'statement, or foreign key update caused by a DELETE statement are' + ' not valid because the primary key, unique constraint or unique ' + 'index identified by "2" constrains table "NOVA.KEY_PAIRS" from ' + 'having duplicate values for the index key.') + integrity_error = sqla_exc.IntegrityError(statement, params, orig) + self.assertRaises(db_exc.DBDuplicateEntry, + session._raise_if_duplicate_entry_error, + integrity_error, 'ibm_db_sa') + + def test_ibm_db_sa_raise_if_duplicate_entry_error_no_match(self): + # Tests that the session._raise_if_duplicate_entry_error method + # does not raise a DBDuplicateEntry exception when it's not a matching + # integrity error. + statement = ('ALTER TABLE instance_types ADD CONSTRAINT ' + 'uniq_name_x_deleted UNIQUE (name, deleted)') + params = None + orig = sqla_exc.SQLAlchemyError( + 'SQL0542N The column named "NAME" cannot be a column of a ' + 'primary key or unique key constraint because it can contain null ' + 'values.') + integrity_error = sqla_exc.IntegrityError(statement, params, orig) + session._raise_if_duplicate_entry_error(integrity_error, 'ibm_db_sa') + + +_REGEXP_TABLE_NAME = _TABLE_NAME + "regexp" + + +class RegexpTable(BASE, models.ModelBase): + __tablename__ = _REGEXP_TABLE_NAME + id = Column(Integer, primary_key=True) + bar = Column(String(255)) + + +class RegexpFilterTestCase(test_base.DbTestCase): + + def setUp(self): + super(RegexpFilterTestCase, self).setUp() + meta = MetaData() + meta.bind = self.engine + test_table = Table(_REGEXP_TABLE_NAME, meta, + Column('id', Integer, primary_key=True, + nullable=False), + Column('bar', String(255))) + test_table.create() + self.addCleanup(test_table.drop) + + def _test_regexp_filter(self, regexp, expected): + _session = self.sessionmaker() + with _session.begin(): + for i in ['10', '20', u'♥']: + tbl = RegexpTable() + tbl.update({'bar': i}) + tbl.save(session=_session) + + regexp_op = RegexpTable.bar.op('REGEXP')(regexp) + result = _session.query(RegexpTable).filter(regexp_op).all() + self.assertEqual([r.bar for r in result], expected) + + def test_regexp_filter(self): + self._test_regexp_filter('10', ['10']) + + def test_regexp_filter_nomatch(self): + self._test_regexp_filter('11', []) + + def test_regexp_filter_unicode(self): + self._test_regexp_filter(u'♥', [u'♥']) + + def test_regexp_filter_unicode_nomatch(self): + self._test_regexp_filter(u'♦', []) + + +class FakeDBAPIConnection(): + def cursor(self): + return FakeCursor() + + +class FakeCursor(): + def execute(self, sql): + pass + + +class FakeConnectionProxy(): + pass + + +class FakeConnectionRec(): + pass + + +class OperationalError(Exception): + pass + + +class ProgrammingError(Exception): + pass + + +class FakeDB2Engine(object): + + class Dialect(): + + def is_disconnect(self, e, *args): + expected_error = ('SQL30081N: DB2 Server connection is no longer ' + 'active') + return (str(e) == expected_error) + + dialect = Dialect() + name = 'ibm_db_sa' + + def dispose(self): + pass + + +class TestDBDisconnected(oslo_test.BaseTestCase): + + def _test_ping_listener_disconnected(self, connection): + engine_args = { + 'pool_recycle': 
3600, + 'echo': False, + 'convert_unicode': True} + + engine = sqlalchemy.create_engine(connection, **engine_args) + with mock.patch.object(engine, 'dispose') as dispose_mock: + self.assertRaises(sqlalchemy.exc.DisconnectionError, + session._ping_listener, engine, + FakeDBAPIConnection(), FakeConnectionRec(), + FakeConnectionProxy()) + dispose_mock.assert_called_once_with() + + def test_mysql_ping_listener_disconnected(self): + def fake_execute(sql): + raise _mysql_exceptions.OperationalError(self.mysql_error, + ('MySQL server has ' + 'gone away')) + with mock.patch.object(FakeCursor, 'execute', + side_effect=fake_execute): + connection = 'mysql://root:password@fakehost/fakedb?charset=utf8' + for code in [2006, 2013, 2014, 2045, 2055]: + self.mysql_error = code + self._test_ping_listener_disconnected(connection) + + def test_db2_ping_listener_disconnected(self): + + def fake_execute(sql): + raise OperationalError('SQL30081N: DB2 Server ' + 'connection is no longer active') + with mock.patch.object(FakeCursor, 'execute', + side_effect=fake_execute): + # TODO(dperaza): Need a fake engine for db2 since ibm_db_sa is not + # in global requirements. Change this code to use real IBM db2 + # engine as soon as ibm_db_sa is included in global-requirements + # under openstack/requirements project. + fake_create_engine = lambda *args, **kargs: FakeDB2Engine() + with mock.patch.object(sqlalchemy, 'create_engine', + side_effect=fake_create_engine): + connection = ('ibm_db_sa://db2inst1:openstack@fakehost:50000' + '/fakedab') + self._test_ping_listener_disconnected(connection) + + +class MySQLModeTestCase(test_base.MySQLOpportunisticTestCase): + + def __init__(self, *args, **kwargs): + super(MySQLModeTestCase, self).__init__(*args, **kwargs) + # By default, run in empty SQL mode. + # Subclasses override this with specific modes. + self.mysql_mode = '' + + def setUp(self): + super(MySQLModeTestCase, self).setUp() + + self.engine = session.create_engine(self.engine.url, + mysql_sql_mode=self.mysql_mode) + self.connection = self.engine.connect() + + meta = MetaData() + meta.bind = self.engine + self.test_table = Table(_TABLE_NAME + "mode", meta, + Column('id', Integer, primary_key=True), + Column('bar', String(255))) + self.test_table.create() + + self.addCleanup(self.test_table.drop) + self.addCleanup(self.connection.close) + + def _test_string_too_long(self, value): + with self.connection.begin(): + self.connection.execute(self.test_table.insert(), + bar=value) + result = self.connection.execute(self.test_table.select()) + return result.fetchone()['bar'] + + def test_string_too_long(self): + value = 'a' * 512 + # String is too long. + # With no SQL mode set, this gets truncated. + self.assertNotEqual(value, + self._test_string_too_long(value)) + + +class MySQLStrictAllTablesModeTestCase(MySQLModeTestCase): + "Test data integrity enforcement in MySQL STRICT_ALL_TABLES mode." + + def __init__(self, *args, **kwargs): + super(MySQLStrictAllTablesModeTestCase, self).__init__(*args, **kwargs) + self.mysql_mode = 'STRICT_ALL_TABLES' + + def test_string_too_long(self): + value = 'a' * 512 + # String is too long. + # With STRICT_ALL_TABLES or TRADITIONAL mode set, this is an error. + self.assertRaises(DataError, + self._test_string_too_long, value) + + +class MySQLTraditionalModeTestCase(MySQLStrictAllTablesModeTestCase): + """Test data integrity enforcement in MySQL TRADITIONAL mode. + Since TRADITIONAL includes STRICT_ALL_TABLES, this inherits all + STRICT_ALL_TABLES mode tests. 
+ """ + + def __init__(self, *args, **kwargs): + super(MySQLTraditionalModeTestCase, self).__init__(*args, **kwargs) + self.mysql_mode = 'TRADITIONAL' + + +class EngineFacadeTestCase(oslo_test.BaseTestCase): + def setUp(self): + super(EngineFacadeTestCase, self).setUp() + + self.facade = session.EngineFacade('sqlite://') + + def test_get_engine(self): + eng1 = self.facade.get_engine() + eng2 = self.facade.get_engine() + + self.assertIs(eng1, eng2) + + def test_get_session(self): + ses1 = self.facade.get_session() + ses2 = self.facade.get_session() + + self.assertIsNot(ses1, ses2) + + def test_get_session_arguments_override_default_settings(self): + ses = self.facade.get_session(autocommit=False, expire_on_commit=True) + + self.assertFalse(ses.autocommit) + self.assertTrue(ses.expire_on_commit) + + @mock.patch('oslo.db.sqlalchemy.session.get_maker') + @mock.patch('oslo.db.sqlalchemy.session.create_engine') + def test_creation_from_config(self, create_engine, get_maker): + conf = mock.MagicMock() + conf.database.items.return_value = [ + ('connection_debug', 100), + ('max_pool_size', 10), + ('mysql_sql_mode', 'TRADITIONAL'), + ] + + session.EngineFacade.from_config('sqlite:///:memory:', conf, + autocommit=False, + expire_on_commit=True) + + conf.database.items.assert_called_once_with() + create_engine.assert_called_once_with( + sql_connection='sqlite:///:memory:', + connection_debug=100, + max_pool_size=10, + mysql_sql_mode='TRADITIONAL', + sqlite_fk=False, + idle_timeout=mock.ANY, + retry_interval=mock.ANY, + max_retries=mock.ANY, + max_overflow=mock.ANY, + connection_trace=mock.ANY, + sqlite_synchronous=mock.ANY, + pool_timeout=mock.ANY, + ) + get_maker.assert_called_once_with(engine=create_engine(), + autocommit=False, + expire_on_commit=True) + + +class MysqlSetCallbackTest(oslo_test.BaseTestCase): + + class FakeCursor(object): + def __init__(self, execs): + self._execs = execs + + def execute(self, sql, arg): + self._execs.append(sql % arg) + + class FakeDbapiCon(object): + def __init__(self, execs): + self._execs = execs + + def cursor(self): + return MysqlSetCallbackTest.FakeCursor(self._execs) + + class FakeResultSet(object): + def __init__(self, realmode): + self._realmode = realmode + + def fetchone(self): + return ['ignored', self._realmode] + + class FakeEngine(object): + def __init__(self, realmode=None): + self._cbs = {} + self._execs = [] + self._realmode = realmode + self._connected = False + + def set_callback(self, name, cb): + self._cbs[name] = cb + + def connect(self, **kwargs): + cb = self._cbs.get('connect', lambda *x, **y: None) + dbapi_con = MysqlSetCallbackTest.FakeDbapiCon(self._execs) + connection_rec = None # Not used. 
+ cb(dbapi_con, connection_rec) + + def execute(self, sql): + if not self._connected: + self.connect() + self._connected = True + self._execs.append(sql) + return MysqlSetCallbackTest.FakeResultSet(self._realmode) + + def stub_listen(engine, name, cb): + engine.set_callback(name, cb) + + @mock.patch.object(sqlalchemy.event, 'listen', side_effect=stub_listen) + def _call_set_callback(self, listen_mock, sql_mode=None, realmode=None): + engine = self.FakeEngine(realmode=realmode) + + self.stream = self.useFixture(fixtures.FakeLogger( + format="%(levelname)8s [%(name)s] %(message)s", + level=logging.DEBUG, + nuke_handlers=True + )) + + session._mysql_set_mode_callback(engine, sql_mode=sql_mode) + return engine + + def test_set_mode_traditional(self): + # If _mysql_set_mode_callback is called with an sql_mode, then the SQL + # mode is set on the connection. + + engine = self._call_set_callback(sql_mode='TRADITIONAL') + + exp_calls = [ + "SET SESSION sql_mode = ['TRADITIONAL']", + "SHOW VARIABLES LIKE 'sql_mode'" + ] + self.assertEqual(exp_calls, engine._execs) + + def test_set_mode_ansi(self): + # If _mysql_set_mode_callback is called with an sql_mode, then the SQL + # mode is set on the connection. + + engine = self._call_set_callback(sql_mode='ANSI') + + exp_calls = [ + "SET SESSION sql_mode = ['ANSI']", + "SHOW VARIABLES LIKE 'sql_mode'" + ] + self.assertEqual(exp_calls, engine._execs) + + def test_set_mode_no_mode(self): + # If _mysql_set_mode_callback is called with sql_mode=None, then + # the SQL mode is NOT set on the connection. + + engine = self._call_set_callback() + + exp_calls = [ + "SHOW VARIABLES LIKE 'sql_mode'" + ] + self.assertEqual(exp_calls, engine._execs) + + def test_fail_detect_mode(self): + # If "SHOW VARIABLES LIKE 'sql_mode'" results in no row, then + # we get a log indicating can't detect the mode. + + self._call_set_callback() + + self.assertIn('Unable to detect effective SQL mode', + self.stream.output) + + def test_logs_real_mode(self): + # If "SHOW VARIABLES LIKE 'sql_mode'" results in a value, then + # we get a log with the value. + + self._call_set_callback(realmode='SOMETHING') + + self.assertIn('MySQL server mode set to SOMETHING', + self.stream.output) + + def test_warning_when_not_traditional(self): + # If "SHOW VARIABLES LIKE 'sql_mode'" results in a value that doesn't + # include 'TRADITIONAL', then a warning is logged. + + self._call_set_callback(realmode='NOT_TRADIT') + + self.assertIn("consider enabling TRADITIONAL or STRICT_ALL_TABLES", + self.stream.output) + + def test_no_warning_when_traditional(self): + # If "SHOW VARIABLES LIKE 'sql_mode'" results in a value that includes + # 'TRADITIONAL', then no warning is logged. + + self._call_set_callback(realmode='TRADITIONAL') + + self.assertNotIn("consider enabling TRADITIONAL or STRICT_ALL_TABLES", + self.stream.output) + + def test_no_warning_when_strict_all_tables(self): + # If "SHOW VARIABLES LIKE 'sql_mode'" results in a value that includes + # 'STRICT_ALL_TABLES', then no warning is logged. + + self._call_set_callback(realmode='STRICT_ALL_TABLES') + + self.assertNotIn("consider enabling TRADITIONAL or STRICT_ALL_TABLES", + self.stream.output) + + def test_multiple_executes(self): + # We should only set the sql_mode on a connection once. 
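+        # Follow-up execute() calls must reuse the already-initialized
+        # connection, so only the two plain SELECTs are appended below.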
+ + engine = self._call_set_callback(sql_mode='TRADITIONAL', + realmode='TRADITIONAL') + + engine.execute('SELECT * FROM foo') + engine.execute('SELECT * FROM bar') + + exp_calls = [ + "SET SESSION sql_mode = ['TRADITIONAL']", + "SHOW VARIABLES LIKE 'sql_mode'", + "SELECT * FROM foo", + "SELECT * FROM bar", + ] + self.assertEqual(exp_calls, engine._execs) diff --git a/tests/sqlalchemy/test_utils.py b/tests/sqlalchemy/test_utils.py new file mode 100644 index 0000000..e4704fb --- /dev/null +++ b/tests/sqlalchemy/test_utils.py @@ -0,0 +1,838 @@ +# Copyright (c) 2013 Boris Pavlovic (boris@pavlovic.me). +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import uuid +import warnings + +from migrate.changeset import UniqueConstraint +import mock +from oslotest import base as test_base +import six +from six import moves +from six.moves.urllib import parse +import sqlalchemy +from sqlalchemy.dialects import mysql +from sqlalchemy import Boolean, Index, Integer, DateTime, String +from sqlalchemy import MetaData, Table, Column, ForeignKey +from sqlalchemy.engine import reflection +from sqlalchemy.exc import SAWarning, OperationalError +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.sql import select +from sqlalchemy.types import UserDefinedType, NullType + +from oslo.db import exception +from oslo.db.openstack.common.fixture import moxstubout +from oslo.db.sqlalchemy import migration +from oslo.db.sqlalchemy import models +from oslo.db.sqlalchemy import session +from oslo.db.sqlalchemy import test_migrations +from oslo.db.sqlalchemy import utils +from tests import utils as test_utils + + +SA_VERSION = tuple(map(int, sqlalchemy.__version__.split('.'))) + + +class TestSanitizeDbUrl(test_base.BaseTestCase): + + def test_url_with_cred(self): + db_url = 'myproto://johndoe:secret@localhost/myschema' + expected = 'myproto://****:****@localhost/myschema' + actual = utils.sanitize_db_url(db_url) + self.assertEqual(expected, actual) + + def test_url_with_no_cred(self): + db_url = 'sqlite:///mysqlitefile' + actual = utils.sanitize_db_url(db_url) + self.assertEqual(db_url, actual) + + +class CustomType(UserDefinedType): + """Dummy column type for testing unsupported types.""" + def get_col_spec(self): + return "CustomType" + + +class FakeModel(object): + def __init__(self, values): + self.values = values + + def __getattr__(self, name): + try: + value = self.values[name] + except KeyError: + raise AttributeError(name) + return value + + def __getitem__(self, key): + if key in self.values: + return self.values[key] + else: + raise NotImplementedError() + + def __repr__(self): + return '' % self.values + + +class TestPaginateQuery(test_base.BaseTestCase): + def setUp(self): + super(TestPaginateQuery, self).setUp() + mox_fixture = self.useFixture(moxstubout.MoxStubout()) + self.mox = mox_fixture.mox + self.query = self.mox.CreateMockAnything() + self.mox.StubOutWithMock(sqlalchemy, 'asc') + self.mox.StubOutWithMock(sqlalchemy, 'desc') + self.marker = FakeModel({ + 
'user_id': 'user', + 'project_id': 'p', + 'snapshot_id': 's', + }) + self.model = FakeModel({ + 'user_id': 'user', + 'project_id': 'project', + 'snapshot_id': 'snapshot', + }) + + def test_paginate_query_no_pagination_no_sort_dirs(self): + sqlalchemy.asc('user').AndReturn('asc_3') + self.query.order_by('asc_3').AndReturn(self.query) + sqlalchemy.asc('project').AndReturn('asc_2') + self.query.order_by('asc_2').AndReturn(self.query) + sqlalchemy.asc('snapshot').AndReturn('asc_1') + self.query.order_by('asc_1').AndReturn(self.query) + self.query.limit(5).AndReturn(self.query) + self.mox.ReplayAll() + utils.paginate_query(self.query, self.model, 5, + ['user_id', 'project_id', 'snapshot_id']) + + def test_paginate_query_no_pagination(self): + sqlalchemy.asc('user').AndReturn('asc') + self.query.order_by('asc').AndReturn(self.query) + sqlalchemy.desc('project').AndReturn('desc') + self.query.order_by('desc').AndReturn(self.query) + self.query.limit(5).AndReturn(self.query) + self.mox.ReplayAll() + utils.paginate_query(self.query, self.model, 5, + ['user_id', 'project_id'], + sort_dirs=['asc', 'desc']) + + def test_paginate_query_attribute_error(self): + sqlalchemy.asc('user').AndReturn('asc') + self.query.order_by('asc').AndReturn(self.query) + self.mox.ReplayAll() + self.assertRaises(utils.InvalidSortKey, + utils.paginate_query, self.query, + self.model, 5, ['user_id', 'non-existent key']) + + def test_paginate_query_assertion_error(self): + self.mox.ReplayAll() + self.assertRaises(AssertionError, + utils.paginate_query, self.query, + self.model, 5, ['user_id'], + marker=self.marker, + sort_dir='asc', sort_dirs=['asc']) + + def test_paginate_query_assertion_error_2(self): + self.mox.ReplayAll() + self.assertRaises(AssertionError, + utils.paginate_query, self.query, + self.model, 5, ['user_id'], + marker=self.marker, + sort_dir=None, sort_dirs=['asc', 'desk']) + + def test_paginate_query(self): + sqlalchemy.asc('user').AndReturn('asc_1') + self.query.order_by('asc_1').AndReturn(self.query) + sqlalchemy.desc('project').AndReturn('desc_1') + self.query.order_by('desc_1').AndReturn(self.query) + self.mox.StubOutWithMock(sqlalchemy.sql, 'and_') + sqlalchemy.sql.and_(False).AndReturn('some_crit') + sqlalchemy.sql.and_(True, False).AndReturn('another_crit') + self.mox.StubOutWithMock(sqlalchemy.sql, 'or_') + sqlalchemy.sql.or_('some_crit', 'another_crit').AndReturn('some_f') + self.query.filter('some_f').AndReturn(self.query) + self.query.limit(5).AndReturn(self.query) + self.mox.ReplayAll() + utils.paginate_query(self.query, self.model, 5, + ['user_id', 'project_id'], + marker=self.marker, + sort_dirs=['asc', 'desc']) + + def test_paginate_query_value_error(self): + sqlalchemy.asc('user').AndReturn('asc_1') + self.query.order_by('asc_1').AndReturn(self.query) + self.mox.ReplayAll() + self.assertRaises(ValueError, utils.paginate_query, + self.query, self.model, 5, ['user_id', 'project_id'], + marker=self.marker, sort_dirs=['asc', 'mixed']) + + +class TestMigrationUtils(test_migrations.BaseMigrationTestCase): + """Class for testing utils that are used in db migrations.""" + + def setUp(self): + super(TestMigrationUtils, self).setUp() + migration.patch_migrate() + + def _populate_db_for_drop_duplicate_entries(self, engine, meta, + table_name): + values = [ + {'id': 11, 'a': 3, 'b': 10, 'c': 'abcdef'}, + {'id': 12, 'a': 5, 'b': 10, 'c': 'abcdef'}, + {'id': 13, 'a': 6, 'b': 10, 'c': 'abcdef'}, + {'id': 14, 'a': 7, 'b': 10, 'c': 'abcdef'}, + {'id': 21, 'a': 1, 'b': 20, 'c': 'aa'}, + {'id': 31, 'a': 1, 'b': 
20, 'c': 'bb'}, + {'id': 41, 'a': 1, 'b': 30, 'c': 'aef'}, + {'id': 42, 'a': 2, 'b': 30, 'c': 'aef'}, + {'id': 43, 'a': 3, 'b': 30, 'c': 'aef'} + ] + + test_table = Table(table_name, meta, + Column('id', Integer, primary_key=True, + nullable=False), + Column('a', Integer), + Column('b', Integer), + Column('c', String(255)), + Column('deleted', Integer, default=0), + Column('deleted_at', DateTime), + Column('updated_at', DateTime)) + + test_table.create() + engine.execute(test_table.insert(), values) + return test_table, values + + def test_drop_old_duplicate_entries_from_table(self): + table_name = "__test_tmp_table__" + + for engine in self.engines.values(): + meta = MetaData() + meta.bind = engine + test_table, values = self._populate_db_for_drop_duplicate_entries( + engine, meta, table_name) + utils.drop_old_duplicate_entries_from_table( + engine, table_name, False, 'b', 'c') + + uniq_values = set() + expected_ids = [] + for value in sorted(values, key=lambda x: x['id'], reverse=True): + uniq_value = (('b', value['b']), ('c', value['c'])) + if uniq_value in uniq_values: + continue + uniq_values.add(uniq_value) + expected_ids.append(value['id']) + + real_ids = [row[0] for row in + engine.execute(select([test_table.c.id])).fetchall()] + + self.assertEqual(len(real_ids), len(expected_ids)) + for id_ in expected_ids: + self.assertTrue(id_ in real_ids) + + def test_drop_old_duplicate_entries_from_table_soft_delete(self): + table_name = "__test_tmp_table__" + + for engine in self.engines.values(): + meta = MetaData() + meta.bind = engine + table, values = self._populate_db_for_drop_duplicate_entries( + engine, meta, table_name) + utils.drop_old_duplicate_entries_from_table(engine, table_name, + True, 'b', 'c') + uniq_values = set() + expected_values = [] + soft_deleted_values = [] + + for value in sorted(values, key=lambda x: x['id'], reverse=True): + uniq_value = (('b', value['b']), ('c', value['c'])) + if uniq_value in uniq_values: + soft_deleted_values.append(value) + continue + uniq_values.add(uniq_value) + expected_values.append(value) + + base_select = table.select() + + rows_select = base_select.where(table.c.deleted != table.c.id) + row_ids = [row['id'] for row in + engine.execute(rows_select).fetchall()] + self.assertEqual(len(row_ids), len(expected_values)) + for value in expected_values: + self.assertTrue(value['id'] in row_ids) + + deleted_rows_select = base_select.where( + table.c.deleted == table.c.id) + deleted_rows_ids = [row['id'] for row in + engine.execute(deleted_rows_select).fetchall()] + self.assertEqual(len(deleted_rows_ids), + len(values) - len(row_ids)) + for value in soft_deleted_values: + self.assertTrue(value['id'] in deleted_rows_ids) + + def test_change_deleted_column_type_does_not_drop_index(self): + table_name = 'abc' + for engine in self.engines.values(): + meta = MetaData(bind=engine) + + indexes = { + 'idx_a_deleted': ['a', 'deleted'], + 'idx_b_deleted': ['b', 'deleted'], + 'idx_a': ['a'] + } + + index_instances = [Index(name, *columns) + for name, columns in six.iteritems(indexes)] + + table = Table(table_name, meta, + Column('id', Integer, primary_key=True), + Column('a', String(255)), + Column('b', String(255)), + Column('deleted', Boolean), + *index_instances) + table.create() + utils.change_deleted_column_type_to_id_type(engine, table_name) + utils.change_deleted_column_type_to_boolean(engine, table_name) + + insp = reflection.Inspector.from_engine(engine) + real_indexes = insp.get_indexes(table_name) + self.assertEqual(len(real_indexes), 3) + for index 
in real_indexes: + name = index['name'] + self.assertIn(name, indexes) + self.assertEqual(set(index['column_names']), + set(indexes[name])) + + def test_change_deleted_column_type_to_id_type_integer(self): + table_name = 'abc' + for engine in self.engines.values(): + meta = MetaData() + meta.bind = engine + table = Table(table_name, meta, + Column('id', Integer, primary_key=True), + Column('deleted', Boolean)) + table.create() + utils.change_deleted_column_type_to_id_type(engine, table_name) + + table = utils.get_table(engine, table_name) + self.assertTrue(isinstance(table.c.deleted.type, Integer)) + + def test_change_deleted_column_type_to_id_type_string(self): + table_name = 'abc' + for engine in self.engines.values(): + meta = MetaData() + meta.bind = engine + table = Table(table_name, meta, + Column('id', String(255), primary_key=True), + Column('deleted', Boolean)) + table.create() + utils.change_deleted_column_type_to_id_type(engine, table_name) + + table = utils.get_table(engine, table_name) + self.assertTrue(isinstance(table.c.deleted.type, String)) + + def test_change_deleted_column_type_to_id_type_custom(self): + table_name = 'abc' + engine = self.engines['sqlite'] + meta = MetaData() + meta.bind = engine + table = Table(table_name, meta, + Column('id', Integer, primary_key=True), + Column('foo', CustomType), + Column('deleted', Boolean)) + table.create() + + # reflection of custom types has been fixed upstream + if SA_VERSION < (0, 9, 0): + self.assertRaises(utils.ColumnError, + utils.change_deleted_column_type_to_id_type, + engine, table_name) + + fooColumn = Column('foo', CustomType()) + utils.change_deleted_column_type_to_id_type(engine, table_name, + foo=fooColumn) + + table = utils.get_table(engine, table_name) + # NOTE(boris-42): There is no way to check has foo type CustomType. + # but sqlalchemy will set it to NullType. 
This has + # been fixed upstream in recent SA versions + if SA_VERSION < (0, 9, 0): + self.assertTrue(isinstance(table.c.foo.type, NullType)) + self.assertTrue(isinstance(table.c.deleted.type, Integer)) + + def test_change_deleted_column_type_to_boolean(self): + table_name = 'abc' + for key, engine in self.engines.items(): + meta = MetaData() + meta.bind = engine + table = Table(table_name, meta, + Column('id', Integer, primary_key=True), + Column('deleted', Integer)) + table.create() + + utils.change_deleted_column_type_to_boolean(engine, table_name) + + table = utils.get_table(engine, table_name) + expected_type = Boolean if key != "mysql" else mysql.TINYINT + self.assertTrue(isinstance(table.c.deleted.type, expected_type)) + + def test_change_deleted_column_type_to_boolean_with_fc(self): + table_name_1 = 'abc' + table_name_2 = 'bcd' + for key, engine in self.engines.items(): + meta = MetaData() + meta.bind = engine + + table_1 = Table(table_name_1, meta, + Column('id', Integer, primary_key=True), + Column('deleted', Integer)) + table_1.create() + + table_2 = Table(table_name_2, meta, + Column('id', Integer, primary_key=True), + Column('foreign_id', Integer, + ForeignKey('%s.id' % table_name_1)), + Column('deleted', Integer)) + table_2.create() + + utils.change_deleted_column_type_to_boolean(engine, table_name_2) + + table = utils.get_table(engine, table_name_2) + expected_type = Boolean if key != "mysql" else mysql.TINYINT + self.assertTrue(isinstance(table.c.deleted.type, expected_type)) + + def test_change_deleted_column_type_to_boolean_type_custom(self): + table_name = 'abc' + engine = self.engines['sqlite'] + meta = MetaData() + meta.bind = engine + table = Table(table_name, meta, + Column('id', Integer, primary_key=True), + Column('foo', CustomType), + Column('deleted', Integer)) + table.create() + + # reflection of custom types has been fixed upstream + if SA_VERSION < (0, 9, 0): + self.assertRaises(utils.ColumnError, + utils.change_deleted_column_type_to_boolean, + engine, table_name) + + fooColumn = Column('foo', CustomType()) + utils.change_deleted_column_type_to_boolean(engine, table_name, + foo=fooColumn) + + table = utils.get_table(engine, table_name) + # NOTE(boris-42): There is no way to check has foo type CustomType. + # but sqlalchemy will set it to NullType. This has + # been fixed upstream in recent SA versions + if SA_VERSION < (0, 9, 0): + self.assertTrue(isinstance(table.c.foo.type, NullType)) + self.assertTrue(isinstance(table.c.deleted.type, Boolean)) + + def test_utils_drop_unique_constraint(self): + table_name = "__test_tmp_table__" + uc_name = 'uniq_foo' + values = [ + {'id': 1, 'a': 3, 'foo': 10}, + {'id': 2, 'a': 2, 'foo': 20}, + {'id': 3, 'a': 1, 'foo': 30}, + ] + for engine in self.engines.values(): + meta = MetaData() + meta.bind = engine + test_table = Table( + table_name, meta, + Column('id', Integer, primary_key=True, nullable=False), + Column('a', Integer), + Column('foo', Integer), + UniqueConstraint('a', name='uniq_a'), + UniqueConstraint('foo', name=uc_name), + ) + test_table.create() + + engine.execute(test_table.insert(), values) + # NOTE(boris-42): This method is generic UC dropper. + utils.drop_unique_constraint(engine, table_name, uc_name, 'foo') + + s = test_table.select().order_by(test_table.c.id) + rows = engine.execute(s).fetchall() + + for i in moves.range(len(values)): + v = values[i] + self.assertEqual((v['id'], v['a'], v['foo']), rows[i]) + + # NOTE(boris-42): Update data about Table from DB. 
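+            # (autoload=True re-reflects the schema, so the dropped
+            # constraint must no longer be visible on the Table object.)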
+ meta = MetaData() + meta.bind = engine + test_table = Table(table_name, meta, autoload=True) + constraints = [c for c in test_table.constraints + if c.name == uc_name] + self.assertEqual(len(constraints), 0) + self.assertEqual(len(test_table.constraints), 1) + + test_table.drop() + + def test_util_drop_unique_constraint_with_not_supported_sqlite_type(self): + table_name = "__test_tmp_table__" + uc_name = 'uniq_foo' + values = [ + {'id': 1, 'a': 3, 'foo': 10}, + {'id': 2, 'a': 2, 'foo': 20}, + {'id': 3, 'a': 1, 'foo': 30} + ] + + engine = self.engines['sqlite'] + meta = MetaData(bind=engine) + + test_table = Table( + table_name, meta, + Column('id', Integer, primary_key=True, nullable=False), + Column('a', Integer), + Column('foo', CustomType, default=0), + UniqueConstraint('a', name='uniq_a'), + UniqueConstraint('foo', name=uc_name), + ) + test_table.create() + + engine.execute(test_table.insert(), values) + warnings.simplefilter("ignore", SAWarning) + + # reflection of custom types has been fixed upstream + if SA_VERSION < (0, 9, 0): + # NOTE(boris-42): Missing info about column `foo` that has + # unsupported type CustomType. + self.assertRaises(utils.ColumnError, + utils.drop_unique_constraint, + engine, table_name, uc_name, 'foo') + + # NOTE(boris-42): Wrong type of foo instance. it should be + # instance of sqlalchemy.Column. + self.assertRaises(utils.ColumnError, + utils.drop_unique_constraint, + engine, table_name, uc_name, 'foo', + foo=Integer()) + + foo = Column('foo', CustomType, default=0) + utils.drop_unique_constraint( + engine, table_name, uc_name, 'foo', foo=foo) + + s = test_table.select().order_by(test_table.c.id) + rows = engine.execute(s).fetchall() + + for i in moves.range(len(values)): + v = values[i] + self.assertEqual((v['id'], v['a'], v['foo']), rows[i]) + + # NOTE(boris-42): Update data about Table from DB. 
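+        # Reflect the table once more so the assertions below run against
+        # the current schema rather than the stale Table object.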
+ meta = MetaData(bind=engine) + test_table = Table(table_name, meta, autoload=True) + constraints = [c for c in test_table.constraints if c.name == uc_name] + self.assertEqual(len(constraints), 0) + self.assertEqual(len(test_table.constraints), 1) + test_table.drop() + + def test_drop_unique_constraint_in_sqlite_fk_recreate(self): + engine = self.engines['sqlite'] + meta = MetaData() + meta.bind = engine + parent_table = Table( + 'table0', meta, + Column('id', Integer, primary_key=True), + Column('foo', Integer), + ) + parent_table.create() + table_name = 'table1' + table = Table( + table_name, meta, + Column('id', Integer, primary_key=True), + Column('baz', Integer), + Column('bar', Integer, ForeignKey("table0.id")), + UniqueConstraint('baz', name='constr1') + ) + table.create() + utils.drop_unique_constraint(engine, table_name, 'constr1', 'baz') + + insp = reflection.Inspector.from_engine(engine) + f_keys = insp.get_foreign_keys(table_name) + self.assertEqual(len(f_keys), 1) + f_key = f_keys[0] + self.assertEqual(f_key['referred_table'], 'table0') + self.assertEqual(f_key['referred_columns'], ['id']) + self.assertEqual(f_key['constrained_columns'], ['bar']) + + def test_insert_from_select(self): + insert_table_name = "__test_insert_to_table__" + select_table_name = "__test_select_from_table__" + uuidstrs = [] + for unused in range(10): + uuidstrs.append(uuid.uuid4().hex) + for key, engine in self.engines.items(): + meta = MetaData() + meta.bind = engine + conn = engine.connect() + insert_table = Table( + insert_table_name, meta, + Column('id', Integer, primary_key=True, + nullable=False, autoincrement=True), + Column('uuid', String(36), nullable=False)) + select_table = Table( + select_table_name, meta, + Column('id', Integer, primary_key=True, + nullable=False, autoincrement=True), + Column('uuid', String(36), nullable=False)) + + insert_table.create() + select_table.create() + # Add 10 rows to select_table + for uuidstr in uuidstrs: + ins_stmt = select_table.insert().values(uuid=uuidstr) + conn.execute(ins_stmt) + + # Select 4 rows in one chunk from select_table + column = select_table.c.id + query_insert = select([select_table], + select_table.c.id < 5).order_by(column) + insert_statement = utils.InsertFromSelect(insert_table, + query_insert) + result_insert = conn.execute(insert_statement) + # Verify we insert 4 rows + self.assertEqual(result_insert.rowcount, 4) + + query_all = select([insert_table]).where( + insert_table.c.uuid.in_(uuidstrs)) + rows = conn.execute(query_all).fetchall() + # Verify we really have 4 rows in insert_table + self.assertEqual(len(rows), 4) + + insert_table.drop() + select_table.drop() + + +class TestConnectionUtils(test_utils.BaseTestCase): + + def setUp(self): + super(TestConnectionUtils, self).setUp() + + self.full_credentials = {'backend': 'mysql', + 'database': 'test', + 'user': 'dude', + 'passwd': 'pass'} + + self.connect_string = 'mysql://dude:pass@localhost/test' + + def test_connect_string(self): + connect_string = utils.get_connect_string(**self.full_credentials) + self.assertEqual(connect_string, self.connect_string) + + def test_connect_string_sqlite(self): + sqlite_credentials = {'backend': 'sqlite', 'database': 'test.db'} + connect_string = utils.get_connect_string(**sqlite_credentials) + self.assertEqual(connect_string, 'sqlite:///test.db') + + def test_is_backend_avail(self): + self.mox.StubOutWithMock(sqlalchemy.engine.base.Engine, 'connect') + fake_connection = self.mox.CreateMockAnything() + fake_connection.close() + 
+        sqlalchemy.engine.base.Engine.connect().AndReturn(fake_connection)
+        self.mox.ReplayAll()
+
+        self.assertTrue(utils.is_backend_avail(**self.full_credentials))
+
+    def test_is_backend_unavail(self):
+        self.mox.StubOutWithMock(sqlalchemy.engine.base.Engine, 'connect')
+        sqlalchemy.engine.base.Engine.connect().AndRaise(OperationalError)
+        self.mox.ReplayAll()
+
+        self.assertFalse(utils.is_backend_avail(**self.full_credentials))
+
+    def test_get_db_connection_info(self):
+        conn_pieces = parse.urlparse(self.connect_string)
+        self.assertEqual(utils.get_db_connection_info(conn_pieces),
+                         ('dude', 'pass', 'test', 'localhost'))
+
+
+class TestRaiseDuplicateEntryError(test_base.BaseTestCase):
+    def _test_impl(self, engine_name, error_msg):
+        try:
+            error = sqlalchemy.exc.IntegrityError('test', 'test', error_msg)
+            session._raise_if_duplicate_entry_error(error, engine_name)
+        except exception.DBDuplicateEntry as e:
+            self.assertEqual(e.columns, ['a', 'b'])
+        else:
+            self.fail('DBDuplicateEntry was not raised')
+
+    def test_sqlite(self):
+        self._test_impl(
+            'sqlite',
+            '(IntegrityError) column a, b are not unique'
+        )
+
+    def test_sqlite_3_7_16_or_3_8_2_and_higher(self):
+        self._test_impl(
+            'sqlite',
+            '(IntegrityError) UNIQUE constraint failed: tbl.a, tbl.b'
+        )
+
+    def test_mysql(self):
+        self._test_impl(
+            'mysql',
+            '(IntegrityError) (1062, "Duplicate entry '
+            '\'2-3\' for key \'uniq_tbl0a0b\'")'
+        )
+
+    def test_postgresql(self):
+        self._test_impl(
+            'postgresql',
+            '(IntegrityError) duplicate key value violates unique constraint '
+            '"uniq_tbl0a0b"'
+            '\nDETAIL: Key (a, b)=(2, 3) already exists.\n'
+        )
+
+    def test_unsupported_backend_returns_none(self):
+        error = sqlalchemy.exc.IntegrityError('test', 'test', 'test')
+        rv = session._raise_if_duplicate_entry_error(error, 'oracle')
+
+        self.assertIsNone(rv)
+
+
+class MyModelSoftDeletedProjectId(declarative_base(), models.ModelBase,
+                                  models.SoftDeleteMixin):
+    __tablename__ = 'soft_deleted_project_id_test_model'
+    id = Column(Integer, primary_key=True)
+    project_id = Column(Integer)
+
+
+class MyModel(declarative_base(), models.ModelBase):
+    __tablename__ = 'test_model'
+    id = Column(Integer, primary_key=True)
+
+
+class MyModelSoftDeleted(declarative_base(), models.ModelBase,
+                         models.SoftDeleteMixin):
+    __tablename__ = 'soft_deleted_test_model'
+    id = Column(Integer, primary_key=True)
+
+
+class TestModelQuery(test_base.BaseTestCase):
+
+    def setUp(self):
+        super(TestModelQuery, self).setUp()
+
+        self.session = mock.MagicMock()
+        # Make query() and filter() return the session mock itself so that
+        # chained calls can be inspected on a single object.
+        self.session.query.return_value = self.session.query
+        self.session.query.filter.return_value = self.session.query
+        self.user_context = mock.MagicMock(is_admin=False, read_deleted='yes',
+                                           user_id=42, project_id=43)
+
+    def test_wrong_model(self):
+        self.assertRaises(TypeError, utils.model_query, self.user_context,
+                          FakeModel, session=self.session)
+
+    def test_no_soft_deleted(self):
+        self.assertRaises(ValueError, utils.model_query, self.user_context,
+                          MyModel, session=self.session)
+
+    def test_read_deleted_only(self):
+        mock_query = utils.model_query(
+            self.user_context, MyModelSoftDeleted,
+            session=self.session, read_deleted='only')
+
+        deleted_filter = mock_query.filter.call_args[0][0]
+        self.assertEqual(str(deleted_filter),
+                         'soft_deleted_test_model.deleted != :deleted_1')
+        self.assertEqual(deleted_filter.right.value,
+                         MyModelSoftDeleted.__mapper__.c.deleted.default.arg)
+
+    def test_read_deleted_no(self):
+        mock_query = utils.model_query(
+            self.user_context, MyModelSoftDeleted,
+            session=self.session, read_deleted='no')
+
+        deleted_filter = mock_query.filter.call_args[0][0]
+        self.assertEqual(str(deleted_filter),
+                         'soft_deleted_test_model.deleted = :deleted_1')
+        self.assertEqual(deleted_filter.right.value,
+                         MyModelSoftDeleted.__mapper__.c.deleted.default.arg)
+
+    def test_read_deleted_yes(self):
+        mock_query = utils.model_query(
+            self.user_context, MyModelSoftDeleted,
+            session=self.session, read_deleted='yes')
+
+        self.assertEqual(mock_query.filter.call_count, 0)
+
+    def test_wrong_read_deleted(self):
+        self.assertRaises(ValueError, utils.model_query, self.user_context,
+                          MyModelSoftDeleted, session=self.session,
+                          read_deleted='ololo')
+
+    def test_project_only_true(self):
+        mock_query = utils.model_query(
+            self.user_context, MyModelSoftDeletedProjectId,
+            session=self.session, project_only=True)
+
+        deleted_filter = mock_query.filter.call_args[0][0]
+        self.assertEqual(
+            str(deleted_filter),
+            'soft_deleted_project_id_test_model.project_id = :project_id_1')
+        self.assertEqual(deleted_filter.right.value,
+                         self.user_context.project_id)
+
+    def test_project_filter_wrong_model(self):
+        self.assertRaises(ValueError, utils.model_query, self.user_context,
+                          MyModelSoftDeleted, session=self.session,
+                          project_only=True)
+
+    def test_read_deleted_allow_none(self):
+        mock_query = utils.model_query(
+            self.user_context, MyModelSoftDeletedProjectId,
+            session=self.session, project_only='allow_none')
+
+        self.assertEqual(
+            str(mock_query.filter.call_args[0][0]),
+            'soft_deleted_project_id_test_model.project_id = :project_id_1 OR'
+            ' soft_deleted_project_id_test_model.project_id IS NULL'
+        )
+
+    @mock.patch.object(utils, "_read_deleted_filter")
+    @mock.patch.object(utils, "_project_filter")
+    def test_context_show_deleted(self, _project_filter, _read_deleted_filter):
+        user_context = mock.MagicMock(is_admin=False, show_deleted='yes',
+                                      user_id=42, project_id=43)
+        delattr(user_context, 'read_deleted')
+        _read_deleted_filter.return_value = self.session.query
+        _project_filter.return_value = self.session.query
+        utils.model_query(user_context, MyModel,
+                          args=(MyModel.id,), session=self.session)
+
+        self.session.query.assert_called_with(MyModel.id)
+        _read_deleted_filter.assert_called_with(
+            self.session.query, MyModel, user_context.show_deleted)
+        _project_filter.assert_called_with(
+            self.session.query, MyModel, user_context, False)
+
+    @mock.patch.object(utils, "_read_deleted_filter")
+    @mock.patch.object(utils, "_project_filter")
+    def test_model_query_common(self, _project_filter, _read_deleted_filter):
+        _read_deleted_filter.return_value = self.session.query
+        _project_filter.return_value = self.session.query
+        utils.model_query(self.user_context, MyModel,
+                          args=(MyModel.id,), session=self.session)
+
+        self.session.query.assert_called_with(MyModel.id)
+        _read_deleted_filter.assert_called_with(
+            self.session.query, MyModel, self.user_context.read_deleted)
+        _project_filter.assert_called_with(
+            self.session.query, MyModel, self.user_context, False)
diff --git a/tests/test_api.py b/tests/test_api.py
new file mode 100644
index 0000000..5534757
--- /dev/null
+++ b/tests/test_api.py
@@ -0,0 +1,167 @@
+# Copyright (c) 2013 Rackspace Hosting
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Unit tests for DB API."""
+
+import mock
+
+from oslo.db import api
+from oslo.db import exception
+from oslo.db.openstack.common import importutils
+from tests import utils as test_utils
+
+sqla = importutils.import_module('sqlalchemy')
+
+
+# api.DBAPI resolves a backend module and calls its get_backend() function,
+# so this module can act as its own fake backend.
+def get_backend():
+    return DBAPI()
+
+
+class DBAPI(object):
+    def _api_raise(self, *args, **kwargs):
+        """Simulate raising a database-has-gone-away error.
+
+        This method raises a fake DBConnectionError wrapping a DBAPIError
+        whose error code (2006) matches a valid MySQL "server has gone
+        away" situation. It also decrements the error_counter so that we
+        can artificially keep track of how many times this function is
+        called by the wrapper. When error_counter reaches zero, this
+        function returns True, simulating the database becoming available
+        again and the query succeeding.
+        """
+
+        if self.error_counter > 0:
+            self.error_counter -= 1
+            orig = sqla.exc.DBAPIError(False, False, False)
+            orig.args = [2006, 'Test raise operational error']
+            e = exception.DBConnectionError(orig)
+            raise e
+        else:
+            return True
+
+    def api_raise_default(self, *args, **kwargs):
+        return self._api_raise(*args, **kwargs)
+
+    @api.safe_for_db_retry
+    def api_raise_enable_retry(self, *args, **kwargs):
+        return self._api_raise(*args, **kwargs)
+
+    def api_class_call1(_self, *args, **kwargs):
+        return args, kwargs
+
+
+class DBAPITestCase(test_utils.BaseTestCase):
+    def test_dbapi_full_path_module_method(self):
+        dbapi = api.DBAPI('tests.test_api')
+        result = dbapi.api_class_call1(1, 2, kwarg1='meow')
+        expected = ((1, 2), {'kwarg1': 'meow'})
+        self.assertEqual(expected, result)
+
+    def test_dbapi_unknown_invalid_backend(self):
+        self.assertRaises(ImportError, api.DBAPI, 'tests.unit.db.not_existent')
+
+    def test_dbapi_lazy_loading(self):
+        dbapi = api.DBAPI('tests.test_api', lazy=True)
+
+        self.assertIsNone(dbapi._backend)
+        dbapi.api_class_call1(1, 'abc')
+        self.assertIsNotNone(dbapi._backend)
+
+
+class DBReconnectTestCase(DBAPITestCase):
+    def setUp(self):
+        super(DBReconnectTestCase, self).setUp()
+
+        self.test_db_api = DBAPI()
+        patcher = mock.patch(__name__ + '.get_backend',
+                             return_value=self.test_db_api)
+        patcher.start()
+        self.addCleanup(patcher.stop)
+
+    def test_raise_connection_error(self):
+        self.dbapi = api.DBAPI('sqlalchemy', {'sqlalchemy': __name__})
+
+        self.test_db_api.error_counter = 5
+        self.assertRaises(exception.DBConnectionError, self.dbapi._api_raise)
+
+    def test_raise_connection_error_decorated(self):
+        self.dbapi = api.DBAPI('sqlalchemy', {'sqlalchemy': __name__})
+
+        self.test_db_api.error_counter = 5
+        self.assertRaises(exception.DBConnectionError,
+                          self.dbapi.api_raise_enable_retry)
+        self.assertEqual(4, self.test_db_api.error_counter, 'Unexpected retry')
+
+    def test_raise_connection_error_enabled(self):
+        self.dbapi = api.DBAPI('sqlalchemy',
+                               {'sqlalchemy': __name__},
+                               use_db_reconnect=True)
+
+        self.test_db_api.error_counter = 5
+        self.assertRaises(exception.DBConnectionError,
+                          self.dbapi.api_raise_default)
+        self.assertEqual(4, self.test_db_api.error_counter, 'Unexpected retry')
+
+    def test_retry_one(self):
+        self.dbapi = api.DBAPI('sqlalchemy',
+                               {'sqlalchemy': __name__},
+                               use_db_reconnect=True,
+                               retry_interval=1)
+
+        try:
+            func = self.dbapi.api_raise_enable_retry
+            self.test_db_api.error_counter = 1
+            self.assertTrue(func(), 'Single retry did not succeed.')
+        except Exception:
+            self.fail('Single retry raised an unwrapped error.')
+
+        self.assertEqual(
+            0, self.test_db_api.error_counter,
+            'Counter not decremented, retry logic probably failed.')
+
+    def test_retry_two(self):
+        self.dbapi = api.DBAPI('sqlalchemy',
+                               {'sqlalchemy': __name__},
+                               use_db_reconnect=True,
+                               retry_interval=1,
+                               inc_retry_interval=False)
+
+        try:
+            func = self.dbapi.api_raise_enable_retry
+            self.test_db_api.error_counter = 2
+            self.assertTrue(func(), 'Multiple retry did not succeed.')
+        except Exception:
+            self.fail('Multiple retry raised an unwrapped error.')
+
+        self.assertEqual(
+            0, self.test_db_api.error_counter,
+            'Counter not decremented, retry logic probably failed.')
+
+    def test_retry_until_failure(self):
+        self.dbapi = api.DBAPI('sqlalchemy',
+                               {'sqlalchemy': __name__},
+                               use_db_reconnect=True,
+                               retry_interval=1,
+                               inc_retry_interval=False,
+                               max_retries=3)
+
+        func = self.dbapi.api_raise_enable_retry
+        self.test_db_api.error_counter = 5
+        # NOTE: assertRaises() forwards extra positional arguments to the
+        # callable, so a failure message must not be passed here.
+        self.assertRaises(exception.DBError, func)
+
+        self.assertNotEqual(
+            0, self.test_db_api.error_counter,
+            'Retry did not stop after max_retries iterations.')
diff --git a/tests/test_db.py b/tests/test_db.py
deleted file mode 100644
index 6156b0e..0000000
--- a/tests/test_db.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-test_db
-----------------------------------
-
-Tests for `db` module.
-"""
-
-from . import base
-
-
-class TestDb(base.TestCase):
-
-    def test_something(self):
-        pass
\ No newline at end of file
diff --git a/tests/unit/db/__init__.py b/tests/unit/db/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/tests/unit/db/sqlalchemy/__init__.py b/tests/unit/db/sqlalchemy/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/tests/unit/db/sqlalchemy/test_migrate.py b/tests/unit/db/sqlalchemy/test_migrate.py
deleted file mode 100644
index 23833e1..0000000
--- a/tests/unit/db/sqlalchemy/test_migrate.py
+++ /dev/null
@@ -1,97 +0,0 @@
-# Copyright (c) 2013 OpenStack Foundation
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
- -from migrate.changeset.constraint import UniqueConstraint -from migrate.changeset.databases import sqlite -import sqlalchemy as sa - -from oslo.db.sqlalchemy import migration -from oslo.db.sqlalchemy import test_base - - -def uniques(*constraints): - """Make a sequence of UniqueConstraint instances easily comparable - - Convert a sequence of UniqueConstraint instances into a set of - tuples of form (constraint_name, (constraint_columns)) so that - assertEqual() will be able to compare sets of unique constraints - - """ - - return set((uc.name, tuple(uc.columns.keys())) for uc in constraints) - - -class TestSqliteUniqueConstraints(test_base.DbTestCase): - def setUp(self): - super(TestSqliteUniqueConstraints, self).setUp() - - migration.patch_migrate() - - self.helper = sqlite.SQLiteHelper() - - test_table = sa.Table( - 'test_table', - sa.schema.MetaData(bind=self.engine), - sa.Column('a', sa.Integer), - sa.Column('b', sa.String(10)), - sa.Column('c', sa.Integer), - sa.UniqueConstraint('a', 'b', name='unique_a_b'), - sa.UniqueConstraint('b', 'c', name='unique_b_c') - ) - test_table.create() - self.addCleanup(test_table.drop) - # NOTE(rpodolyaka): it's important to use the reflected table here - # rather than original one because this is what - # we actually do in db migrations code - self.reflected_table = sa.Table( - 'test_table', - sa.schema.MetaData(bind=self.engine), - autoload=True - ) - - @test_base.backend_specific('sqlite') - def test_get_unique_constraints(self): - table = self.reflected_table - - existing = uniques(*self.helper._get_unique_constraints(table)) - should_be = uniques( - sa.UniqueConstraint(table.c.a, table.c.b, name='unique_a_b'), - sa.UniqueConstraint(table.c.b, table.c.c, name='unique_b_c'), - ) - self.assertEqual(should_be, existing) - - @test_base.backend_specific('sqlite') - def test_add_unique_constraint(self): - table = self.reflected_table - UniqueConstraint(table.c.a, table.c.c, name='unique_a_c').create() - - existing = uniques(*self.helper._get_unique_constraints(table)) - should_be = uniques( - sa.UniqueConstraint(table.c.a, table.c.b, name='unique_a_b'), - sa.UniqueConstraint(table.c.b, table.c.c, name='unique_b_c'), - sa.UniqueConstraint(table.c.a, table.c.c, name='unique_a_c'), - ) - self.assertEqual(should_be, existing) - - @test_base.backend_specific('sqlite') - def test_drop_unique_constraint(self): - table = self.reflected_table - UniqueConstraint(table.c.a, table.c.b, name='unique_a_b').drop() - - existing = uniques(*self.helper._get_unique_constraints(table)) - should_be = uniques( - sa.UniqueConstraint(table.c.b, table.c.c, name='unique_b_c'), - ) - self.assertEqual(should_be, existing) diff --git a/tests/unit/db/sqlalchemy/test_migrate_cli.py b/tests/unit/db/sqlalchemy/test_migrate_cli.py deleted file mode 100644 index f23ee87..0000000 --- a/tests/unit/db/sqlalchemy/test_migrate_cli.py +++ /dev/null @@ -1,217 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import mock -from oslotest import base as test_base - -from oslo.db.sqlalchemy.migration_cli import ext_alembic -from oslo.db.sqlalchemy.migration_cli import ext_migrate -from oslo.db.sqlalchemy.migration_cli import manager - - -class MockWithCmp(mock.MagicMock): - - order = 0 - - def __cmp__(self, other): - return self.order > other.order - - -@mock.patch(('oslo.db.sqlalchemy.migration_cli.' - 'ext_alembic.alembic.command')) -class TestAlembicExtension(test_base.BaseTestCase): - - def setUp(self): - self.migration_config = {'alembic_ini_path': '.', - 'db_url': 'sqlite://'} - self.alembic = ext_alembic.AlembicExtension(self.migration_config) - super(TestAlembicExtension, self).setUp() - - def test_check_enabled_true(self, command): - """Verifies that enabled returns True on non empty - alembic_ini_path conf variable - """ - self.assertTrue(self.alembic.enabled) - - def test_check_enabled_false(self, command): - """Verifies enabled returns False on empty - alembic_ini_path variable - """ - self.migration_config['alembic_ini_path'] = '' - alembic = ext_alembic.AlembicExtension(self.migration_config) - self.assertFalse(alembic.enabled) - - def test_upgrade_none(self, command): - self.alembic.upgrade(None) - command.upgrade.assert_called_once_with(self.alembic.config, 'head') - - def test_upgrade_normal(self, command): - self.alembic.upgrade('131daa') - command.upgrade.assert_called_once_with(self.alembic.config, '131daa') - - def test_downgrade_none(self, command): - self.alembic.downgrade(None) - command.downgrade.assert_called_once_with(self.alembic.config, 'base') - - def test_downgrade_int(self, command): - self.alembic.downgrade(111) - command.downgrade.assert_called_once_with(self.alembic.config, 'base') - - def test_downgrade_normal(self, command): - self.alembic.downgrade('131daa') - command.downgrade.assert_called_once_with( - self.alembic.config, '131daa') - - def test_revision(self, command): - self.alembic.revision(message='test', autogenerate=True) - command.revision.assert_called_once_with( - self.alembic.config, message='test', autogenerate=True) - - def test_stamp(self, command): - self.alembic.stamp('stamp') - command.stamp.assert_called_once_with( - self.alembic.config, revision='stamp') - - def test_version(self, command): - version = self.alembic.version() - self.assertIsNone(version) - - -@mock.patch(('oslo.db.sqlalchemy.migration_cli.' 
- 'ext_migrate.migration')) -class TestMigrateExtension(test_base.BaseTestCase): - - def setUp(self): - self.migration_config = {'migration_repo_path': '.', - 'db_url': 'sqlite://'} - self.migrate = ext_migrate.MigrateExtension(self.migration_config) - super(TestMigrateExtension, self).setUp() - - def test_check_enabled_true(self, migration): - self.assertTrue(self.migrate.enabled) - - def test_check_enabled_false(self, migration): - self.migration_config['migration_repo_path'] = '' - migrate = ext_migrate.MigrateExtension(self.migration_config) - self.assertFalse(migrate.enabled) - - def test_upgrade_head(self, migration): - self.migrate.upgrade('head') - migration.db_sync.assert_called_once_with( - self.migrate.engine, self.migrate.repository, None, init_version=0) - - def test_upgrade_normal(self, migration): - self.migrate.upgrade(111) - migration.db_sync.assert_called_once_with( - mock.ANY, self.migrate.repository, 111, init_version=0) - - def test_downgrade_init_version_from_base(self, migration): - self.migrate.downgrade('base') - migration.db_sync.assert_called_once_with( - self.migrate.engine, self.migrate.repository, mock.ANY, - init_version=mock.ANY) - - def test_downgrade_init_version_from_none(self, migration): - self.migrate.downgrade(None) - migration.db_sync.assert_called_once_with( - self.migrate.engine, self.migrate.repository, mock.ANY, - init_version=mock.ANY) - - def test_downgrade_normal(self, migration): - self.migrate.downgrade(101) - migration.db_sync.assert_called_once_with( - self.migrate.engine, self.migrate.repository, 101, init_version=0) - - def test_version(self, migration): - self.migrate.version() - migration.db_version.assert_called_once_with( - self.migrate.engine, self.migrate.repository, init_version=0) - - def test_change_init_version(self, migration): - self.migration_config['init_version'] = 101 - migrate = ext_migrate.MigrateExtension(self.migration_config) - migrate.downgrade(None) - migration.db_sync.assert_called_once_with( - migrate.engine, - self.migrate.repository, - self.migration_config['init_version'], - init_version=self.migration_config['init_version']) - - -class TestMigrationManager(test_base.BaseTestCase): - - def setUp(self): - self.migration_config = {'alembic_ini_path': '.', - 'migrate_repo_path': '.', - 'db_url': 'sqlite://'} - self.migration_manager = manager.MigrationManager( - self.migration_config) - self.ext = mock.Mock() - self.migration_manager._manager.extensions = [self.ext] - super(TestMigrationManager, self).setUp() - - def test_manager_update(self): - self.migration_manager.upgrade('head') - self.ext.obj.upgrade.assert_called_once_with('head') - - def test_manager_update_revision_none(self): - self.migration_manager.upgrade(None) - self.ext.obj.upgrade.assert_called_once_with(None) - - def test_downgrade_normal_revision(self): - self.migration_manager.downgrade('111abcd') - self.ext.obj.downgrade.assert_called_once_with('111abcd') - - def test_version(self): - self.migration_manager.version() - self.ext.obj.version.assert_called_once_with() - - def test_revision_message_autogenerate(self): - self.migration_manager.revision('test', True) - self.ext.obj.revision.assert_called_once_with('test', True) - - def test_revision_only_message(self): - self.migration_manager.revision('test', False) - self.ext.obj.revision.assert_called_once_with('test', False) - - def test_stamp(self): - self.migration_manager.stamp('stamp') - self.ext.obj.stamp.assert_called_once_with('stamp') - - -class 
TestMigrationRightOrder(test_base.BaseTestCase): - - def setUp(self): - self.migration_config = {'alembic_ini_path': '.', - 'migrate_repo_path': '.', - 'db_url': 'sqlite://'} - self.migration_manager = manager.MigrationManager( - self.migration_config) - self.first_ext = MockWithCmp() - self.first_ext.obj.order = 1 - self.first_ext.obj.upgrade.return_value = 100 - self.first_ext.obj.downgrade.return_value = 0 - self.second_ext = MockWithCmp() - self.second_ext.obj.order = 2 - self.second_ext.obj.upgrade.return_value = 200 - self.second_ext.obj.downgrade.return_value = 100 - self.migration_manager._manager.extensions = [self.first_ext, - self.second_ext] - super(TestMigrationRightOrder, self).setUp() - - def test_upgrade_right_order(self): - results = self.migration_manager.upgrade(None) - self.assertEqual(results, [100, 200]) - - def test_downgrade_right_order(self): - results = self.migration_manager.downgrade(None) - self.assertEqual(results, [100, 0]) diff --git a/tests/unit/db/sqlalchemy/test_migration_common.py b/tests/unit/db/sqlalchemy/test_migration_common.py deleted file mode 100644 index 9f05aff..0000000 --- a/tests/unit/db/sqlalchemy/test_migration_common.py +++ /dev/null @@ -1,211 +0,0 @@ -# Copyright 2013 Mirantis Inc. -# All Rights Reserved -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-# - -import contextlib -import os -import tempfile - -from migrate import exceptions as migrate_exception -from migrate.versioning import api as versioning_api -import mock -import sqlalchemy - -from oslo.db import exception as db_exception -from oslo.db.sqlalchemy import migration -from oslo.db.sqlalchemy import test_base - - -class TestMigrationCommon(test_base.DbTestCase): - def setUp(self): - super(TestMigrationCommon, self).setUp() - - migration._REPOSITORY = None - self.path = tempfile.mkdtemp('test_migration') - self.path1 = tempfile.mkdtemp('test_migration') - self.return_value = '/home/openstack/migrations' - self.return_value1 = '/home/extension/migrations' - self.init_version = 1 - self.test_version = 123 - - self.patcher_repo = mock.patch.object(migration, 'Repository') - self.repository = self.patcher_repo.start() - self.repository.side_effect = [self.return_value, self.return_value1] - - self.mock_api_db = mock.patch.object(versioning_api, 'db_version') - self.mock_api_db_version = self.mock_api_db.start() - self.mock_api_db_version.return_value = self.test_version - - def tearDown(self): - os.rmdir(self.path) - self.mock_api_db.stop() - self.patcher_repo.stop() - super(TestMigrationCommon, self).tearDown() - - def test_find_migrate_repo_path_not_found(self): - self.assertRaises( - db_exception.DbMigrationError, - migration._find_migrate_repo, - "/foo/bar/", - ) - self.assertIsNone(migration._REPOSITORY) - - def test_find_migrate_repo_called_once(self): - my_repository = migration._find_migrate_repo(self.path) - self.repository.assert_called_once_with(self.path) - self.assertEqual(my_repository, self.return_value) - - def test_find_migrate_repo_called_few_times(self): - repo1 = migration._find_migrate_repo(self.path) - repo2 = migration._find_migrate_repo(self.path1) - self.assertNotEqual(repo1, repo2) - - def test_db_version_control(self): - with contextlib.nested( - mock.patch.object(migration, '_find_migrate_repo'), - mock.patch.object(versioning_api, 'version_control'), - ) as (mock_find_repo, mock_version_control): - mock_find_repo.return_value = self.return_value - - version = migration.db_version_control( - self.engine, self.path, self.test_version) - - self.assertEqual(version, self.test_version) - mock_version_control.assert_called_once_with( - self.engine, self.return_value, self.test_version) - - def test_db_version_return(self): - ret_val = migration.db_version(self.engine, self.path, - self.init_version) - self.assertEqual(ret_val, self.test_version) - - def test_db_version_raise_not_controlled_error_first(self): - with mock.patch.object(migration, 'db_version_control') as mock_ver: - - self.mock_api_db_version.side_effect = [ - migrate_exception.DatabaseNotControlledError('oups'), - self.test_version] - - ret_val = migration.db_version(self.engine, self.path, - self.init_version) - self.assertEqual(ret_val, self.test_version) - mock_ver.assert_called_once_with(self.engine, self.path, - version=self.init_version) - - def test_db_version_raise_not_controlled_error_tables(self): - with mock.patch.object(sqlalchemy, 'MetaData') as mock_meta: - self.mock_api_db_version.side_effect = \ - migrate_exception.DatabaseNotControlledError('oups') - my_meta = mock.MagicMock() - my_meta.tables = {'a': 1, 'b': 2} - mock_meta.return_value = my_meta - - self.assertRaises( - db_exception.DbMigrationError, migration.db_version, - self.engine, self.path, self.init_version) - - @mock.patch.object(versioning_api, 'version_control') - def 
test_db_version_raise_not_controlled_error_no_tables(self, mock_vc): - with mock.patch.object(sqlalchemy, 'MetaData') as mock_meta: - self.mock_api_db_version.side_effect = ( - migrate_exception.DatabaseNotControlledError('oups'), - self.init_version) - my_meta = mock.MagicMock() - my_meta.tables = {} - mock_meta.return_value = my_meta - migration.db_version(self.engine, self.path, self.init_version) - - mock_vc.assert_called_once_with(self.engine, self.return_value1, - self.init_version) - - def test_db_sync_wrong_version(self): - self.assertRaises(db_exception.DbMigrationError, - migration.db_sync, self.engine, self.path, 'foo') - - def test_db_sync_upgrade(self): - init_ver = 55 - with contextlib.nested( - mock.patch.object(migration, '_find_migrate_repo'), - mock.patch.object(versioning_api, 'upgrade') - ) as (mock_find_repo, mock_upgrade): - - mock_find_repo.return_value = self.return_value - self.mock_api_db_version.return_value = self.test_version - 1 - - migration.db_sync(self.engine, self.path, self.test_version, - init_ver) - - mock_upgrade.assert_called_once_with( - self.engine, self.return_value, self.test_version) - - def test_db_sync_downgrade(self): - with contextlib.nested( - mock.patch.object(migration, '_find_migrate_repo'), - mock.patch.object(versioning_api, 'downgrade') - ) as (mock_find_repo, mock_downgrade): - - mock_find_repo.return_value = self.return_value - self.mock_api_db_version.return_value = self.test_version + 1 - - migration.db_sync(self.engine, self.path, self.test_version) - - mock_downgrade.assert_called_once_with( - self.engine, self.return_value, self.test_version) - - def test_db_sync_sanity_called(self): - with contextlib.nested( - mock.patch.object(migration, '_find_migrate_repo'), - mock.patch.object(migration, '_db_schema_sanity_check'), - mock.patch.object(versioning_api, 'downgrade') - ) as (mock_find_repo, mock_sanity, mock_downgrade): - - mock_find_repo.return_value = self.return_value - migration.db_sync(self.engine, self.path, self.test_version) - - mock_sanity.assert_called_once() - - def test_db_sync_sanity_skipped(self): - with contextlib.nested( - mock.patch.object(migration, '_find_migrate_repo'), - mock.patch.object(migration, '_db_schema_sanity_check'), - mock.patch.object(versioning_api, 'downgrade') - ) as (mock_find_repo, mock_sanity, mock_downgrade): - - mock_find_repo.return_value = self.return_value - migration.db_sync(self.engine, self.path, self.test_version, False) - - mock_sanity.assert_not_called() - - def test_db_sanity_table_not_utf8(self): - with mock.patch.object(self, 'engine') as mock_eng: - type(mock_eng).name = mock.PropertyMock(return_value='mysql') - mock_eng.execute.return_value = [['table_A', 'latin1'], - ['table_B', 'latin1']] - - self.assertRaises(ValueError, migration._db_schema_sanity_check, - mock_eng) - - def test_db_sanity_table_not_utf8_exclude_migrate_tables(self): - with mock.patch.object(self, 'engine') as mock_eng: - type(mock_eng).name = mock.PropertyMock(return_value='mysql') - # NOTE(morganfainberg): Check both lower and upper case versions - # of the migration table names (validate case insensitivity in - # the sanity check. 
- mock_eng.execute.return_value = [['migrate_version', 'latin1'], - ['alembic_version', 'latin1'], - ['MIGRATE_VERSION', 'latin1'], - ['ALEMBIC_VERSION', 'latin1']] - - migration._db_schema_sanity_check(mock_eng) diff --git a/tests/unit/db/sqlalchemy/test_migrations.py b/tests/unit/db/sqlalchemy/test_migrations.py deleted file mode 100644 index f39bb73..0000000 --- a/tests/unit/db/sqlalchemy/test_migrations.py +++ /dev/null @@ -1,154 +0,0 @@ -# Copyright 2010-2011 OpenStack Foundation -# Copyright 2012-2013 IBM Corp. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import mock -from oslotest import base as test_base - -from oslo.db.sqlalchemy import test_migrations as migrate - - -class TestWalkVersions(test_base.BaseTestCase, migrate.WalkVersionsMixin): - def setUp(self): - super(TestWalkVersions, self).setUp() - self.migration_api = mock.MagicMock() - self.engine = mock.MagicMock() - self.REPOSITORY = mock.MagicMock() - self.INIT_VERSION = 4 - - def test_migrate_up(self): - self.migration_api.db_version.return_value = 141 - - self._migrate_up(self.engine, 141) - - self.migration_api.upgrade.assert_called_with( - self.engine, self.REPOSITORY, 141) - self.migration_api.db_version.assert_called_with( - self.engine, self.REPOSITORY) - - def test_migrate_up_with_data(self): - test_value = {"a": 1, "b": 2} - self.migration_api.db_version.return_value = 141 - self._pre_upgrade_141 = mock.MagicMock() - self._pre_upgrade_141.return_value = test_value - self._check_141 = mock.MagicMock() - - self._migrate_up(self.engine, 141, True) - - self._pre_upgrade_141.assert_called_with(self.engine) - self._check_141.assert_called_with(self.engine, test_value) - - def test_migrate_down(self): - self.migration_api.db_version.return_value = 42 - - self.assertTrue(self._migrate_down(self.engine, 42)) - self.migration_api.db_version.assert_called_with( - self.engine, self.REPOSITORY) - - def test_migrate_down_not_implemented(self): - self.migration_api.downgrade.side_effect = NotImplementedError - self.assertFalse(self._migrate_down(self.engine, 42)) - - def test_migrate_down_with_data(self): - self._post_downgrade_043 = mock.MagicMock() - self.migration_api.db_version.return_value = 42 - - self._migrate_down(self.engine, 42, True) - - self._post_downgrade_043.assert_called_with(self.engine) - - @mock.patch.object(migrate.WalkVersionsMixin, '_migrate_up') - @mock.patch.object(migrate.WalkVersionsMixin, '_migrate_down') - def test_walk_versions_all_default(self, _migrate_up, _migrate_down): - self.REPOSITORY.latest = 20 - self.migration_api.db_version.return_value = self.INIT_VERSION - - self._walk_versions() - - self.migration_api.version_control.assert_called_with( - None, self.REPOSITORY, self.INIT_VERSION) - self.migration_api.db_version.assert_called_with( - None, self.REPOSITORY) - - versions = range(self.INIT_VERSION + 1, self.REPOSITORY.latest + 1) - upgraded = [mock.call(None, v, with_data=True) for v in versions] - self.assertEqual(self._migrate_up.call_args_list, upgraded) - - 
downgraded = [mock.call(None, v - 1) for v in reversed(versions)] - self.assertEqual(self._migrate_down.call_args_list, downgraded) - - @mock.patch.object(migrate.WalkVersionsMixin, '_migrate_up') - @mock.patch.object(migrate.WalkVersionsMixin, '_migrate_down') - def test_walk_versions_all_true(self, _migrate_up, _migrate_down): - self.REPOSITORY.latest = 20 - self.migration_api.db_version.return_value = self.INIT_VERSION - - self._walk_versions(self.engine, snake_walk=True, downgrade=True) - - versions = range(self.INIT_VERSION + 1, self.REPOSITORY.latest + 1) - upgraded = [] - for v in versions: - upgraded.append(mock.call(self.engine, v, with_data=True)) - upgraded.append(mock.call(self.engine, v)) - upgraded.extend( - [mock.call(self.engine, v) for v in reversed(versions)] - ) - self.assertEqual(upgraded, self._migrate_up.call_args_list) - - downgraded_1 = [ - mock.call(self.engine, v - 1, with_data=True) for v in versions - ] - downgraded_2 = [] - for v in reversed(versions): - downgraded_2.append(mock.call(self.engine, v - 1)) - downgraded_2.append(mock.call(self.engine, v - 1)) - downgraded = downgraded_1 + downgraded_2 - self.assertEqual(self._migrate_down.call_args_list, downgraded) - - @mock.patch.object(migrate.WalkVersionsMixin, '_migrate_up') - @mock.patch.object(migrate.WalkVersionsMixin, '_migrate_down') - def test_walk_versions_true_false(self, _migrate_up, _migrate_down): - self.REPOSITORY.latest = 20 - self.migration_api.db_version.return_value = self.INIT_VERSION - - self._walk_versions(self.engine, snake_walk=True, downgrade=False) - - versions = range(self.INIT_VERSION + 1, self.REPOSITORY.latest + 1) - - upgraded = [] - for v in versions: - upgraded.append(mock.call(self.engine, v, with_data=True)) - upgraded.append(mock.call(self.engine, v)) - self.assertEqual(upgraded, self._migrate_up.call_args_list) - - downgraded = [ - mock.call(self.engine, v - 1, with_data=True) for v in versions - ] - self.assertEqual(self._migrate_down.call_args_list, downgraded) - - @mock.patch.object(migrate.WalkVersionsMixin, '_migrate_up') - @mock.patch.object(migrate.WalkVersionsMixin, '_migrate_down') - def test_walk_versions_all_false(self, _migrate_up, _migrate_down): - self.REPOSITORY.latest = 20 - self.migration_api.db_version.return_value = self.INIT_VERSION - - self._walk_versions(self.engine, snake_walk=False, downgrade=False) - - versions = range(self.INIT_VERSION + 1, self.REPOSITORY.latest + 1) - - upgraded = [ - mock.call(self.engine, v, with_data=True) for v in versions - ] - self.assertEqual(upgraded, self._migrate_up.call_args_list) diff --git a/tests/unit/db/sqlalchemy/test_models.py b/tests/unit/db/sqlalchemy/test_models.py deleted file mode 100644 index 8c23da8..0000000 --- a/tests/unit/db/sqlalchemy/test_models.py +++ /dev/null @@ -1,132 +0,0 @@ -# Copyright 2012 Cloudscaling Group, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from oslotest import base as oslo_test -from sqlalchemy import Column -from sqlalchemy import Integer, String -from sqlalchemy.ext.declarative import declarative_base - -from oslo.db.sqlalchemy import models -from oslo.db.sqlalchemy import test_base - - -BASE = declarative_base() - - -class ModelBaseTest(test_base.DbTestCase): - - def test_modelbase_has_dict_methods(self): - dict_methods = ('__getitem__', - '__setitem__', - '__iter__', - 'get', - 'next', - 'update', - 'save', - 'iteritems') - for method in dict_methods: - self.assertTrue(hasattr(models.ModelBase, method)) - - def test_modelbase_set(self): - mb = models.ModelBase() - mb['world'] = 'hello' - self.assertEqual(mb['world'], 'hello') - - def test_modelbase_update(self): - mb = models.ModelBase() - h = {'a': '1', 'b': '2'} - mb.update(h) - for key in h.keys(): - self.assertEqual(mb[key], h[key]) - - def test_modelbase_iteritems(self): - self.skipTest("Requires DB") - mb = models.ModelBase() - h = {'a': '1', 'b': '2'} - mb.update(h) - for key, value in mb.iteritems(): - self.assertEqual(h[key], value) - - def test_modelbase_iter(self): - self.skipTest("Requires DB") - mb = models.ModelBase() - h = {'a': '1', 'b': '2'} - mb.update(h) - i = iter(mb) - - min_items = len(h) - found_items = 0 - while True: - r = next(i, None) - if r is None: - break - - self.assertTrue(r in h) - found_items += 1 - - self.assertEqual(min_items, found_items) - - def test_extra_keys_empty(self): - """Test verifies that by default extra_keys return empty list.""" - mb = models.ModelBase() - self.assertEqual(mb._extra_keys, []) - - def test_extra_keys_defined(self): - """Property _extra_keys will return list with attributes names.""" - ekm = ExtraKeysModel() - self.assertEqual(ekm._extra_keys, ['name']) - - def test_model_with_extra_keys(self): - item = ExtraKeysModel() - data = dict(item) - self.assertEqual(data, {'smth': None, - 'id': None, - 'name': 'NAME'}) - - -class ExtraKeysModel(BASE, models.ModelBase): - __tablename__ = 'test_model' - - id = Column(Integer, primary_key=True) - smth = Column(String(255)) - - @property - def name(self): - return 'NAME' - - @property - def _extra_keys(self): - return ['name'] - - -class TimestampMixinTest(oslo_test.BaseTestCase): - - def test_timestampmixin_attr(self): - - class TestModel(models.ModelBase, models.TimestampMixin): - pass - - dict_methods = ('__getitem__', - '__setitem__', - '__iter__', - 'get', - 'next', - 'update', - 'save', - 'iteritems', - 'created_at', - 'updated_at') - for method in dict_methods: - self.assertTrue(hasattr(TestModel, method)) diff --git a/tests/unit/db/sqlalchemy/test_options.py b/tests/unit/db/sqlalchemy/test_options.py deleted file mode 100644 index 10d6451..0000000 --- a/tests/unit/db/sqlalchemy/test_options.py +++ /dev/null @@ -1,120 +0,0 @@ -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from oslo.config import cfg - -from openstack.common.fixture import config -from tests import utils as test_utils - - -cfg.CONF.import_opt('connection', 'oslo.db.options', - group='database') - - -class DbApiOptionsTestCase(test_utils.BaseTestCase): - def setUp(self): - super(DbApiOptionsTestCase, self).setUp() - - config_fixture = self.useFixture(config.Config()) - self.conf = config_fixture.conf - self.config = config_fixture.config - - def test_deprecated_session_parameters(self): - path = self.create_tempfiles([["tmp", b"""[DEFAULT] -sql_connection=x://y.z -sql_min_pool_size=10 -sql_max_pool_size=20 -sql_max_retries=30 -sql_retry_interval=40 -sql_max_overflow=50 -sql_connection_debug=60 -sql_connection_trace=True -"""]])[0] - self.conf(['--config-file', path]) - self.assertEqual(self.conf.database.connection, 'x://y.z') - self.assertEqual(self.conf.database.min_pool_size, 10) - self.assertEqual(self.conf.database.max_pool_size, 20) - self.assertEqual(self.conf.database.max_retries, 30) - self.assertEqual(self.conf.database.retry_interval, 40) - self.assertEqual(self.conf.database.max_overflow, 50) - self.assertEqual(self.conf.database.connection_debug, 60) - self.assertEqual(self.conf.database.connection_trace, True) - - def test_session_parameters(self): - path = self.create_tempfiles([["tmp", b"""[database] -connection=x://y.z -min_pool_size=10 -max_pool_size=20 -max_retries=30 -retry_interval=40 -max_overflow=50 -connection_debug=60 -connection_trace=True -pool_timeout=7 -"""]])[0] - self.conf(['--config-file', path]) - self.assertEqual(self.conf.database.connection, 'x://y.z') - self.assertEqual(self.conf.database.min_pool_size, 10) - self.assertEqual(self.conf.database.max_pool_size, 20) - self.assertEqual(self.conf.database.max_retries, 30) - self.assertEqual(self.conf.database.retry_interval, 40) - self.assertEqual(self.conf.database.max_overflow, 50) - self.assertEqual(self.conf.database.connection_debug, 60) - self.assertEqual(self.conf.database.connection_trace, True) - self.assertEqual(self.conf.database.pool_timeout, 7) - - def test_dbapi_database_deprecated_parameters(self): - path = self.create_tempfiles([['tmp', b'[DATABASE]\n' - b'sql_connection=fake_connection\n' - b'sql_idle_timeout=100\n' - b'sql_min_pool_size=99\n' - b'sql_max_pool_size=199\n' - b'sql_max_retries=22\n' - b'reconnect_interval=17\n' - b'sqlalchemy_max_overflow=101\n' - b'sqlalchemy_pool_timeout=5\n' - ]])[0] - self.conf(['--config-file', path]) - self.assertEqual(self.conf.database.connection, 'fake_connection') - self.assertEqual(self.conf.database.idle_timeout, 100) - self.assertEqual(self.conf.database.min_pool_size, 99) - self.assertEqual(self.conf.database.max_pool_size, 199) - self.assertEqual(self.conf.database.max_retries, 22) - self.assertEqual(self.conf.database.retry_interval, 17) - self.assertEqual(self.conf.database.max_overflow, 101) - self.assertEqual(self.conf.database.pool_timeout, 5) - - def test_dbapi_database_deprecated_parameters_sql(self): - path = self.create_tempfiles([['tmp', b'[sql]\n' - b'connection=test_sql_connection\n' - b'idle_timeout=99\n' - ]])[0] - self.conf(['--config-file', path]) - self.assertEqual(self.conf.database.connection, 'test_sql_connection') - self.assertEqual(self.conf.database.idle_timeout, 99) - - def test_deprecated_dbapi_parameters(self): - path = self.create_tempfiles([['tmp', b'[DEFAULT]\n' - b'db_backend=test_123\n' - ]])[0] - - self.conf(['--config-file', path]) - self.assertEqual(self.conf.database.backend, 'test_123') - - def 
test_dbapi_parameters(self): - path = self.create_tempfiles([['tmp', b'[database]\n' - b'backend=test_123\n' - ]])[0] - - self.conf(['--config-file', path]) - self.assertEqual(self.conf.database.backend, 'test_123') diff --git a/tests/unit/db/sqlalchemy/test_sqlalchemy.py b/tests/unit/db/sqlalchemy/test_sqlalchemy.py deleted file mode 100644 index 9cda622..0000000 --- a/tests/unit/db/sqlalchemy/test_sqlalchemy.py +++ /dev/null @@ -1,542 +0,0 @@ -# encoding=UTF8 - -# Copyright (c) 2012 Rackspace Hosting -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Unit tests for SQLAlchemy specific code.""" -import logging - -import _mysql_exceptions -import fixtures -import mock -from oslotest import base as oslo_test -import sqlalchemy -from sqlalchemy import Column, MetaData, Table, UniqueConstraint -from sqlalchemy import DateTime, Integer, String -from sqlalchemy import exc as sqla_exc -from sqlalchemy.exc import DataError -from sqlalchemy.ext.declarative import declarative_base - -from oslo.db import exception as db_exc -from oslo.db.sqlalchemy import models -from oslo.db.sqlalchemy import session -from oslo.db.sqlalchemy import test_base - - -BASE = declarative_base() -_TABLE_NAME = '__tmp__test__tmp__' - - -class TmpTable(BASE, models.ModelBase): - __tablename__ = _TABLE_NAME - id = Column(Integer, primary_key=True) - foo = Column(Integer) - - -class SessionErrorWrapperTestCase(test_base.DbTestCase): - def setUp(self): - super(SessionErrorWrapperTestCase, self).setUp() - meta = MetaData() - meta.bind = self.engine - test_table = Table(_TABLE_NAME, meta, - Column('id', Integer, primary_key=True, - nullable=False), - Column('deleted', Integer, default=0), - Column('deleted_at', DateTime), - Column('updated_at', DateTime), - Column('created_at', DateTime), - Column('foo', Integer), - UniqueConstraint('foo', name='uniq_foo')) - test_table.create() - self.addCleanup(test_table.drop) - - def test_flush_wrapper(self): - _session = self.sessionmaker() - - tbl = TmpTable() - tbl.update({'foo': 10}) - tbl.save(_session) - - tbl2 = TmpTable() - tbl2.update({'foo': 10}) - self.assertRaises(db_exc.DBDuplicateEntry, tbl2.save, _session) - - def test_execute_wrapper(self): - _session = self.sessionmaker() - with _session.begin(): - for i in [10, 20]: - tbl = TmpTable() - tbl.update({'foo': i}) - tbl.save(session=_session) - - method = _session.query(TmpTable).\ - filter_by(foo=10).\ - update - self.assertRaises(db_exc.DBDuplicateEntry, - method, {'foo': 20}) - - def test_ibm_db_sa_raise_if_duplicate_entry_error_duplicate(self): - # Tests that the session._raise_if_duplicate_entry_error method - # translates the duplicate entry integrity error for the DB2 engine. 
- statement = ('INSERT INTO key_pairs (created_at, updated_at, ' - 'deleted_at, deleted, name, user_id, fingerprint) VALUES ' - '(?, ?, ?, ?, ?, ?, ?)') - params = ['20130918001123627099', None, None, 0, 'keypair-23474772', - '974a7c9ffde6419f9811fcf94a917f47', - '7d:2c:58:7f:97:66:14:3f:27:c7:09:3c:26:95:66:4d'] - orig = sqla_exc.SQLAlchemyError( - 'SQL0803N One or more values in the INSERT statement, UPDATE ' - 'statement, or foreign key update caused by a DELETE statement are' - ' not valid because the primary key, unique constraint or unique ' - 'index identified by "2" constrains table "NOVA.KEY_PAIRS" from ' - 'having duplicate values for the index key.') - integrity_error = sqla_exc.IntegrityError(statement, params, orig) - self.assertRaises(db_exc.DBDuplicateEntry, - session._raise_if_duplicate_entry_error, - integrity_error, 'ibm_db_sa') - - def test_ibm_db_sa_raise_if_duplicate_entry_error_no_match(self): - # Tests that the session._raise_if_duplicate_entry_error method - # does not raise a DBDuplicateEntry exception when it's not a matching - # integrity error. - statement = ('ALTER TABLE instance_types ADD CONSTRAINT ' - 'uniq_name_x_deleted UNIQUE (name, deleted)') - params = None - orig = sqla_exc.SQLAlchemyError( - 'SQL0542N The column named "NAME" cannot be a column of a ' - 'primary key or unique key constraint because it can contain null ' - 'values.') - integrity_error = sqla_exc.IntegrityError(statement, params, orig) - session._raise_if_duplicate_entry_error(integrity_error, 'ibm_db_sa') - - -_REGEXP_TABLE_NAME = _TABLE_NAME + "regexp" - - -class RegexpTable(BASE, models.ModelBase): - __tablename__ = _REGEXP_TABLE_NAME - id = Column(Integer, primary_key=True) - bar = Column(String(255)) - - -class RegexpFilterTestCase(test_base.DbTestCase): - - def setUp(self): - super(RegexpFilterTestCase, self).setUp() - meta = MetaData() - meta.bind = self.engine - test_table = Table(_REGEXP_TABLE_NAME, meta, - Column('id', Integer, primary_key=True, - nullable=False), - Column('bar', String(255))) - test_table.create() - self.addCleanup(test_table.drop) - - def _test_regexp_filter(self, regexp, expected): - _session = self.sessionmaker() - with _session.begin(): - for i in ['10', '20', u'♥']: - tbl = RegexpTable() - tbl.update({'bar': i}) - tbl.save(session=_session) - - regexp_op = RegexpTable.bar.op('REGEXP')(regexp) - result = _session.query(RegexpTable).filter(regexp_op).all() - self.assertEqual([r.bar for r in result], expected) - - def test_regexp_filter(self): - self._test_regexp_filter('10', ['10']) - - def test_regexp_filter_nomatch(self): - self._test_regexp_filter('11', []) - - def test_regexp_filter_unicode(self): - self._test_regexp_filter(u'♥', [u'♥']) - - def test_regexp_filter_unicode_nomatch(self): - self._test_regexp_filter(u'♦', []) - - -class FakeDBAPIConnection(): - def cursor(self): - return FakeCursor() - - -class FakeCursor(): - def execute(self, sql): - pass - - -class FakeConnectionProxy(): - pass - - -class FakeConnectionRec(): - pass - - -class OperationalError(Exception): - pass - - -class ProgrammingError(Exception): - pass - - -class FakeDB2Engine(object): - - class Dialect(): - - def is_disconnect(self, e, *args): - expected_error = ('SQL30081N: DB2 Server connection is no longer ' - 'active') - return (str(e) == expected_error) - - dialect = Dialect() - name = 'ibm_db_sa' - - def dispose(self): - pass - - -class TestDBDisconnected(oslo_test.BaseTestCase): - - def _test_ping_listener_disconnected(self, connection): - engine_args = { - 'pool_recycle': 
3600, - 'echo': False, - 'convert_unicode': True} - - engine = sqlalchemy.create_engine(connection, **engine_args) - with mock.patch.object(engine, 'dispose') as dispose_mock: - self.assertRaises(sqlalchemy.exc.DisconnectionError, - session._ping_listener, engine, - FakeDBAPIConnection(), FakeConnectionRec(), - FakeConnectionProxy()) - dispose_mock.assert_called_once_with() - - def test_mysql_ping_listener_disconnected(self): - def fake_execute(sql): - raise _mysql_exceptions.OperationalError(self.mysql_error, - ('MySQL server has ' - 'gone away')) - with mock.patch.object(FakeCursor, 'execute', - side_effect=fake_execute): - connection = 'mysql://root:password@fakehost/fakedb?charset=utf8' - for code in [2006, 2013, 2014, 2045, 2055]: - self.mysql_error = code - self._test_ping_listener_disconnected(connection) - - def test_db2_ping_listener_disconnected(self): - - def fake_execute(sql): - raise OperationalError('SQL30081N: DB2 Server ' - 'connection is no longer active') - with mock.patch.object(FakeCursor, 'execute', - side_effect=fake_execute): - # TODO(dperaza): Need a fake engine for db2 since ibm_db_sa is not - # in global requirements. Change this code to use real IBM db2 - # engine as soon as ibm_db_sa is included in global-requirements - # under openstack/requirements project. - fake_create_engine = lambda *args, **kargs: FakeDB2Engine() - with mock.patch.object(sqlalchemy, 'create_engine', - side_effect=fake_create_engine): - connection = ('ibm_db_sa://db2inst1:openstack@fakehost:50000' - '/fakedab') - self._test_ping_listener_disconnected(connection) - - -class MySQLModeTestCase(test_base.MySQLOpportunisticTestCase): - - def __init__(self, *args, **kwargs): - super(MySQLModeTestCase, self).__init__(*args, **kwargs) - # By default, run in empty SQL mode. - # Subclasses override this with specific modes. - self.mysql_mode = '' - - def setUp(self): - super(MySQLModeTestCase, self).setUp() - - self.engine = session.create_engine(self.engine.url, - mysql_sql_mode=self.mysql_mode) - self.connection = self.engine.connect() - - meta = MetaData() - meta.bind = self.engine - self.test_table = Table(_TABLE_NAME + "mode", meta, - Column('id', Integer, primary_key=True), - Column('bar', String(255))) - self.test_table.create() - - self.addCleanup(self.test_table.drop) - self.addCleanup(self.connection.close) - - def _test_string_too_long(self, value): - with self.connection.begin(): - self.connection.execute(self.test_table.insert(), - bar=value) - result = self.connection.execute(self.test_table.select()) - return result.fetchone()['bar'] - - def test_string_too_long(self): - value = 'a' * 512 - # String is too long. - # With no SQL mode set, this gets truncated. - self.assertNotEqual(value, - self._test_string_too_long(value)) - - -class MySQLStrictAllTablesModeTestCase(MySQLModeTestCase): - "Test data integrity enforcement in MySQL STRICT_ALL_TABLES mode." - - def __init__(self, *args, **kwargs): - super(MySQLStrictAllTablesModeTestCase, self).__init__(*args, **kwargs) - self.mysql_mode = 'STRICT_ALL_TABLES' - - def test_string_too_long(self): - value = 'a' * 512 - # String is too long. - # With STRICT_ALL_TABLES or TRADITIONAL mode set, this is an error. - self.assertRaises(DataError, - self._test_string_too_long, value) - - -class MySQLTraditionalModeTestCase(MySQLStrictAllTablesModeTestCase): - """Test data integrity enforcement in MySQL TRADITIONAL mode. - Since TRADITIONAL includes STRICT_ALL_TABLES, this inherits all - STRICT_ALL_TABLES mode tests. 
- """ - - def __init__(self, *args, **kwargs): - super(MySQLTraditionalModeTestCase, self).__init__(*args, **kwargs) - self.mysql_mode = 'TRADITIONAL' - - -class EngineFacadeTestCase(oslo_test.BaseTestCase): - def setUp(self): - super(EngineFacadeTestCase, self).setUp() - - self.facade = session.EngineFacade('sqlite://') - - def test_get_engine(self): - eng1 = self.facade.get_engine() - eng2 = self.facade.get_engine() - - self.assertIs(eng1, eng2) - - def test_get_session(self): - ses1 = self.facade.get_session() - ses2 = self.facade.get_session() - - self.assertIsNot(ses1, ses2) - - def test_get_session_arguments_override_default_settings(self): - ses = self.facade.get_session(autocommit=False, expire_on_commit=True) - - self.assertFalse(ses.autocommit) - self.assertTrue(ses.expire_on_commit) - - @mock.patch('oslo.db.sqlalchemy.session.get_maker') - @mock.patch('oslo.db.sqlalchemy.session.create_engine') - def test_creation_from_config(self, create_engine, get_maker): - conf = mock.MagicMock() - conf.database.items.return_value = [ - ('connection_debug', 100), - ('max_pool_size', 10), - ('mysql_sql_mode', 'TRADITIONAL'), - ] - - session.EngineFacade.from_config('sqlite:///:memory:', conf, - autocommit=False, - expire_on_commit=True) - - conf.database.items.assert_called_once_with() - create_engine.assert_called_once_with( - sql_connection='sqlite:///:memory:', - connection_debug=100, - max_pool_size=10, - mysql_sql_mode='TRADITIONAL', - sqlite_fk=False, - idle_timeout=mock.ANY, - retry_interval=mock.ANY, - max_retries=mock.ANY, - max_overflow=mock.ANY, - connection_trace=mock.ANY, - sqlite_synchronous=mock.ANY, - pool_timeout=mock.ANY, - ) - get_maker.assert_called_once_with(engine=create_engine(), - autocommit=False, - expire_on_commit=True) - - -class MysqlSetCallbackTest(oslo_test.BaseTestCase): - - class FakeCursor(object): - def __init__(self, execs): - self._execs = execs - - def execute(self, sql, arg): - self._execs.append(sql % arg) - - class FakeDbapiCon(object): - def __init__(self, execs): - self._execs = execs - - def cursor(self): - return MysqlSetCallbackTest.FakeCursor(self._execs) - - class FakeResultSet(object): - def __init__(self, realmode): - self._realmode = realmode - - def fetchone(self): - return ['ignored', self._realmode] - - class FakeEngine(object): - def __init__(self, realmode=None): - self._cbs = {} - self._execs = [] - self._realmode = realmode - self._connected = False - - def set_callback(self, name, cb): - self._cbs[name] = cb - - def connect(self, **kwargs): - cb = self._cbs.get('connect', lambda *x, **y: None) - dbapi_con = MysqlSetCallbackTest.FakeDbapiCon(self._execs) - connection_rec = None # Not used. 
- cb(dbapi_con, connection_rec) - - def execute(self, sql): - if not self._connected: - self.connect() - self._connected = True - self._execs.append(sql) - return MysqlSetCallbackTest.FakeResultSet(self._realmode) - - def stub_listen(engine, name, cb): - engine.set_callback(name, cb) - - @mock.patch.object(sqlalchemy.event, 'listen', side_effect=stub_listen) - def _call_set_callback(self, listen_mock, sql_mode=None, realmode=None): - engine = self.FakeEngine(realmode=realmode) - - self.stream = self.useFixture(fixtures.FakeLogger( - format="%(levelname)8s [%(name)s] %(message)s", - level=logging.DEBUG, - nuke_handlers=True - )) - - session._mysql_set_mode_callback(engine, sql_mode=sql_mode) - return engine - - def test_set_mode_traditional(self): - # If _mysql_set_mode_callback is called with an sql_mode, then the SQL - # mode is set on the connection. - - engine = self._call_set_callback(sql_mode='TRADITIONAL') - - exp_calls = [ - "SET SESSION sql_mode = ['TRADITIONAL']", - "SHOW VARIABLES LIKE 'sql_mode'" - ] - self.assertEqual(exp_calls, engine._execs) - - def test_set_mode_ansi(self): - # If _mysql_set_mode_callback is called with an sql_mode, then the SQL - # mode is set on the connection. - - engine = self._call_set_callback(sql_mode='ANSI') - - exp_calls = [ - "SET SESSION sql_mode = ['ANSI']", - "SHOW VARIABLES LIKE 'sql_mode'" - ] - self.assertEqual(exp_calls, engine._execs) - - def test_set_mode_no_mode(self): - # If _mysql_set_mode_callback is called with sql_mode=None, then - # the SQL mode is NOT set on the connection. - - engine = self._call_set_callback() - - exp_calls = [ - "SHOW VARIABLES LIKE 'sql_mode'" - ] - self.assertEqual(exp_calls, engine._execs) - - def test_fail_detect_mode(self): - # If "SHOW VARIABLES LIKE 'sql_mode'" results in no row, then - # we get a log indicating can't detect the mode. - - self._call_set_callback() - - self.assertIn('Unable to detect effective SQL mode', - self.stream.output) - - def test_logs_real_mode(self): - # If "SHOW VARIABLES LIKE 'sql_mode'" results in a value, then - # we get a log with the value. - - self._call_set_callback(realmode='SOMETHING') - - self.assertIn('MySQL server mode set to SOMETHING', - self.stream.output) - - def test_warning_when_not_traditional(self): - # If "SHOW VARIABLES LIKE 'sql_mode'" results in a value that doesn't - # include 'TRADITIONAL', then a warning is logged. - - self._call_set_callback(realmode='NOT_TRADIT') - - self.assertIn("consider enabling TRADITIONAL or STRICT_ALL_TABLES", - self.stream.output) - - def test_no_warning_when_traditional(self): - # If "SHOW VARIABLES LIKE 'sql_mode'" results in a value that includes - # 'TRADITIONAL', then no warning is logged. - - self._call_set_callback(realmode='TRADITIONAL') - - self.assertNotIn("consider enabling TRADITIONAL or STRICT_ALL_TABLES", - self.stream.output) - - def test_no_warning_when_strict_all_tables(self): - # If "SHOW VARIABLES LIKE 'sql_mode'" results in a value that includes - # 'STRICT_ALL_TABLES', then no warning is logged. - - self._call_set_callback(realmode='STRICT_ALL_TABLES') - - self.assertNotIn("consider enabling TRADITIONAL or STRICT_ALL_TABLES", - self.stream.output) - - def test_multiple_executes(self): - # We should only set the sql_mode on a connection once. 
- - engine = self._call_set_callback(sql_mode='TRADITIONAL', - realmode='TRADITIONAL') - - engine.execute('SELECT * FROM foo') - engine.execute('SELECT * FROM bar') - - exp_calls = [ - "SET SESSION sql_mode = ['TRADITIONAL']", - "SHOW VARIABLES LIKE 'sql_mode'", - "SELECT * FROM foo", - "SELECT * FROM bar", - ] - self.assertEqual(exp_calls, engine._execs) diff --git a/tests/unit/db/sqlalchemy/test_utils.py b/tests/unit/db/sqlalchemy/test_utils.py deleted file mode 100644 index d7f1783..0000000 --- a/tests/unit/db/sqlalchemy/test_utils.py +++ /dev/null @@ -1,838 +0,0 @@ -# Copyright (c) 2013 Boris Pavlovic (boris@pavlovic.me). -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import uuid -import warnings - -from migrate.changeset import UniqueConstraint -import mock -from oslotest import base as test_base -import six -from six import moves -from six.moves.urllib import parse -import sqlalchemy -from sqlalchemy.dialects import mysql -from sqlalchemy import Boolean, Index, Integer, DateTime, String -from sqlalchemy import MetaData, Table, Column, ForeignKey -from sqlalchemy.engine import reflection -from sqlalchemy.exc import SAWarning, OperationalError -from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy.sql import select -from sqlalchemy.types import UserDefinedType, NullType - -from oslo.db import exception -from oslo.db.sqlalchemy import migration -from oslo.db.sqlalchemy import models -from oslo.db.sqlalchemy import session -from oslo.db.sqlalchemy import test_migrations -from oslo.db.sqlalchemy import utils -from openstack.common.fixture import moxstubout -from tests import utils as test_utils - - -SA_VERSION = tuple(map(int, sqlalchemy.__version__.split('.'))) - - -class TestSanitizeDbUrl(test_base.BaseTestCase): - - def test_url_with_cred(self): - db_url = 'myproto://johndoe:secret@localhost/myschema' - expected = 'myproto://****:****@localhost/myschema' - actual = utils.sanitize_db_url(db_url) - self.assertEqual(expected, actual) - - def test_url_with_no_cred(self): - db_url = 'sqlite:///mysqlitefile' - actual = utils.sanitize_db_url(db_url) - self.assertEqual(db_url, actual) - - -class CustomType(UserDefinedType): - """Dummy column type for testing unsupported types.""" - def get_col_spec(self): - return "CustomType" - - -class FakeModel(object): - def __init__(self, values): - self.values = values - - def __getattr__(self, name): - try: - value = self.values[name] - except KeyError: - raise AttributeError(name) - return value - - def __getitem__(self, key): - if key in self.values: - return self.values[key] - else: - raise NotImplementedError() - - def __repr__(self): - return '' % self.values - - -class TestPaginateQuery(test_base.BaseTestCase): - def setUp(self): - super(TestPaginateQuery, self).setUp() - mox_fixture = self.useFixture(moxstubout.MoxStubout()) - self.mox = mox_fixture.mox - self.query = self.mox.CreateMockAnything() - self.mox.StubOutWithMock(sqlalchemy, 'asc') - self.mox.StubOutWithMock(sqlalchemy, 'desc') - self.marker = 
FakeModel({ - 'user_id': 'user', - 'project_id': 'p', - 'snapshot_id': 's', - }) - self.model = FakeModel({ - 'user_id': 'user', - 'project_id': 'project', - 'snapshot_id': 'snapshot', - }) - - def test_paginate_query_no_pagination_no_sort_dirs(self): - sqlalchemy.asc('user').AndReturn('asc_3') - self.query.order_by('asc_3').AndReturn(self.query) - sqlalchemy.asc('project').AndReturn('asc_2') - self.query.order_by('asc_2').AndReturn(self.query) - sqlalchemy.asc('snapshot').AndReturn('asc_1') - self.query.order_by('asc_1').AndReturn(self.query) - self.query.limit(5).AndReturn(self.query) - self.mox.ReplayAll() - utils.paginate_query(self.query, self.model, 5, - ['user_id', 'project_id', 'snapshot_id']) - - def test_paginate_query_no_pagination(self): - sqlalchemy.asc('user').AndReturn('asc') - self.query.order_by('asc').AndReturn(self.query) - sqlalchemy.desc('project').AndReturn('desc') - self.query.order_by('desc').AndReturn(self.query) - self.query.limit(5).AndReturn(self.query) - self.mox.ReplayAll() - utils.paginate_query(self.query, self.model, 5, - ['user_id', 'project_id'], - sort_dirs=['asc', 'desc']) - - def test_paginate_query_attribute_error(self): - sqlalchemy.asc('user').AndReturn('asc') - self.query.order_by('asc').AndReturn(self.query) - self.mox.ReplayAll() - self.assertRaises(utils.InvalidSortKey, - utils.paginate_query, self.query, - self.model, 5, ['user_id', 'non-existent key']) - - def test_paginate_query_assertion_error(self): - self.mox.ReplayAll() - self.assertRaises(AssertionError, - utils.paginate_query, self.query, - self.model, 5, ['user_id'], - marker=self.marker, - sort_dir='asc', sort_dirs=['asc']) - - def test_paginate_query_assertion_error_2(self): - self.mox.ReplayAll() - self.assertRaises(AssertionError, - utils.paginate_query, self.query, - self.model, 5, ['user_id'], - marker=self.marker, - sort_dir=None, sort_dirs=['asc', 'desk']) - - def test_paginate_query(self): - sqlalchemy.asc('user').AndReturn('asc_1') - self.query.order_by('asc_1').AndReturn(self.query) - sqlalchemy.desc('project').AndReturn('desc_1') - self.query.order_by('desc_1').AndReturn(self.query) - self.mox.StubOutWithMock(sqlalchemy.sql, 'and_') - sqlalchemy.sql.and_(False).AndReturn('some_crit') - sqlalchemy.sql.and_(True, False).AndReturn('another_crit') - self.mox.StubOutWithMock(sqlalchemy.sql, 'or_') - sqlalchemy.sql.or_('some_crit', 'another_crit').AndReturn('some_f') - self.query.filter('some_f').AndReturn(self.query) - self.query.limit(5).AndReturn(self.query) - self.mox.ReplayAll() - utils.paginate_query(self.query, self.model, 5, - ['user_id', 'project_id'], - marker=self.marker, - sort_dirs=['asc', 'desc']) - - def test_paginate_query_value_error(self): - sqlalchemy.asc('user').AndReturn('asc_1') - self.query.order_by('asc_1').AndReturn(self.query) - self.mox.ReplayAll() - self.assertRaises(ValueError, utils.paginate_query, - self.query, self.model, 5, ['user_id', 'project_id'], - marker=self.marker, sort_dirs=['asc', 'mixed']) - - -class TestMigrationUtils(test_migrations.BaseMigrationTestCase): - """Class for testing utils that are used in db migrations.""" - - def setUp(self): - super(TestMigrationUtils, self).setUp() - migration.patch_migrate() - - def _populate_db_for_drop_duplicate_entries(self, engine, meta, - table_name): - values = [ - {'id': 11, 'a': 3, 'b': 10, 'c': 'abcdef'}, - {'id': 12, 'a': 5, 'b': 10, 'c': 'abcdef'}, - {'id': 13, 'a': 6, 'b': 10, 'c': 'abcdef'}, - {'id': 14, 'a': 7, 'b': 10, 'c': 'abcdef'}, - {'id': 21, 'a': 1, 'b': 20, 'c': 'aa'}, - {'id': 
31, 'a': 1, 'b': 20, 'c': 'bb'}, - {'id': 41, 'a': 1, 'b': 30, 'c': 'aef'}, - {'id': 42, 'a': 2, 'b': 30, 'c': 'aef'}, - {'id': 43, 'a': 3, 'b': 30, 'c': 'aef'} - ] - - test_table = Table(table_name, meta, - Column('id', Integer, primary_key=True, - nullable=False), - Column('a', Integer), - Column('b', Integer), - Column('c', String(255)), - Column('deleted', Integer, default=0), - Column('deleted_at', DateTime), - Column('updated_at', DateTime)) - - test_table.create() - engine.execute(test_table.insert(), values) - return test_table, values - - def test_drop_old_duplicate_entries_from_table(self): - table_name = "__test_tmp_table__" - - for engine in self.engines.values(): - meta = MetaData() - meta.bind = engine - test_table, values = self._populate_db_for_drop_duplicate_entries( - engine, meta, table_name) - utils.drop_old_duplicate_entries_from_table( - engine, table_name, False, 'b', 'c') - - uniq_values = set() - expected_ids = [] - for value in sorted(values, key=lambda x: x['id'], reverse=True): - uniq_value = (('b', value['b']), ('c', value['c'])) - if uniq_value in uniq_values: - continue - uniq_values.add(uniq_value) - expected_ids.append(value['id']) - - real_ids = [row[0] for row in - engine.execute(select([test_table.c.id])).fetchall()] - - self.assertEqual(len(real_ids), len(expected_ids)) - for id_ in expected_ids: - self.assertTrue(id_ in real_ids) - - def test_drop_old_duplicate_entries_from_table_soft_delete(self): - table_name = "__test_tmp_table__" - - for engine in self.engines.values(): - meta = MetaData() - meta.bind = engine - table, values = self._populate_db_for_drop_duplicate_entries( - engine, meta, table_name) - utils.drop_old_duplicate_entries_from_table(engine, table_name, - True, 'b', 'c') - uniq_values = set() - expected_values = [] - soft_deleted_values = [] - - for value in sorted(values, key=lambda x: x['id'], reverse=True): - uniq_value = (('b', value['b']), ('c', value['c'])) - if uniq_value in uniq_values: - soft_deleted_values.append(value) - continue - uniq_values.add(uniq_value) - expected_values.append(value) - - base_select = table.select() - - rows_select = base_select.where(table.c.deleted != table.c.id) - row_ids = [row['id'] for row in - engine.execute(rows_select).fetchall()] - self.assertEqual(len(row_ids), len(expected_values)) - for value in expected_values: - self.assertTrue(value['id'] in row_ids) - - deleted_rows_select = base_select.where( - table.c.deleted == table.c.id) - deleted_rows_ids = [row['id'] for row in - engine.execute(deleted_rows_select).fetchall()] - self.assertEqual(len(deleted_rows_ids), - len(values) - len(row_ids)) - for value in soft_deleted_values: - self.assertTrue(value['id'] in deleted_rows_ids) - - def test_change_deleted_column_type_does_not_drop_index(self): - table_name = 'abc' - for engine in self.engines.values(): - meta = MetaData(bind=engine) - - indexes = { - 'idx_a_deleted': ['a', 'deleted'], - 'idx_b_deleted': ['b', 'deleted'], - 'idx_a': ['a'] - } - - index_instances = [Index(name, *columns) - for name, columns in six.iteritems(indexes)] - - table = Table(table_name, meta, - Column('id', Integer, primary_key=True), - Column('a', String(255)), - Column('b', String(255)), - Column('deleted', Boolean), - *index_instances) - table.create() - utils.change_deleted_column_type_to_id_type(engine, table_name) - utils.change_deleted_column_type_to_boolean(engine, table_name) - - insp = reflection.Inspector.from_engine(engine) - real_indexes = insp.get_indexes(table_name) - 
self.assertEqual(len(real_indexes), 3) - for index in real_indexes: - name = index['name'] - self.assertIn(name, indexes) - self.assertEqual(set(index['column_names']), - set(indexes[name])) - - def test_change_deleted_column_type_to_id_type_integer(self): - table_name = 'abc' - for engine in self.engines.values(): - meta = MetaData() - meta.bind = engine - table = Table(table_name, meta, - Column('id', Integer, primary_key=True), - Column('deleted', Boolean)) - table.create() - utils.change_deleted_column_type_to_id_type(engine, table_name) - - table = utils.get_table(engine, table_name) - self.assertTrue(isinstance(table.c.deleted.type, Integer)) - - def test_change_deleted_column_type_to_id_type_string(self): - table_name = 'abc' - for engine in self.engines.values(): - meta = MetaData() - meta.bind = engine - table = Table(table_name, meta, - Column('id', String(255), primary_key=True), - Column('deleted', Boolean)) - table.create() - utils.change_deleted_column_type_to_id_type(engine, table_name) - - table = utils.get_table(engine, table_name) - self.assertTrue(isinstance(table.c.deleted.type, String)) - - def test_change_deleted_column_type_to_id_type_custom(self): - table_name = 'abc' - engine = self.engines['sqlite'] - meta = MetaData() - meta.bind = engine - table = Table(table_name, meta, - Column('id', Integer, primary_key=True), - Column('foo', CustomType), - Column('deleted', Boolean)) - table.create() - - # reflection of custom types has been fixed upstream - if SA_VERSION < (0, 9, 0): - self.assertRaises(utils.ColumnError, - utils.change_deleted_column_type_to_id_type, - engine, table_name) - - fooColumn = Column('foo', CustomType()) - utils.change_deleted_column_type_to_id_type(engine, table_name, - foo=fooColumn) - - table = utils.get_table(engine, table_name) - # NOTE(boris-42): There is no way to check has foo type CustomType. - # but sqlalchemy will set it to NullType. 
This has - # been fixed upstream in recent SA versions - if SA_VERSION < (0, 9, 0): - self.assertTrue(isinstance(table.c.foo.type, NullType)) - self.assertTrue(isinstance(table.c.deleted.type, Integer)) - - def test_change_deleted_column_type_to_boolean(self): - table_name = 'abc' - for key, engine in self.engines.items(): - meta = MetaData() - meta.bind = engine - table = Table(table_name, meta, - Column('id', Integer, primary_key=True), - Column('deleted', Integer)) - table.create() - - utils.change_deleted_column_type_to_boolean(engine, table_name) - - table = utils.get_table(engine, table_name) - expected_type = Boolean if key != "mysql" else mysql.TINYINT - self.assertTrue(isinstance(table.c.deleted.type, expected_type)) - - def test_change_deleted_column_type_to_boolean_with_fc(self): - table_name_1 = 'abc' - table_name_2 = 'bcd' - for key, engine in self.engines.items(): - meta = MetaData() - meta.bind = engine - - table_1 = Table(table_name_1, meta, - Column('id', Integer, primary_key=True), - Column('deleted', Integer)) - table_1.create() - - table_2 = Table(table_name_2, meta, - Column('id', Integer, primary_key=True), - Column('foreign_id', Integer, - ForeignKey('%s.id' % table_name_1)), - Column('deleted', Integer)) - table_2.create() - - utils.change_deleted_column_type_to_boolean(engine, table_name_2) - - table = utils.get_table(engine, table_name_2) - expected_type = Boolean if key != "mysql" else mysql.TINYINT - self.assertTrue(isinstance(table.c.deleted.type, expected_type)) - - def test_change_deleted_column_type_to_boolean_type_custom(self): - table_name = 'abc' - engine = self.engines['sqlite'] - meta = MetaData() - meta.bind = engine - table = Table(table_name, meta, - Column('id', Integer, primary_key=True), - Column('foo', CustomType), - Column('deleted', Integer)) - table.create() - - # reflection of custom types has been fixed upstream - if SA_VERSION < (0, 9, 0): - self.assertRaises(utils.ColumnError, - utils.change_deleted_column_type_to_boolean, - engine, table_name) - - fooColumn = Column('foo', CustomType()) - utils.change_deleted_column_type_to_boolean(engine, table_name, - foo=fooColumn) - - table = utils.get_table(engine, table_name) - # NOTE(boris-42): There is no way to check has foo type CustomType. - # but sqlalchemy will set it to NullType. This has - # been fixed upstream in recent SA versions - if SA_VERSION < (0, 9, 0): - self.assertTrue(isinstance(table.c.foo.type, NullType)) - self.assertTrue(isinstance(table.c.deleted.type, Boolean)) - - def test_utils_drop_unique_constraint(self): - table_name = "__test_tmp_table__" - uc_name = 'uniq_foo' - values = [ - {'id': 1, 'a': 3, 'foo': 10}, - {'id': 2, 'a': 2, 'foo': 20}, - {'id': 3, 'a': 1, 'foo': 30}, - ] - for engine in self.engines.values(): - meta = MetaData() - meta.bind = engine - test_table = Table( - table_name, meta, - Column('id', Integer, primary_key=True, nullable=False), - Column('a', Integer), - Column('foo', Integer), - UniqueConstraint('a', name='uniq_a'), - UniqueConstraint('foo', name=uc_name), - ) - test_table.create() - - engine.execute(test_table.insert(), values) - # NOTE(boris-42): This method is generic UC dropper. - utils.drop_unique_constraint(engine, table_name, uc_name, 'foo') - - s = test_table.select().order_by(test_table.c.id) - rows = engine.execute(s).fetchall() - - for i in moves.range(len(values)): - v = values[i] - self.assertEqual((v['id'], v['a'], v['foo']), rows[i]) - - # NOTE(boris-42): Update data about Table from DB. 
- meta = MetaData() - meta.bind = engine - test_table = Table(table_name, meta, autoload=True) - constraints = [c for c in test_table.constraints - if c.name == uc_name] - self.assertEqual(len(constraints), 0) - self.assertEqual(len(test_table.constraints), 1) - - test_table.drop() - - def test_util_drop_unique_constraint_with_not_supported_sqlite_type(self): - table_name = "__test_tmp_table__" - uc_name = 'uniq_foo' - values = [ - {'id': 1, 'a': 3, 'foo': 10}, - {'id': 2, 'a': 2, 'foo': 20}, - {'id': 3, 'a': 1, 'foo': 30} - ] - - engine = self.engines['sqlite'] - meta = MetaData(bind=engine) - - test_table = Table( - table_name, meta, - Column('id', Integer, primary_key=True, nullable=False), - Column('a', Integer), - Column('foo', CustomType, default=0), - UniqueConstraint('a', name='uniq_a'), - UniqueConstraint('foo', name=uc_name), - ) - test_table.create() - - engine.execute(test_table.insert(), values) - warnings.simplefilter("ignore", SAWarning) - - # reflection of custom types has been fixed upstream - if SA_VERSION < (0, 9, 0): - # NOTE(boris-42): Missing info about column `foo` that has - # unsupported type CustomType. - self.assertRaises(utils.ColumnError, - utils.drop_unique_constraint, - engine, table_name, uc_name, 'foo') - - # NOTE(boris-42): Wrong type of foo instance. it should be - # instance of sqlalchemy.Column. - self.assertRaises(utils.ColumnError, - utils.drop_unique_constraint, - engine, table_name, uc_name, 'foo', - foo=Integer()) - - foo = Column('foo', CustomType, default=0) - utils.drop_unique_constraint( - engine, table_name, uc_name, 'foo', foo=foo) - - s = test_table.select().order_by(test_table.c.id) - rows = engine.execute(s).fetchall() - - for i in moves.range(len(values)): - v = values[i] - self.assertEqual((v['id'], v['a'], v['foo']), rows[i]) - - # NOTE(boris-42): Update data about Table from DB. 
- meta = MetaData(bind=engine) - test_table = Table(table_name, meta, autoload=True) - constraints = [c for c in test_table.constraints if c.name == uc_name] - self.assertEqual(len(constraints), 0) - self.assertEqual(len(test_table.constraints), 1) - test_table.drop() - - def test_drop_unique_constraint_in_sqlite_fk_recreate(self): - engine = self.engines['sqlite'] - meta = MetaData() - meta.bind = engine - parent_table = Table( - 'table0', meta, - Column('id', Integer, primary_key=True), - Column('foo', Integer), - ) - parent_table.create() - table_name = 'table1' - table = Table( - table_name, meta, - Column('id', Integer, primary_key=True), - Column('baz', Integer), - Column('bar', Integer, ForeignKey("table0.id")), - UniqueConstraint('baz', name='constr1') - ) - table.create() - utils.drop_unique_constraint(engine, table_name, 'constr1', 'baz') - - insp = reflection.Inspector.from_engine(engine) - f_keys = insp.get_foreign_keys(table_name) - self.assertEqual(len(f_keys), 1) - f_key = f_keys[0] - self.assertEqual(f_key['referred_table'], 'table0') - self.assertEqual(f_key['referred_columns'], ['id']) - self.assertEqual(f_key['constrained_columns'], ['bar']) - - def test_insert_from_select(self): - insert_table_name = "__test_insert_to_table__" - select_table_name = "__test_select_from_table__" - uuidstrs = [] - for unused in range(10): - uuidstrs.append(uuid.uuid4().hex) - for key, engine in self.engines.items(): - meta = MetaData() - meta.bind = engine - conn = engine.connect() - insert_table = Table( - insert_table_name, meta, - Column('id', Integer, primary_key=True, - nullable=False, autoincrement=True), - Column('uuid', String(36), nullable=False)) - select_table = Table( - select_table_name, meta, - Column('id', Integer, primary_key=True, - nullable=False, autoincrement=True), - Column('uuid', String(36), nullable=False)) - - insert_table.create() - select_table.create() - # Add 10 rows to select_table - for uuidstr in uuidstrs: - ins_stmt = select_table.insert().values(uuid=uuidstr) - conn.execute(ins_stmt) - - # Select 4 rows in one chunk from select_table - column = select_table.c.id - query_insert = select([select_table], - select_table.c.id < 5).order_by(column) - insert_statement = utils.InsertFromSelect(insert_table, - query_insert) - result_insert = conn.execute(insert_statement) - # Verify we insert 4 rows - self.assertEqual(result_insert.rowcount, 4) - - query_all = select([insert_table]).where( - insert_table.c.uuid.in_(uuidstrs)) - rows = conn.execute(query_all).fetchall() - # Verify we really have 4 rows in insert_table - self.assertEqual(len(rows), 4) - - insert_table.drop() - select_table.drop() - - -class TestConnectionUtils(test_utils.BaseTestCase): - - def setUp(self): - super(TestConnectionUtils, self).setUp() - - self.full_credentials = {'backend': 'mysql', - 'database': 'test', - 'user': 'dude', - 'passwd': 'pass'} - - self.connect_string = 'mysql://dude:pass@localhost/test' - - def test_connect_string(self): - connect_string = utils.get_connect_string(**self.full_credentials) - self.assertEqual(connect_string, self.connect_string) - - def test_connect_string_sqlite(self): - sqlite_credentials = {'backend': 'sqlite', 'database': 'test.db'} - connect_string = utils.get_connect_string(**sqlite_credentials) - self.assertEqual(connect_string, 'sqlite:///test.db') - - def test_is_backend_avail(self): - self.mox.StubOutWithMock(sqlalchemy.engine.base.Engine, 'connect') - fake_connection = self.mox.CreateMockAnything() - fake_connection.close() - 
sqlalchemy.engine.base.Engine.connect().AndReturn(fake_connection) - self.mox.ReplayAll() - - self.assertTrue(utils.is_backend_avail(**self.full_credentials)) - - def test_is_backend_unavail(self): - self.mox.StubOutWithMock(sqlalchemy.engine.base.Engine, 'connect') - sqlalchemy.engine.base.Engine.connect().AndRaise(OperationalError) - self.mox.ReplayAll() - - self.assertFalse(utils.is_backend_avail(**self.full_credentials)) - - def test_get_db_connection_info(self): - conn_pieces = parse.urlparse(self.connect_string) - self.assertEqual(utils.get_db_connection_info(conn_pieces), - ('dude', 'pass', 'test', 'localhost')) - - -class TestRaiseDuplicateEntryError(test_base.BaseTestCase): - def _test_impl(self, engine_name, error_msg): - try: - error = sqlalchemy.exc.IntegrityError('test', 'test', error_msg) - session._raise_if_duplicate_entry_error(error, engine_name) - except exception.DBDuplicateEntry as e: - self.assertEqual(e.columns, ['a', 'b']) - else: - self.fail('DBDuplicateEntry was not raised') - - def test_sqlite(self): - self._test_impl( - 'sqlite', - '(IntegrityError) column a, b are not unique' - ) - - def test_sqlite_3_7_16_or_3_8_2_and_higher(self): - self._test_impl( - 'sqlite', - '(IntegrityError) UNIQUE constraint failed: tbl.a, tbl.b' - ) - - def test_mysql(self): - self._test_impl( - 'mysql', - '(IntegrityError) (1062, "Duplicate entry ' - '\'2-3\' for key \'uniq_tbl0a0b\'")' - ) - - def test_postgresql(self): - self._test_impl( - 'postgresql', - '(IntegrityError) duplicate key value violates unique constraint' - '"uniq_tbl0a0b"' - '\nDETAIL: Key (a, b)=(2, 3) already exists.\n' - ) - - def test_unsupported_backend_returns_none(self): - error = sqlalchemy.exc.IntegrityError('test', 'test', 'test') - rv = session._raise_if_duplicate_entry_error(error, 'oracle') - - self.assertIsNone(rv) - - -class MyModelSoftDeletedProjectId(declarative_base(), models.ModelBase, - models.SoftDeleteMixin): - __tablename__ = 'soft_deleted_project_id_test_model' - id = Column(Integer, primary_key=True) - project_id = Column(Integer) - - -class MyModel(declarative_base(), models.ModelBase): - __tablename__ = 'test_model' - id = Column(Integer, primary_key=True) - - -class MyModelSoftDeleted(declarative_base(), models.ModelBase, - models.SoftDeleteMixin): - __tablename__ = 'soft_deleted_test_model' - id = Column(Integer, primary_key=True) - - -class TestModelQuery(test_base.BaseTestCase): - - def setUp(self): - super(TestModelQuery, self).setUp() - - self.session = mock.MagicMock() - self.session.query.return_value = self.session.query - self.session.query.filter.return_value = self.session.query - self.user_context = mock.MagicMock(is_admin=False, read_deleted='yes', - user_id=42, project_id=43) - - def test_wrong_model(self): - self.assertRaises(TypeError, utils.model_query, self.user_context, - FakeModel, session=self.session) - - def test_no_soft_deleted(self): - self.assertRaises(ValueError, utils.model_query, self.user_context, - MyModel, session=self.session) - - def test_read_deleted_only(self): - mock_query = utils.model_query( - self.user_context, MyModelSoftDeleted, - session=self.session, read_deleted='only') - - deleted_filter = mock_query.filter.call_args[0][0] - self.assertEqual(str(deleted_filter), - 'soft_deleted_test_model.deleted != :deleted_1') - self.assertEqual(deleted_filter.right.value, - MyModelSoftDeleted.__mapper__.c.deleted.default.arg) - - def test_read_deleted_no(self): - mock_query = utils.model_query( - self.user_context, MyModelSoftDeleted, - session=self.session, 
read_deleted='no') - - deleted_filter = mock_query.filter.call_args[0][0] - self.assertEqual(str(deleted_filter), - 'soft_deleted_test_model.deleted = :deleted_1') - self.assertEqual(deleted_filter.right.value, - MyModelSoftDeleted.__mapper__.c.deleted.default.arg) - - def test_read_deleted_yes(self): - mock_query = utils.model_query( - self.user_context, MyModelSoftDeleted, - session=self.session, read_deleted='yes') - - self.assertEqual(mock_query.filter.call_count, 0) - - def test_wrong_read_deleted(self): - self.assertRaises(ValueError, utils.model_query, self.user_context, - MyModelSoftDeleted, session=self.session, - read_deleted='ololo') - - def test_project_only_true(self): - mock_query = utils.model_query( - self.user_context, MyModelSoftDeletedProjectId, - session=self.session, project_only=True) - - deleted_filter = mock_query.filter.call_args[0][0] - self.assertEqual( - str(deleted_filter), - 'soft_deleted_project_id_test_model.project_id = :project_id_1') - self.assertEqual(deleted_filter.right.value, - self.user_context.project_id) - - def test_project_filter_wrong_model(self): - self.assertRaises(ValueError, utils.model_query, self.user_context, - MyModelSoftDeleted, session=self.session, - project_only=True) - - def test_read_deleted_allow_none(self): - mock_query = utils.model_query( - self.user_context, MyModelSoftDeletedProjectId, - session=self.session, project_only='allow_none') - - self.assertEqual( - str(mock_query.filter.call_args[0][0]), - 'soft_deleted_project_id_test_model.project_id = :project_id_1 OR' - ' soft_deleted_project_id_test_model.project_id IS NULL' - ) - - @mock.patch.object(utils, "_read_deleted_filter") - @mock.patch.object(utils, "_project_filter") - def test_context_show_deleted(self, _project_filter, _read_deleted_filter): - user_context = mock.MagicMock(is_admin=False, show_deleted='yes', - user_id=42, project_id=43) - delattr(user_context, 'read_deleted') - _read_deleted_filter.return_value = self.session.query - _project_filter.return_value = self.session.query - utils.model_query(user_context, MyModel, - args=(MyModel.id,), session=self.session) - - self.session.query.assert_called_with(MyModel.id) - _read_deleted_filter.assert_called_with( - self.session.query, MyModel, user_context.show_deleted) - _project_filter.assert_called_with( - self.session.query, MyModel, user_context, False) - - @mock.patch.object(utils, "_read_deleted_filter") - @mock.patch.object(utils, "_project_filter") - def test_model_query_common(self, _project_filter, _read_deleted_filter): - _read_deleted_filter.return_value = self.session.query - _project_filter.return_value = self.session.query - utils.model_query(self.user_context, MyModel, - args=(MyModel.id,), session=self.session) - - self.session.query.assert_called_with(MyModel.id) - _read_deleted_filter.assert_called_with( - self.session.query, MyModel, self.user_context.read_deleted) - _project_filter.assert_called_with( - self.session.query, MyModel, self.user_context, False) diff --git a/tests/unit/db/test_api.py b/tests/unit/db/test_api.py deleted file mode 100644 index c4b2bb9..0000000 --- a/tests/unit/db/test_api.py +++ /dev/null @@ -1,167 +0,0 @@ -# Copyright (c) 2013 Rackspace Hosting -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Unit tests for DB API.""" - -import mock - -from oslo.db import api -from oslo.db import exception -from openstack.common import importutils -from tests import utils as test_utils - -sqla = importutils.import_module('sqlalchemy') - - -def get_backend(): - return DBAPI() - - -class DBAPI(object): - def _api_raise(self, *args, **kwargs): - """Simulate raising a database-has-gone-away error - - This method creates a fake OperationalError with an ID matching - a valid MySQL "database has gone away" situation. It also decrements - the error_counter so that we can artificially keep track of - how many times this function is called by the wrapper. When - error_counter reaches zero, this function returns True, simulating - the database becoming available again and the query succeeding. - """ - - if self.error_counter > 0: - self.error_counter -= 1 - orig = sqla.exc.DBAPIError(False, False, False) - orig.args = [2006, 'Test raise operational error'] - e = exception.DBConnectionError(orig) - raise e - else: - return True - - def api_raise_default(self, *args, **kwargs): - return self._api_raise(*args, **kwargs) - - @api.safe_for_db_retry - def api_raise_enable_retry(self, *args, **kwargs): - return self._api_raise(*args, **kwargs) - - def api_class_call1(_self, *args, **kwargs): - return args, kwargs - - -class DBAPITestCase(test_utils.BaseTestCase): - def test_dbapi_full_path_module_method(self): - dbapi = api.DBAPI('tests.unit.db.test_api') - result = dbapi.api_class_call1(1, 2, kwarg1='meow') - expected = ((1, 2), {'kwarg1': 'meow'}) - self.assertEqual(expected, result) - - def test_dbapi_unknown_invalid_backend(self): - self.assertRaises(ImportError, api.DBAPI, 'tests.unit.db.not_existent') - - def test_dbapi_lazy_loading(self): - dbapi = api.DBAPI('tests.unit.db.test_api', lazy=True) - - self.assertIsNone(dbapi._backend) - dbapi.api_class_call1(1, 'abc') - self.assertIsNotNone(dbapi._backend) - - -class DBReconnectTestCase(DBAPITestCase): - def setUp(self): - super(DBReconnectTestCase, self).setUp() - - self.test_db_api = DBAPI() - patcher = mock.patch(__name__ + '.get_backend', - return_value=self.test_db_api) - patcher.start() - self.addCleanup(patcher.stop) - - def test_raise_connection_error(self): - self.dbapi = api.DBAPI('sqlalchemy', {'sqlalchemy': __name__}) - - self.test_db_api.error_counter = 5 - self.assertRaises(exception.DBConnectionError, self.dbapi._api_raise) - - def test_raise_connection_error_decorated(self): - self.dbapi = api.DBAPI('sqlalchemy', {'sqlalchemy': __name__}) - - self.test_db_api.error_counter = 5 - self.assertRaises(exception.DBConnectionError, - self.dbapi.api_raise_enable_retry) - self.assertEqual(4, self.test_db_api.error_counter, 'Unexpected retry') - - def test_raise_connection_error_enabled(self): - self.dbapi = api.DBAPI('sqlalchemy', - {'sqlalchemy': __name__}, - use_db_reconnect=True) - - self.test_db_api.error_counter = 5 - self.assertRaises(exception.DBConnectionError, - self.dbapi.api_raise_default) - self.assertEqual(4, self.test_db_api.error_counter, 'Unexpected retry') - - def test_retry_one(self): - self.dbapi = 
api.DBAPI('sqlalchemy',
-                               {'sqlalchemy': __name__},
-                               use_db_reconnect=True,
-                               retry_interval=1)
-
-        try:
-            func = self.dbapi.api_raise_enable_retry
-            self.test_db_api.error_counter = 1
-            self.assertTrue(func(), 'Single retry did not succeed.')
-        except Exception:
-            self.fail('Single retry raised an un-wrapped error.')
-
-        self.assertEqual(
-            0, self.test_db_api.error_counter,
-            'Counter not decremented, retry logic probably failed.')
-
-    def test_retry_two(self):
-        self.dbapi = api.DBAPI('sqlalchemy',
-                               {'sqlalchemy': __name__},
-                               use_db_reconnect=True,
-                               retry_interval=1,
-                               inc_retry_interval=False)
-
-        try:
-            func = self.dbapi.api_raise_enable_retry
-            self.test_db_api.error_counter = 2
-            self.assertTrue(func(), 'Multiple retry did not succeed.')
-        except Exception:
-            self.fail('Multiple retry raised an un-wrapped error.')
-
-        self.assertEqual(
-            0, self.test_db_api.error_counter,
-            'Counter not decremented, retry logic probably failed.')
-
-    def test_retry_until_failure(self):
-        self.dbapi = api.DBAPI('sqlalchemy',
-                               {'sqlalchemy': __name__},
-                               use_db_reconnect=True,
-                               retry_interval=1,
-                               inc_retry_interval=False,
-                               max_retries=3)
-
-        func = self.dbapi.api_raise_enable_retry
-        self.test_db_api.error_counter = 5
-        self.assertRaises(
-            exception.DBError, func,
-            'Retry of permanent failure did not throw DBError exception.')
-
-        self.assertNotEqual(
-            0, self.test_db_api.error_counter,
-            'Retry did not stop after sql_max_retries iterations.')
diff --git a/tests/utils.py b/tests/utils.py
new file mode 100644
index 0000000..93571cc
--- /dev/null
+++ b/tests/utils.py
@@ -0,0 +1,29 @@
+# Copyright 2010-2011 OpenStack Foundation
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from oslo.config import cfg
+from oslotest import base as test_base
+
+from oslo.db.openstack.common.fixture import moxstubout
+
+
+class BaseTestCase(test_base.BaseTestCase):
+    def setUp(self, conf=cfg.CONF):
+        super(BaseTestCase, self).setUp()
+        moxfixture = self.useFixture(moxstubout.MoxStubout())
+        self.mox = moxfixture.mox
+        self.stubs = moxfixture.stubs
+        self.conf = conf
+        self.addCleanup(self.conf.reset)
diff --git a/tox.ini b/tox.ini
index 83f4dac..cde32ce 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,6 +1,6 @@
 [tox]
 minversion = 1.6
-envlist = py26,py27,py33,pypy,pep8
+envlist = py26,py27,pep8
 # NOTE(dhellmann): We cannot set skipdist=True
 # for oslo libraries because of the namespace package.
 #skipsdist = True
@@ -12,6 +12,7 @@ envlist = py26,py27,py33,pypy,pep8
 install_command = pip install -U {opts} {packages}
 setenv = VIRTUAL_ENV={envdir}
+         OSLO_LOCK_PATH=/tmp/
 deps = -r{toxinidir}/requirements.txt
        -r{toxinidir}/test-requirements.txt
 commands = python setup.py testr --slowest --testr-args='{posargs}'
@@ -23,13 +24,13 @@ commands = flake8
 commands = {posargs}
 
 [testenv:cover]
-commands = python setup.py testr --coverage --testr-args='{posargs}'
+commands = OSLO_LOCK_PATH=/tmp/ python setup.py testr --coverage --testr-args='{posargs}'
 
 [flake8]
 # H803 skipped on purpose per list discussion.
 # E123, E125 skipped as they are invalid PEP-8.
 show-source = True
-ignore = E123,E125,H803
+ignore = E123,E125,H803,W292
 builtins = _
-exclude=.venv,.git,.tox,dist,doc,*openstack/common*,*lib/python*,*egg,build
\ No newline at end of file
+exclude=.venv,.git,.tox,dist,doc,*openstack/common*,*lib/python*,*egg,build
-- 
cgit v1.2.1
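The EngineFacadeTestCase tests earlier in this patch pin down the facade's caching contract: one engine per facade, a fresh session per get_session() call, and per-call overrides of the session defaults. A minimal usage sketch, assuming only what those tests assert (the in-memory sqlite URL is illustrative):

    from oslo.db.sqlalchemy import session

    facade = session.EngineFacade('sqlite://')

    # get_engine() is cached: every call returns the same Engine object.
    assert facade.get_engine() is facade.get_engine()

    # get_session() is not cached: each call builds a new session.
    assert facade.get_session() is not facade.get_session()

    # Session defaults can be overridden per call, as the tests assert.
    ses = facade.get_session(autocommit=False, expire_on_commit=True)
    assert not ses.autocommit
    assert ses.expire_on_commit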
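DBReconnectTestCase exercises the retry wrapper end to end. The sketch below assembles the same moving parts outside of a test, using only the API surface visible in those tests (api.DBAPI, api.safe_for_db_retry, and the use_db_reconnect/retry_interval/inc_retry_interval/max_retries options); DemoBackend and get_widget are hypothetical names:

    from oslo.db import api


    class DemoBackend(object):
        @api.safe_for_db_retry
        def get_widget(self, widget_id):
            # With a DBAPI built with use_db_reconnect=True, methods opted
            # in via safe_for_db_retry are re-invoked after an
            # exception.DBConnectionError, waiting retry_interval seconds
            # between attempts (growing, if inc_retry_interval is set),
            # until max_retries is exhausted and the failure surfaces as a
            # DBError, as test_retry_until_failure shows.
            return {'id': widget_id}


    def get_backend():
        # DBAPI imports the module named in backend_mapping and calls its
        # module-level get_backend() to obtain the real implementation.
        return DemoBackend()


    dbapi = api.DBAPI('sqlalchemy', {'sqlalchemy': __name__},
                      use_db_reconnect=True,
                      retry_interval=1,
                      inc_retry_interval=False,
                      max_retries=3)
    dbapi.get_widget(42)  # proxied to DemoBackend.get_widget, with retries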
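The new tests/utils.py gives the relocated test tree a single base class that wires up the mox/stubs fixtures and config cleanup. A short sketch of a consumer, assuming the layout added by this patch (DemoTestCase and its test are illustrative):

    from tests import utils as test_utils


    class DemoTestCase(test_utils.BaseTestCase):
        def test_fixtures_are_wired_up(self):
            # BaseTestCase.setUp() installs the MoxStubout fixture, exposes
            # its mox/stubs handles, and registers self.conf.reset as a
            # cleanup so option overrides cannot leak between tests.
            self.assertIsNotNone(self.mox)
            self.assertIsNotNone(self.stubs)
            self.assertIs(test_utils.cfg.CONF, self.conf)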