author     Petr Blaho <petrblaho@gmail.com>    2014-06-12 16:07:22 +0200
committer  Petr Blaho <petrblaho@gmail.com>    2014-08-03 17:19:06 +0200
commit     e1dbd31c5b9fbb35ab54340cb8f05404d446b8e1 (patch)
tree       dd945d57c570578781f37de10e84aeb42e5dc69b
parent     423c17e02bf84dc66b84a7e669563dd4df68cf86 (diff)
Fixes indentations to pass E128 check.

Removes E128 from ignore setting in tox.ini.

Change-Id: I92ca574c076b4bd26e424a3b404188601264b5de
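For context, E128 is the pep8/flake8 check "continuation line under-indented for visual indent": once an argument follows the opening bracket on the same line, every later continuation line must line up with that bracket. The snippet below is a minimal illustration only (the helper is made up; nothing here comes from the patch itself) showing the violation and the two compliant layouts that this change rewrites the offending lines into.

def add(first_argument, second_argument):
    # Toy helper used only to demonstrate continuation-line indentation.
    return first_argument + second_argument

# Flagged as E128: the continuation line sits to the left of the visual
# indent established by the opening parenthesis.
total = add(1,
    2)

# Compliant: the continuation line is aligned with the visual indent.
total = add(1,
            2)

# Also compliant: a hanging indent, with no argument after the opening
# parenthesis (the style several hunks below switch to).
total = add(
    1,
    2)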
-rw-r--r--  oslo/db/sqlalchemy/compat/handle_error.py  24
-rw-r--r--  oslo/db/sqlalchemy/exc_filters.py           21
-rw-r--r--  oslo/db/sqlalchemy/session.py                2
-rw-r--r--  oslo/db/sqlalchemy/test_migrations.py        4
-rw-r--r--  tests/sqlalchemy/test_exc_filters.py        65
-rw-r--r--  tests/sqlalchemy/test_handle_error.py       12
-rw-r--r--  tests/sqlalchemy/test_migrations.py          6
-rw-r--r--  tests/sqlalchemy/test_options.py            20
-rw-r--r--  tests/sqlalchemy/test_utils.py               2
-rw-r--r--  tox.ini                                      2
10 files changed, 89 insertions, 69 deletions
diff --git a/oslo/db/sqlalchemy/compat/handle_error.py b/oslo/db/sqlalchemy/compat/handle_error.py
index 6929008..6f0debe 100644
--- a/oslo/db/sqlalchemy/compat/handle_error.py
+++ b/oslo/db/sqlalchemy/compat/handle_error.py
@@ -45,14 +45,14 @@ def handle_error(engine, listener):
# use a Connection-wrapper class to wrap _handle_dbapi_exception.
if not getattr(engine._connection_cls,
- '_oslo_handle_error_wrapper', False):
+ '_oslo_handle_error_wrapper', False):
engine._oslo_handle_error_events = []
class Connection(engine._connection_cls):
_oslo_handle_error_wrapper = True
def _handle_dbapi_exception(self, e, statement, parameters,
- cursor, context):
+ cursor, context):
try:
super(Connection, self)._handle_dbapi_exception(
@@ -69,19 +69,21 @@ def handle_error(engine, listener):
# re-raise
reraised_exception = e
- _oslo_handle_error_events = getattr(self.engine,
- '_oslo_handle_error_events', False)
+ _oslo_handle_error_events = getattr(
+ self.engine,
+ '_oslo_handle_error_events',
+ False)
newraise = None
if _oslo_handle_error_events:
if isinstance(reraised_exception,
- sqla_exc.StatementError):
+ sqla_exc.StatementError):
sqlalchemy_exception = reraised_exception
original_exception = sqlalchemy_exception.orig
- self._is_disconnect = is_disconnect = \
+ self._is_disconnect = is_disconnect = (
isinstance(sqlalchemy_exception,
- sqla_exc.DBAPIError) and sqlalchemy_exception.\
- connection_invalidated
+ sqla_exc.DBAPIError)
+ and sqlalchemy_exception.connection_invalidated)
else:
sqlalchemy_exception = None
original_exception = reraised_exception
@@ -123,7 +125,7 @@ def handle_error(engine, listener):
six.reraise(type(newraise), newraise, sys.exc_info()[2])
else:
six.reraise(type(reraised_exception),
- reraised_exception, sys.exc_info()[2])
+ reraised_exception, sys.exc_info()[2])
def _do_disconnect(self, e):
del self._is_disconnect
@@ -151,8 +153,8 @@ class ExceptionContextImpl(object):
"""
def __init__(self, exception, sqlalchemy_exception,
- connection, cursor, statement, parameters,
- context, is_disconnect):
+ connection, cursor, statement, parameters,
+ context, is_disconnect):
self.connection = connection
self.sqlalchemy_exception = sqlalchemy_exception
self.original_exception = exception
diff --git a/oslo/db/sqlalchemy/exc_filters.py b/oslo/db/sqlalchemy/exc_filters.py
index 20c8155..7b0b0c9 100644
--- a/oslo/db/sqlalchemy/exc_filters.py
+++ b/oslo/db/sqlalchemy/exc_filters.py
@@ -89,16 +89,16 @@ def _deadlock_error(operational_error, match, engine_name, is_disconnect):
@filters("mysql", sqla_exc.IntegrityError,
- r"^.*\b1062\b.*Duplicate entry '(?P<value>[^']+)'"
- r" for key '(?P<columns>[^']+)'.*$")
+ r"^.*\b1062\b.*Duplicate entry '(?P<value>[^']+)'"
+ r" for key '(?P<columns>[^']+)'.*$")
# NOTE(pkholkin): the first regex is suitable only for PostgreSQL 9.x versions
# the second regex is suitable for PostgreSQL 8.x versions
@filters("postgresql", sqla_exc.IntegrityError,
- (r'^.*duplicate\s+key.*"(?P<columns>[^"]+)"\s*\n.*'
- r'Key\s+\((?P<key>.*)\)=\((?P<value>.*)\)\s+already\s+exists.*$',
- r"^.*duplicate\s+key.*\"(?P<columns>[^\"]+)\"\s*\n.*$"))
+ (r'^.*duplicate\s+key.*"(?P<columns>[^"]+)"\s*\n.*'
+ r'Key\s+\((?P<key>.*)\)=\((?P<value>.*)\)\s+already\s+exists.*$',
+ r"^.*duplicate\s+key.*\"(?P<columns>[^\"]+)\"\s*\n.*$"))
def _default_dupe_key_error(integrity_error, match, engine_name,
- is_disconnect):
+ is_disconnect):
"""Filter for MySQL or Postgresql duplicate key error.
note(boris-42): In current versions of DB backends unique constraint
@@ -146,8 +146,8 @@ def _default_dupe_key_error(integrity_error, match, engine_name,
@filters("sqlite", sqla_exc.IntegrityError,
- (r"^.*columns?(?P<columns>[^)]+)(is|are)\s+not\s+unique$",
- r"^.*UNIQUE\s+constraint\s+failed:\s+(?P<columns>.+)$"))
+ (r"^.*columns?(?P<columns>[^)]+)(is|are)\s+not\s+unique$",
+ r"^.*UNIQUE\s+constraint\s+failed:\s+(?P<columns>.+)$"))
def _sqlite_dupe_key_error(integrity_error, match, engine_name, is_disconnect):
"""Filter for SQLite duplicate key error.
@@ -237,7 +237,8 @@ def _raise_mysql_table_doesnt_exist_asis(
@filters("*", sqla_exc.OperationalError, r".*")
def _raise_operational_errors_directly_filter(operational_error,
- match, engine_name, is_disconnect):
+ match, engine_name,
+ is_disconnect):
"""Filter for all remaining OperationalError classes and apply.
Filter for all remaining OperationalError classes and apply
@@ -257,7 +258,7 @@ def _raise_operational_errors_directly_filter(operational_error,
@filters("mysql", sqla_exc.OperationalError, r".*\((?:2002|2003|2006|2013)")
@filters("ibm_db_sa", sqla_exc.OperationalError, r".*(?:-30081)")
def _is_db_connection_error(operational_error, match, engine_name,
- is_disconnect):
+ is_disconnect):
"""Detect the exception as indicating a recoverable error on connect."""
raise exception.DBConnectionError(operational_error)
diff --git a/oslo/db/sqlalchemy/session.py b/oslo/db/sqlalchemy/session.py
index 2ff3caa..895eda3 100644
--- a/oslo/db/sqlalchemy/session.py
+++ b/oslo/db/sqlalchemy/session.py
@@ -548,7 +548,7 @@ def _add_trace_comments(engine):
@sqlalchemy.event.listens_for(engine, "before_cursor_execute", retval=True)
def before_cursor_execute(conn, cursor, statement, parameters, context,
- executemany):
+ executemany):
# NOTE(zzzeek) - if different steps per DB dialect are desirable
# here, switch out on engine.name for now.
diff --git a/oslo/db/sqlalchemy/test_migrations.py b/oslo/db/sqlalchemy/test_migrations.py
index d669577..1d29075 100644
--- a/oslo/db/sqlalchemy/test_migrations.py
+++ b/oslo/db/sqlalchemy/test_migrations.py
@@ -194,7 +194,7 @@ class WalkVersionsMixin(object):
try:
self.migration_api.downgrade(self.migrate_engine,
- self.REPOSITORY, version)
+ self.REPOSITORY, version)
except NotImplementedError:
# NOTE(sirp): some migrations, namely release-level
# migrations, don't support a downgrade.
@@ -234,7 +234,7 @@ class WalkVersionsMixin(object):
data = pre_upgrade(self.migrate_engine)
self.migration_api.upgrade(self.migrate_engine,
- self.REPOSITORY, version)
+ self.REPOSITORY, version)
self.assertEqual(version,
self.migration_api.db_version(self.migrate_engine,
self.REPOSITORY))
diff --git a/tests/sqlalchemy/test_exc_filters.py b/tests/sqlalchemy/test_exc_filters.py
index 8e1e1ad..0f14cbb 100644
--- a/tests/sqlalchemy/test_exc_filters.py
+++ b/tests/sqlalchemy/test_exc_filters.py
@@ -65,8 +65,9 @@ class TestsExceptionFilter(test_base.DbTestCase):
def _dbapi_fixture(self, dialect_name):
engine = self.engine
with contextlib.nested(
- mock.patch.object(engine.dialect.dbapi, "Error",
- self.Error),
+ mock.patch.object(engine.dialect.dbapi,
+ "Error",
+ self.Error),
mock.patch.object(engine.dialect, "name", dialect_name),
):
yield
@@ -88,16 +89,18 @@ class TestsExceptionFilter(test_base.DbTestCase):
mock.patch.object(engine.dialect, "do_execute", do_execute),
# replace the whole DBAPI rather than patching "Error"
# as some DBAPIs might not be patchable (?)
- mock.patch.object(engine.dialect, "dbapi",
- mock.Mock(Error=self.Error)),
+ mock.patch.object(engine.dialect,
+ "dbapi",
+ mock.Mock(Error=self.Error)),
mock.patch.object(engine.dialect, "name", dialect_name),
- mock.patch.object(engine.dialect, "is_disconnect",
- lambda *args: is_disconnect)
+ mock.patch.object(engine.dialect,
+ "is_disconnect",
+ lambda *args: is_disconnect)
):
yield
def _run_test(self, dialect_name, statement, raises, expected,
- is_disconnect=False, params=()):
+ is_disconnect=False, params=()):
with self._fixture(dialect_name, raises, is_disconnect=is_disconnect):
with self.engine.connect() as conn:
matched = self.assertRaises(
@@ -269,7 +272,8 @@ class TestRaiseReferenceError(TestsExceptionFilter):
class TestDuplicate(TestsExceptionFilter):
def _run_dupe_constraint_test(self, dialect_name, message,
- expected_columns=['a', 'b'], expected_value=None):
+ expected_columns=['a', 'b'],
+ expected_value=None):
matched = self._run_test(
dialect_name, "insert into table some_values",
self.IntegrityError(message),
@@ -279,7 +283,7 @@ class TestDuplicate(TestsExceptionFilter):
self.assertEqual(expected_value, matched.value)
def _not_dupe_constraint_test(self, dialect_name, statement, message,
- expected_cls, expected_message):
+ expected_cls, expected_message):
matched = self._run_test(
dialect_name, statement,
self.IntegrityError(message),
@@ -291,16 +295,19 @@ class TestDuplicate(TestsExceptionFilter):
self._run_dupe_constraint_test("sqlite", 'column a, b are not unique')
def test_sqlite_3_7_16_or_3_8_2_and_higher(self):
- self._run_dupe_constraint_test("sqlite",
+ self._run_dupe_constraint_test(
+ "sqlite",
'UNIQUE constraint failed: tbl.a, tbl.b')
def test_mysql_mysqldb(self):
- self._run_dupe_constraint_test("mysql",
+ self._run_dupe_constraint_test(
+ "mysql",
'(1062, "Duplicate entry '
'\'2-3\' for key \'uniq_tbl0a0b\'")', expected_value='2-3')
def test_mysql_mysqlconnector(self):
- self._run_dupe_constraint_test("mysql",
+ self._run_dupe_constraint_test(
+ "mysql",
'1062 (23000): Duplicate entry '
'\'2-3\' for key \'uniq_tbl0a0b\'")', expected_value='2-3')
@@ -314,7 +321,8 @@ class TestDuplicate(TestsExceptionFilter):
)
def test_mysql_single(self):
- self._run_dupe_constraint_test("mysql",
+ self._run_dupe_constraint_test(
+ "mysql",
"1062 (23000): Duplicate entry '2' for key 'b'",
expected_columns=['b'],
expected_value='2'
@@ -366,7 +374,8 @@ class TestDuplicate(TestsExceptionFilter):
class TestDeadlock(TestsExceptionFilter):
- def _run_deadlock_detect_test(self, dialect_name, message,
+ def _run_deadlock_detect_test(
+ self, dialect_name, message,
orig_exception_cls=TestsExceptionFilter.OperationalError):
statement = ('SELECT quota_usages.created_at AS '
'quota_usages_created_at FROM quota_usages \n'
@@ -383,7 +392,8 @@ class TestDeadlock(TestsExceptionFilter):
params=params
)
- def _not_deadlock_test(self, dialect_name, message,
+ def _not_deadlock_test(
+ self, dialect_name, message,
expected_cls, expected_message,
orig_exception_cls=TestsExceptionFilter.OperationalError):
statement = ('SELECT quota_usages.created_at AS '
@@ -474,13 +484,14 @@ class IntegrationTest(test_base.DbTestCase):
def setUp(self):
super(IntegrationTest, self).setUp()
meta = sqla.MetaData()
- self.test_table = sqla.Table(_TABLE_NAME, meta,
- sqla.Column('id', sqla.Integer,
- primary_key=True, nullable=False),
- sqla.Column('counter', sqla.Integer,
- nullable=False),
- sqla.UniqueConstraint('counter',
- name='uniq_counter'))
+ self.test_table = sqla.Table(
+ _TABLE_NAME, meta,
+ sqla.Column('id', sqla.Integer,
+ primary_key=True, nullable=False),
+ sqla.Column('counter', sqla.Integer,
+ nullable=False),
+ sqla.UniqueConstraint('counter',
+ name='uniq_counter'))
self.test_table.create(self.engine)
self.addCleanup(self.test_table.drop, self.engine)
@@ -523,7 +534,7 @@ class IntegrationTest(test_base.DbTestCase):
_session.add(foo)
self.assertTrue(_session.autoflush)
self.assertRaises(exception.DBDuplicateEntry,
- _session.query(self.Foo).all)
+ _session.query(self.Foo).all)
def test_flush_wrapper_plain_integrity_error(self):
"""test a plain integrity error wrapped as DBError."""
@@ -587,9 +598,11 @@ class TestDBDisconnected(TestsExceptionFilter):
with self._dbapi_fixture(dialect_name):
with contextlib.nested(
mock.patch.object(engine.dialect,
- "do_execute", fake_do_execute),
- mock.patch.object(engine.dialect, "is_disconnect",
- mock.Mock(return_value=True))
+ "do_execute",
+ fake_do_execute),
+ mock.patch.object(engine.dialect,
+ "is_disconnect",
+ mock.Mock(return_value=True))
):
yield
diff --git a/tests/sqlalchemy/test_handle_error.py b/tests/sqlalchemy/test_handle_error.py
index a9e6f62..2bdb6b0 100644
--- a/tests/sqlalchemy/test_handle_error.py
+++ b/tests/sqlalchemy/test_handle_error.py
@@ -55,7 +55,7 @@ class ExceptionReraiseTest(test_base.BaseTestCase):
self._fixture()
with mock.patch.object(self.engine.dialect.execution_ctx_cls,
- "handle_dbapi_exception") as patched:
+ "handle_dbapi_exception") as patched:
matchee = self.assertRaises(
MyException,
@@ -68,7 +68,7 @@ class ExceptionReraiseTest(test_base.BaseTestCase):
self._fixture()
with mock.patch.object(self.engine.dialect.execution_ctx_cls,
- "handle_dbapi_exception") as patched:
+ "handle_dbapi_exception") as patched:
self.assertRaises(
sqla.exc.DBAPIError,
@@ -156,8 +156,9 @@ class ExceptionReraiseTest(test_base.BaseTestCase):
# done the invalidation.
expect_failure = not utils.sqla_097 and orig_error and not evt_value
- with mock.patch.object(engine.dialect, "is_disconnect",
- mock.Mock(return_value=orig_error)):
+ with mock.patch.object(engine.dialect,
+ "is_disconnect",
+ mock.Mock(return_value=orig_error)):
with engine.connect() as c:
conn_rec = c.connection._connection_record
@@ -179,7 +180,8 @@ class ExceptionReraiseTest(test_base.BaseTestCase):
except NotImplementedError as ne:
self.assertTrue(expect_failure)
- self.assertEqual(str(ne),
+ self.assertEqual(
+ str(ne),
"Can't reset 'disconnect' status of exception once it "
"is set with this version of SQLAlchemy")
diff --git a/tests/sqlalchemy/test_migrations.py b/tests/sqlalchemy/test_migrations.py
index d5afc91..bb50e4e 100644
--- a/tests/sqlalchemy/test_migrations.py
+++ b/tests/sqlalchemy/test_migrations.py
@@ -54,7 +54,8 @@ class TestWalkVersions(test.BaseTestCase, migrate.WalkVersionsMixin):
{'version': version, 'engine': self.engine})
with mock.patch.object(self.migration_api,
- 'upgrade', side_effect=exc.DbMigrationError):
+ 'upgrade',
+ side_effect=exc.DbMigrationError):
log = self.useFixture(fixtures.FakeLogger())
self.assertRaises(exc.DbMigrationError, self._migrate_up, version)
self.assertEqual(expected_output, log.output)
@@ -80,7 +81,8 @@ class TestWalkVersions(test.BaseTestCase, migrate.WalkVersionsMixin):
def test_migrate_down_not_implemented(self):
with mock.patch.object(self.migration_api,
- 'downgrade', side_effect=NotImplementedError):
+ 'downgrade',
+ side_effect=NotImplementedError):
self.assertFalse(self._migrate_down(self.engine, 42))
def test_migrate_down_with_data(self):
diff --git a/tests/sqlalchemy/test_options.py b/tests/sqlalchemy/test_options.py
index 585b67a..aa882ad 100644
--- a/tests/sqlalchemy/test_options.py
+++ b/tests/sqlalchemy/test_options.py
@@ -73,14 +73,14 @@ pool_timeout=7
def test_dbapi_database_deprecated_parameters(self):
path = self.create_tempfiles([['tmp', b'[DATABASE]\n'
- b'sql_connection=fake_connection\n'
- b'sql_idle_timeout=100\n'
- b'sql_min_pool_size=99\n'
- b'sql_max_pool_size=199\n'
- b'sql_max_retries=22\n'
- b'reconnect_interval=17\n'
- b'sqlalchemy_max_overflow=101\n'
- b'sqlalchemy_pool_timeout=5\n'
+ b'sql_connection=fake_connection\n'
+ b'sql_idle_timeout=100\n'
+ b'sql_min_pool_size=99\n'
+ b'sql_max_pool_size=199\n'
+ b'sql_max_retries=22\n'
+ b'reconnect_interval=17\n'
+ b'sqlalchemy_max_overflow=101\n'
+ b'sqlalchemy_pool_timeout=5\n'
]])[0]
self.conf(['--config-file', path])
self.assertEqual(self.conf.database.connection, 'fake_connection')
@@ -94,8 +94,8 @@ pool_timeout=7
def test_dbapi_database_deprecated_parameters_sql(self):
path = self.create_tempfiles([['tmp', b'[sql]\n'
- b'connection=test_sql_connection\n'
- b'idle_timeout=99\n'
+ b'connection=test_sql_connection\n'
+ b'idle_timeout=99\n'
]])[0]
self.conf(['--config-file', path])
self.assertEqual(self.conf.database.connection, 'test_sql_connection')
diff --git a/tests/sqlalchemy/test_utils.py b/tests/sqlalchemy/test_utils.py
index 3395163..e1285f9 100644
--- a/tests/sqlalchemy/test_utils.py
+++ b/tests/sqlalchemy/test_utils.py
@@ -831,7 +831,7 @@ class TestUtils(db_test_base.DbTestCase):
@mock.patch('oslo.db.sqlalchemy.utils.add_index')
def test_change_index_columns(self, add_index, drop_index):
utils.change_index_columns(self.engine, 'test_table', 'a_index',
- ('a',))
+ ('a',))
utils.drop_index.assert_called_once_with(self.engine, 'test_table',
'a_index')
utils.add_index.assert_called_once_with(self.engine, 'test_table',
diff --git a/tox.ini b/tox.ini
index 09d9bdc..8542471 100644
--- a/tox.ini
+++ b/tox.ini
@@ -45,7 +45,7 @@ commands =
# see https://bugs.launchpad.net/hacking/+bug/1329363
show-source = True
-ignore = E123,E125,E128,E265,H305,H307,H803,H904
+ignore = E123,E125,E265,H305,H307,H803,H904
builtins = _
exclude=.venv,.git,.tox,dist,doc,*openstack/common*,*lib/python*,*egg,build