author     Ilya Pekelny <ipekelny@mirantis.com>    2014-05-13 14:40:47 +0300
committer  Ilya Pekelny <ipekelny@mirantis.com>    2014-05-29 18:21:40 +0300
commit     89833d8278e753d6a19645268c1b4181163b9c06 (patch)
tree       26c55036aa7f4f88121c0f658d040d3f6d86fc83
parent     ae8e3ef410fdc32efa92d6ae5399a55353b5931a (diff)
download   oslo-db-89833d8278e753d6a19645268c1b4181163b9c06.tar.gz
Fix the test using in-file SQLite database
SQLite uses an exclusive lock on a file-backed (in-file) database, so only
one connection may be used at a time.

Change-Id: I9aad455d1466e4b17579253af47bf1b7e51dbc49
Closes-Bug: #1308032
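The pitfall the patch fixes, sketched below against the pre-2.0 SQLAlchemy
API this code uses. The table, column, and database path names are
illustrative, not taken from the patch:

    from sqlalchemy import (Column, Integer, MetaData, Table,
                            create_engine, select)

    engine = create_engine('sqlite:////tmp/demo.db')  # file-backed, not :memory:
    meta = MetaData()
    t = Table('t', meta,
              Column('id', Integer, primary_key=True),
              Column('val', Integer))
    meta.create_all(engine)
    engine.execute(t.insert(), [{'val': 1}, {'val': 1}])

    # Iterating the result lazily keeps a read cursor open on one pooled
    # connection while each DELETE runs on another; against a file-backed
    # SQLite database the writer can then fail with "database is locked".
    # fetchall() drains the cursor first, so the read lock is released
    # before any deletes run.
    rows = engine.execute(select([t.c.id])).fetchall()
    for (row_id,) in rows:
        engine.execute(t.delete().where(t.c.id == row_id))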
-rw-r--r--  oslo/db/sqlalchemy/utils.py      2
-rw-r--r--  tests/sqlalchemy/test_utils.py  12
2 files changed, 13 insertions(+), 1 deletion(-)
diff --git a/oslo/db/sqlalchemy/utils.py b/oslo/db/sqlalchemy/utils.py
index fc9f64c..3099f64 100644
--- a/oslo/db/sqlalchemy/utils.py
+++ b/oslo/db/sqlalchemy/utils.py
@@ -373,7 +373,7 @@ def drop_old_duplicate_entries_from_table(migrate_engine, table_name,
         columns_for_select, group_by=columns_for_group_by,
         having=func.count(table.c.id) > 1)
 
-    for row in migrate_engine.execute(duplicated_rows_select):
+    for row in migrate_engine.execute(duplicated_rows_select).fetchall():
         # NOTE(boris-42): Do not remove row that has the biggest ID.
         delete_condition = table.c.id != row[0]
         is_none = None  # workaround for pyflakes
diff --git a/tests/sqlalchemy/test_utils.py b/tests/sqlalchemy/test_utils.py
index d1b82d8..30ffd45 100644
--- a/tests/sqlalchemy/test_utils.py
+++ b/tests/sqlalchemy/test_utils.py
@@ -239,6 +239,18 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
         for id_ in expected_ids:
             self.assertTrue(id_ in real_ids)
 
+    def test_drop_dup_entries_in_file_conn(self):
+        table_name = "__test_tmp_table__"
+        tmp_db_file = self.create_tempfiles([['name', '']], ext='.sql')[0]
+        in_file_engine = session.EngineFacade(
+            'sqlite:///%s' % tmp_db_file).get_engine()
+        meta = MetaData()
+        meta.bind = in_file_engine
+        test_table, values = self._populate_db_for_drop_duplicate_entries(
+            in_file_engine, meta, table_name)
+        utils.drop_old_duplicate_entries_from_table(
+            in_file_engine, table_name, False, 'b', 'c')
+
     def test_drop_old_duplicate_entries_from_table_soft_delete(self):
         table_name = "__test_tmp_table__"
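For context on the new test: the in-memory SQLite database used by the other
tests never touches the filesystem, so it cannot reproduce file locking. A
minimal sketch of the same engine setup, assuming an illustrative on-disk
path rather than the test's tempfile helper:

    from oslo.db.sqlalchemy import session

    # ':memory:' SQLite involves no file locking, so the regression only
    # shows up against an on-disk database file.
    facade = session.EngineFacade('sqlite:////tmp/oslo_db_regression.db')
    engine = facade.get_engine()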