diff options
author | Jenkins <jenkins@review.openstack.org> | 2014-06-14 19:40:30 +0000 |
---|---|---|
committer | Gerrit Code Review <review@openstack.org> | 2014-06-14 19:40:30 +0000 |
commit | fdc8ea09d83bac56a5800599edf817f5ffa32eca (patch) | |
tree | f475dfd547b27a70293c4feee2664e070e37ec14 | |
parent | fb84de0828db118ee8a2f3311e37a58c5b7c8bc1 (diff) | |
parent | 89833d8278e753d6a19645268c1b4181163b9c06 (diff) | |
download | oslo-db-fdc8ea09d83bac56a5800599edf817f5ffa32eca.tar.gz |
Merge "Fix the test using in-file SQLite database"
-rw-r--r-- | oslo/db/sqlalchemy/utils.py | 2
-rw-r--r-- | tests/sqlalchemy/test_utils.py | 12
2 files changed, 13 insertions, 1 deletions
diff --git a/oslo/db/sqlalchemy/utils.py b/oslo/db/sqlalchemy/utils.py
index 43a06c6..83821f7 100644
--- a/oslo/db/sqlalchemy/utils.py
+++ b/oslo/db/sqlalchemy/utils.py
@@ -418,7 +418,7 @@ def drop_old_duplicate_entries_from_table(migrate_engine, table_name,
         columns_for_select, group_by=columns_for_group_by,
         having=func.count(table.c.id) > 1)

-    for row in migrate_engine.execute(duplicated_rows_select):
+    for row in migrate_engine.execute(duplicated_rows_select).fetchall():
         # NOTE(boris-42): Do not remove row that has the biggest ID.
         delete_condition = table.c.id != row[0]
         is_none = None  # workaround for pyflakes
diff --git a/tests/sqlalchemy/test_utils.py b/tests/sqlalchemy/test_utils.py
index ea464e2..a24a83a 100644
--- a/tests/sqlalchemy/test_utils.py
+++ b/tests/sqlalchemy/test_utils.py
@@ -238,6 +238,18 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
         for id_ in expected_ids:
             self.assertTrue(id_ in real_ids)

+    def test_drop_dup_entries_in_file_conn(self):
+        table_name = "__test_tmp_table__"
+        tmp_db_file = self.create_tempfiles([['name', '']], ext='.sql')[0]
+        in_file_engine = session.EngineFacade(
+            'sqlite:///%s' % tmp_db_file).get_engine()
+        meta = MetaData()
+        meta.bind = in_file_engine
+        test_table, values = self._populate_db_for_drop_duplicate_entries(
+            in_file_engine, meta, table_name)
+        utils.drop_old_duplicate_entries_from_table(
+            in_file_engine, table_name, False, 'b', 'c')
+
     def test_drop_old_duplicate_entries_from_table_soft_delete(self):
         table_name = "__test_tmp_table__"