diff options
author | Zuul <zuul@review.opendev.org> | 2021-11-15 14:59:26 +0000 |
---|---|---|
committer | Gerrit Code Review <review@openstack.org> | 2021-11-15 14:59:26 +0000 |
commit | 2fbda1b67193280d2663886031a1152ef6f2927a (patch) | |
tree | faea7d3c3bde55df1d35a08d02afcb2a73da0e4e /oslo_db/sqlalchemy/utils.py | |
parent | 6be85c15ed1682f1e7ec5df6c5d40c888ca64539 (diff) | |
parent | df901a1c765c16de77c2f734760e9ecf95483737 (diff) | |
download | oslo-db-2fbda1b67193280d2663886031a1152ef6f2927a.tar.gz |
Merge "Replace use of 'Engine.execute()'" (tag: 11.1.0)
Diffstat (limited to 'oslo_db/sqlalchemy/utils.py')
-rw-r--r-- | oslo_db/sqlalchemy/utils.py | 58 |
1 file changed, 31 insertions(+), 27 deletions(-)
diff --git a/oslo_db/sqlalchemy/utils.py b/oslo_db/sqlalchemy/utils.py index 90236d5..333fff0 100644 --- a/oslo_db/sqlalchemy/utils.py +++ b/oslo_db/sqlalchemy/utils.py @@ -490,32 +490,34 @@ def drop_old_duplicate_entries_from_table(engine, table_name, func.count(table.c.id) > 1 ) - for row in engine.execute(duplicated_rows_select).fetchall(): - # NOTE(boris-42): Do not remove row that has the biggest ID. - delete_condition = table.c.id != row[0] - is_none = None # workaround for pyflakes - delete_condition &= table.c.deleted_at == is_none - for name in uc_column_names: - delete_condition &= table.c[name] == row._mapping[name] - - rows_to_delete_select = sqlalchemy.sql.select( - table.c.id, - ).where(delete_condition) - for row in engine.execute(rows_to_delete_select).fetchall(): - LOG.info("Deleting duplicated row with id: %(id)s from table: " - "%(table)s", dict(id=row[0], table=table_name)) - - if use_soft_delete: - delete_statement = table.update().\ - where(delete_condition).\ - values({ - 'deleted': literal_column('id'), - 'updated_at': literal_column('updated_at'), - 'deleted_at': timeutils.utcnow() - }) - else: - delete_statement = table.delete().where(delete_condition) - engine.execute(delete_statement) + with engine.connect() as conn: + for row in conn.execute(duplicated_rows_select).fetchall(): + # NOTE(boris-42): Do not remove row that has the biggest ID. 
+ delete_condition = table.c.id != row[0] + is_none = None # workaround for pyflakes + delete_condition &= table.c.deleted_at == is_none + for name in uc_column_names: + delete_condition &= table.c[name] == row._mapping[name] + + rows_to_delete_select = sqlalchemy.sql.select( + table.c.id, + ).where(delete_condition) + for row in conn.execute(rows_to_delete_select).fetchall(): + LOG.info( + "Deleting duplicated row with id: %(id)s from table: " + "%(table)s", dict(id=row[0], table=table_name)) + + if use_soft_delete: + delete_statement = table.update().\ + where(delete_condition).\ + values({ + 'deleted': literal_column('id'), + 'updated_at': literal_column('updated_at'), + 'deleted_at': timeutils.utcnow() + }) + else: + delete_statement = table.delete().where(delete_condition) + conn.execute(delete_statement) def _get_default_deleted_value(table): @@ -1118,7 +1120,9 @@ def get_non_innodb_tables(connectable, skip_tables=('migrate_version', params['database'] = connectable.engine.url.database query = text(query_str) - noninnodb = connectable.execute(query, params) + # TODO(stephenfin): What about if this is already a Connection? + with connectable.connect() as conn: + noninnodb = conn.execute(query, params) return [i[0] for i in noninnodb] |