author     Stephen Finucane <stephenfin@redhat.com>    2021-07-19 11:48:58 +0100
committer  Stephen Finucane <stephenfin@redhat.com>    2021-08-10 17:38:48 +0100
commit     df901a1c765c16de77c2f734760e9ecf95483737 (patch)
tree       d513efdd21018a0c18a6fa5396af350a2d470761 /oslo_db/sqlalchemy
parent     ecb15c4ac63772e3332ac0f475e852f414ca3bbb (diff)
download   oslo-db-df901a1c765c16de77c2f734760e9ecf95483737.tar.gz
Replace use of 'Engine.execute()'
Resolve the following RemovedIn20Warning warning:

    The Engine.execute() method is considered legacy as of the 1.x series of
    SQLAlchemy and will be removed in 2.0. All statement execution in
    SQLAlchemy 2.0 is performed by the Connection.execute() method of
    Connection, or in the ORM by the Session.execute() method of Session.

Change-Id: I4c47a690a94abcb3b4b6fb087a1bf86c5350b523
Signed-off-by: Stephen Finucane <stephenfin@redhat.com>
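To illustrate the migration outside of oslo.db, here is a minimal sketch of the pattern this commit adopts; the in-memory SQLite engine and the SELECT 1 statement are illustrative stand-ins, not code from this repository.

    import sqlalchemy
    from sqlalchemy import text

    # Illustrative engine only; oslo.db builds its engines elsewhere.
    engine = sqlalchemy.create_engine("sqlite://")

    # Legacy 1.x pattern flagged by RemovedIn20Warning:
    #     result = engine.execute(text("SELECT 1"))
    #
    # 2.0-style replacement: acquire a Connection and execute on it.
    with engine.connect() as conn:
        rows = conn.execute(text("SELECT 1")).fetchall()

The diff below applies this same pattern to drop_old_duplicate_entries_from_table() and get_non_innodb_tables().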
Diffstat (limited to 'oslo_db/sqlalchemy')
-rw-r--r--  oslo_db/sqlalchemy/utils.py  58
1 file changed, 31 insertions(+), 27 deletions(-)
diff --git a/oslo_db/sqlalchemy/utils.py b/oslo_db/sqlalchemy/utils.py
index 90236d5..333fff0 100644
--- a/oslo_db/sqlalchemy/utils.py
+++ b/oslo_db/sqlalchemy/utils.py
@@ -490,32 +490,34 @@ def drop_old_duplicate_entries_from_table(engine, table_name,
func.count(table.c.id) > 1
)
- for row in engine.execute(duplicated_rows_select).fetchall():
- # NOTE(boris-42): Do not remove row that has the biggest ID.
- delete_condition = table.c.id != row[0]
- is_none = None # workaround for pyflakes
- delete_condition &= table.c.deleted_at == is_none
- for name in uc_column_names:
- delete_condition &= table.c[name] == row._mapping[name]
-
- rows_to_delete_select = sqlalchemy.sql.select(
- table.c.id,
- ).where(delete_condition)
- for row in engine.execute(rows_to_delete_select).fetchall():
- LOG.info("Deleting duplicated row with id: %(id)s from table: "
- "%(table)s", dict(id=row[0], table=table_name))
-
- if use_soft_delete:
- delete_statement = table.update().\
- where(delete_condition).\
- values({
- 'deleted': literal_column('id'),
- 'updated_at': literal_column('updated_at'),
- 'deleted_at': timeutils.utcnow()
- })
- else:
- delete_statement = table.delete().where(delete_condition)
- engine.execute(delete_statement)
+ with engine.connect() as conn:
+ for row in conn.execute(duplicated_rows_select).fetchall():
+ # NOTE(boris-42): Do not remove row that has the biggest ID.
+ delete_condition = table.c.id != row[0]
+ is_none = None # workaround for pyflakes
+ delete_condition &= table.c.deleted_at == is_none
+ for name in uc_column_names:
+ delete_condition &= table.c[name] == row._mapping[name]
+
+ rows_to_delete_select = sqlalchemy.sql.select(
+ table.c.id,
+ ).where(delete_condition)
+ for row in conn.execute(rows_to_delete_select).fetchall():
+ LOG.info(
+ "Deleting duplicated row with id: %(id)s from table: "
+ "%(table)s", dict(id=row[0], table=table_name))
+
+ if use_soft_delete:
+ delete_statement = table.update().\
+ where(delete_condition).\
+ values({
+ 'deleted': literal_column('id'),
+ 'updated_at': literal_column('updated_at'),
+ 'deleted_at': timeutils.utcnow()
+ })
+ else:
+ delete_statement = table.delete().where(delete_condition)
+ conn.execute(delete_statement)
def _get_default_deleted_value(table):
@@ -1118,7 +1120,9 @@ def get_non_innodb_tables(connectable, skip_tables=('migrate_version',
params['database'] = connectable.engine.url.database
query = text(query_str)
- noninnodb = connectable.execute(query, params)
+ # TODO(stephenfin): What about if this is already a Connection?
+ with connectable.connect() as conn:
+ noninnodb = conn.execute(query, params)
return [i[0] for i in noninnodb]
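Regarding the TODO in the second hunk: one way get_non_innodb_tables() could tolerate being handed either an Engine or an already-open Connection is sketched below. This is only an assumption about a possible follow-up, not code from the commit, and _execute_non_innodb_query() is a hypothetical helper name.

    import sqlalchemy

    def _execute_non_innodb_query(connectable, query, params):
        # Hypothetical helper: reuse the caller's Connection if we were given
        # one, otherwise open a short-lived Connection from the Engine.
        # Usage (with the hunk above): _execute_non_innodb_query(connectable,
        # text(query_str), params)
        if isinstance(connectable, sqlalchemy.engine.Connection):
            return connectable.execute(query, params).fetchall()
        with connectable.connect() as conn:
            return conn.execute(query, params).fetchall()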