author    Jenkins <jenkins@review.openstack.org>    2016-12-07 11:20:14 +0000
committer Gerrit Code Review <review@openstack.org> 2016-12-07 11:20:14 +0000
commit    6bc62b047b02efd8fd6186ec0935faad975f0ffe (patch)
tree      c824fa9a22d74a7cfb272555c5de927f2f975804 /oslo_db
parent    b3869d04cff7071c1226758eb8b58fde9eba5b8d (diff)
parent    4e85efc833501391fee22d680dbc98d9439fa8e5 (diff)
download  oslo-db-6bc62b047b02efd8fd6186ec0935faad975f0ffe.tar.gz
Merge "Strip prefix `migrate_` in parameter `migrate_engine`"
Diffstat (limited to 'oslo_db')
-rw-r--r--  oslo_db/sqlalchemy/utils.py | 123
1 file changed, 60 insertions, 63 deletions
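
The merged change is a mechanical rename of the first parameter from `migrate_engine` to `engine` in the schema helpers below, so positional callers are unaffected while keyword callers must switch to the new name. As a rough illustration (a minimal sketch, not part of the commit: the in-memory SQLite engine, `fake_table`, and index names are invented here purely for demonstration):

    import sqlalchemy
    from sqlalchemy import Column, Integer, MetaData, String, Table
    from oslo_db.sqlalchemy import utils

    # Throwaway in-memory engine and table, for illustration only.
    engine = sqlalchemy.create_engine("sqlite://")
    meta = MetaData(bind=engine)
    Table("fake_table", meta,
          Column("id", Integer, primary_key=True),
          Column("host", String(255))).create()

    # Before this commit the keyword was `migrate_engine`:
    #   utils.add_index(migrate_engine=engine, table_name="fake_table",
    #                   index_name="ix_fake_host", idx_columns=("host",))
    # After the rename the same call uses `engine`:
    utils.add_index(engine=engine, table_name="fake_table",
                    index_name="ix_fake_host", idx_columns=("host",))
    assert utils.index_exists(engine, "fake_table", "ix_fake_host")
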
diff --git a/oslo_db/sqlalchemy/utils.py b/oslo_db/sqlalchemy/utils.py
index 575423b..1dd9767 100644
--- a/oslo_db/sqlalchemy/utils.py
+++ b/oslo_db/sqlalchemy/utils.py
@@ -477,21 +477,21 @@ def _get_not_supported_column(col_name_col_instance, column_name):
return column
-def drop_old_duplicate_entries_from_table(migrate_engine, table_name,
+def drop_old_duplicate_entries_from_table(engine, table_name,
use_soft_delete, *uc_column_names):
"""Drop all old rows having the same values for columns in uc_columns.
This method drops (or marks as `deleted` if use_soft_delete is True) old
duplicate rows from the table with name `table_name`.
- :param migrate_engine: Sqlalchemy engine
+ :param engine: Sqlalchemy engine
:param table_name: Table with duplicates
:param use_soft_delete: If True - values will be marked as `deleted`,
if False - values will be removed from table
:param uc_column_names: Unique constraint columns
"""
meta = MetaData()
- meta.bind = migrate_engine
+ meta.bind = engine
table = Table(table_name, meta, autoload=True)
columns_for_group_by = [table.c[name] for name in uc_column_names]
@@ -503,7 +503,7 @@ def drop_old_duplicate_entries_from_table(migrate_engine, table_name,
columns_for_select, group_by=columns_for_group_by,
having=func.count(table.c.id) > 1)
- for row in migrate_engine.execute(duplicated_rows_select).fetchall():
+ for row in engine.execute(duplicated_rows_select).fetchall():
# NOTE(boris-42): Do not remove row that has the biggest ID.
delete_condition = table.c.id != row[0]
is_none = None # workaround for pyflakes
@@ -513,7 +513,7 @@ def drop_old_duplicate_entries_from_table(migrate_engine, table_name,
rows_to_delete_select = sqlalchemy.sql.select(
[table.c.id]).where(delete_condition)
- for row in migrate_engine.execute(rows_to_delete_select).fetchall():
+ for row in engine.execute(rows_to_delete_select).fetchall():
LOG.info(_LI("Deleting duplicated row with id: %(id)s from table: "
"%(table)s"), dict(id=row[0], table=table_name))
@@ -527,7 +527,7 @@ def drop_old_duplicate_entries_from_table(migrate_engine, table_name,
})
else:
delete_statement = table.delete().where(delete_condition)
- migrate_engine.execute(delete_statement)
+ engine.execute(delete_statement)
def _get_default_deleted_value(table):
@@ -538,10 +538,10 @@ def _get_default_deleted_value(table):
raise exception.ColumnError(_("Unsupported id columns type"))
-def _restore_indexes_on_deleted_columns(migrate_engine, table_name, indexes):
- table = get_table(migrate_engine, table_name)
+def _restore_indexes_on_deleted_columns(engine, table_name, indexes):
+ table = get_table(engine, table_name)
- real_indexes = get_indexes(migrate_engine, table_name)
+ real_indexes = get_indexes(engine, table_name)
existing_index_names = dict(
[(index['name'], index['column_names']) for index in real_indexes])
@@ -553,21 +553,20 @@ def _restore_indexes_on_deleted_columns(migrate_engine, table_name, indexes):
if name in existing_index_names:
column_names = [table.c[c] for c in existing_index_names[name]]
old_index = Index(name, *column_names, unique=index["unique"])
- old_index.drop(migrate_engine)
+ old_index.drop(engine)
column_names = [table.c[c] for c in index['column_names']]
new_index = Index(index["name"], *column_names, unique=index["unique"])
- new_index.create(migrate_engine)
+ new_index.create(engine)
-def change_deleted_column_type_to_boolean(migrate_engine, table_name,
+def change_deleted_column_type_to_boolean(engine, table_name,
**col_name_col_instance):
- if migrate_engine.name == "sqlite":
+ if engine.name == "sqlite":
return _change_deleted_column_type_to_boolean_sqlite(
- migrate_engine, table_name, **col_name_col_instance)
- indexes = get_indexes(migrate_engine, table_name)
-
- table = get_table(migrate_engine, table_name)
+ engine, table_name, **col_name_col_instance)
+ indexes = get_indexes(engine, table_name)
+ table = get_table(engine, table_name)
old_deleted = Column('old_deleted', Boolean, default=False)
old_deleted.create(table, populate_default=False)
@@ -580,13 +579,12 @@ def change_deleted_column_type_to_boolean(migrate_engine, table_name,
table.c.deleted.drop()
table.c.old_deleted.alter(name="deleted")
- _restore_indexes_on_deleted_columns(migrate_engine, table_name, indexes)
+ _restore_indexes_on_deleted_columns(engine, table_name, indexes)
-def _change_deleted_column_type_to_boolean_sqlite(migrate_engine, table_name,
+def _change_deleted_column_type_to_boolean_sqlite(engine, table_name,
**col_name_col_instance):
- table = get_table(migrate_engine, table_name)
-
+ table = get_table(engine, table_name)
columns = []
for column in table.columns:
column_copy = None
@@ -608,7 +606,7 @@ def _change_deleted_column_type_to_boolean_sqlite(migrate_engine, table_name,
new_table.create()
indexes = []
- for index in get_indexes(migrate_engine, table_name):
+ for index in get_indexes(engine, table_name):
column_names = [new_table.c[c] for c in index['column_names']]
indexes.append(Index(index["name"], *column_names,
unique=index["unique"]))
@@ -621,11 +619,11 @@ def _change_deleted_column_type_to_boolean_sqlite(migrate_engine, table_name,
c_select.append(table.c.deleted == table.c.id)
ins = InsertFromSelect(new_table, sqlalchemy.sql.select(c_select))
- migrate_engine.execute(ins)
+ engine.execute(ins)
table.drop()
for index in indexes:
- index.create(migrate_engine)
+ index.create(engine)
new_table.rename(table_name)
new_table.update().\
@@ -634,14 +632,13 @@ def _change_deleted_column_type_to_boolean_sqlite(migrate_engine, table_name,
execute()
-def change_deleted_column_type_to_id_type(migrate_engine, table_name,
+def change_deleted_column_type_to_id_type(engine, table_name,
**col_name_col_instance):
- if migrate_engine.name == "sqlite":
+ if engine.name == "sqlite":
return _change_deleted_column_type_to_id_type_sqlite(
- migrate_engine, table_name, **col_name_col_instance)
- indexes = get_indexes(migrate_engine, table_name)
-
- table = get_table(migrate_engine, table_name)
+ engine, table_name, **col_name_col_instance)
+ indexes = get_indexes(engine, table_name)
+ table = get_table(engine, table_name)
new_deleted = Column('new_deleted', table.c.id.type,
default=_get_default_deleted_value(table))
@@ -655,7 +652,7 @@ def change_deleted_column_type_to_id_type(migrate_engine, table_name,
table.c.deleted.drop()
table.c.new_deleted.alter(name="deleted")
- _restore_indexes_on_deleted_columns(migrate_engine, table_name, indexes)
+ _restore_indexes_on_deleted_columns(engine, table_name, indexes)
def _is_deleted_column_constraint(constraint):
@@ -671,7 +668,7 @@ def _is_deleted_column_constraint(constraint):
return bool(re.match(r".*deleted in \(.*\)", sqltext, re.I))
-def _change_deleted_column_type_to_id_type_sqlite(migrate_engine, table_name,
+def _change_deleted_column_type_to_id_type_sqlite(engine, table_name,
**col_name_col_instance):
# NOTE(boris-42): sqlalchemy-migrate can't drop column with check
# constraints in sqlite DB and our `deleted` column has
@@ -682,7 +679,7 @@ def _change_deleted_column_type_to_id_type_sqlite(migrate_engine, table_name,
# 2) Copy all data from old to new table.
# 3) Drop old table.
# 4) Rename new table to old table name.
- meta = MetaData(bind=migrate_engine)
+ meta = MetaData(bind=engine)
table = Table(table_name, meta, autoload=True)
default_deleted_value = _get_default_deleted_value(table)
@@ -710,17 +707,17 @@ def _change_deleted_column_type_to_id_type_sqlite(migrate_engine, table_name,
new_table.create()
indexes = []
- for index in get_indexes(migrate_engine, table_name):
+ for index in get_indexes(engine, table_name):
column_names = [new_table.c[c] for c in index['column_names']]
indexes.append(Index(index["name"], *column_names,
unique=index["unique"]))
ins = InsertFromSelect(new_table, table.select())
- migrate_engine.execute(ins)
+ engine.execute(ins)
table.drop()
for index in indexes:
- index.create(migrate_engine)
+ index.create(engine)
new_table.rename(table_name)
deleted = True # workaround for pyflakes
@@ -811,14 +808,14 @@ def get_indexes(engine, table_name):
return indexes
-def index_exists(migrate_engine, table_name, index_name):
+def index_exists(engine, table_name, index_name):
"""Check if given index exists.
- :param migrate_engine: sqlalchemy engine
- :param table_name: name of the table
- :param index_name: name of the index
+ :param engine: sqlalchemy engine
+ :param table_name: name of the table
+ :param index_name: name of the index
"""
- indexes = get_indexes(migrate_engine, table_name)
+ indexes = get_indexes(engine, table_name)
index_names = [index['name'] for index in indexes]
return index_name in index_names
@@ -838,16 +835,16 @@ def index_exists_on_columns(engine, table_name, columns):
return False
-def add_index(migrate_engine, table_name, index_name, idx_columns):
+def add_index(engine, table_name, index_name, idx_columns):
"""Create an index for given columns.
- :param migrate_engine: sqlalchemy engine
- :param table_name: name of the table
- :param index_name: name of the index
- :param idx_columns: tuple with names of columns that will be indexed
+ :param engine: sqlalchemy engine
+ :param table_name: name of the table
+ :param index_name: name of the index
+ :param idx_columns: tuple with names of columns that will be indexed
"""
- table = get_table(migrate_engine, table_name)
- if not index_exists(migrate_engine, table_name, index_name):
+ table = get_table(engine, table_name)
+ if not index_exists(engine, table_name, index_name):
index = Index(
index_name, *[getattr(table.c, col) for col in idx_columns]
)
@@ -856,14 +853,14 @@ def add_index(migrate_engine, table_name, index_name, idx_columns):
raise ValueError("Index '%s' already exists!" % index_name)
-def drop_index(migrate_engine, table_name, index_name):
+def drop_index(engine, table_name, index_name):
"""Drop index with given name.
- :param migrate_engine: sqlalchemy engine
- :param table_name: name of the table
- :param index_name: name of the index
+ :param engine: sqlalchemy engine
+ :param table_name: name of the table
+ :param index_name: name of the index
"""
- table = get_table(migrate_engine, table_name)
+ table = get_table(engine, table_name)
for index in table.indexes:
if index.name == index_name:
index.drop()
@@ -872,24 +869,24 @@ def drop_index(migrate_engine, table_name, index_name):
raise ValueError("Index '%s' not found!" % index_name)
-def change_index_columns(migrate_engine, table_name, index_name, new_columns):
+def change_index_columns(engine, table_name, index_name, new_columns):
"""Change set of columns that are indexed by given index.
- :param migrate_engine: sqlalchemy engine
- :param table_name: name of the table
- :param index_name: name of the index
- :param new_columns: tuple with names of columns that will be indexed
+ :param engine: sqlalchemy engine
+ :param table_name: name of the table
+ :param index_name: name of the index
+ :param new_columns: tuple with names of columns that will be indexed
"""
- drop_index(migrate_engine, table_name, index_name)
- add_index(migrate_engine, table_name, index_name, new_columns)
+ drop_index(engine, table_name, index_name)
+ add_index(engine, table_name, index_name, new_columns)
def column_exists(engine, table_name, column):
"""Check if table has given column.
- :param engine: sqlalchemy engine
- :param table_name: name of the table
- :param column: name of the column
+ :param engine: sqlalchemy engine
+ :param table_name: name of the table
+ :param column: name of the column
"""
t = get_table(engine, table_name)
return column in t.c
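
The remaining helpers touched by this change keep the same call shape after the rename. Continuing the illustrative engine and `fake_table` from the sketch above (again an assumed setup, not code from the commit):

    # Re-point the index at another column, then drop it again.
    utils.change_index_columns(engine, "fake_table", "ix_fake_host", ("id",))
    utils.drop_index(engine, "fake_table", "ix_fake_host")

    # Keep only the newest row per `host`; with no duplicates this is a no-op.
    utils.drop_old_duplicate_entries_from_table(engine, "fake_table", False, "host")

    # column_exists never carried the migrate_ prefix and is untouched here.
    assert utils.column_exists(engine, "fake_table", "host")
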