summaryrefslogtreecommitdiff
path: root/oslo_db/sqlalchemy
diff options
context:
space:
mode:
Diffstat (limited to 'oslo_db/sqlalchemy')
-rw-r--r--oslo_db/sqlalchemy/enginefacade.py15
-rw-r--r--oslo_db/sqlalchemy/orm.py2
-rw-r--r--oslo_db/sqlalchemy/test_migrations.py12
-rw-r--r--oslo_db/sqlalchemy/types.py8
-rw-r--r--oslo_db/sqlalchemy/utils.py22
5 files changed, 46 insertions, 13 deletions
diff --git a/oslo_db/sqlalchemy/enginefacade.py b/oslo_db/sqlalchemy/enginefacade.py
index 81341b7..a66528d 100644
--- a/oslo_db/sqlalchemy/enginefacade.py
+++ b/oslo_db/sqlalchemy/enginefacade.py
@@ -163,7 +163,7 @@ class _TransactionFactory(object):
}
self._maker_cfg = {
'expire_on_commit': _Default(False),
- 'autocommit': True
+ 'autocommit': False,
}
self._transaction_ctx_cfg = {
'rollback_reader_sessions': False,
@@ -1275,13 +1275,22 @@ class LegacyEngineFacade(object):
"""
def __init__(self, sql_connection, slave_connection=None,
- sqlite_fk=False, autocommit=True,
+ sqlite_fk=False, autocommit=False,
expire_on_commit=False, _conf=None, _factory=None, **kwargs):
warnings.warn(
"EngineFacade is deprecated; please use "
"oslo_db.sqlalchemy.enginefacade",
warning.OsloDBDeprecationWarning,
stacklevel=2)
+
+ if autocommit is True:
+ warnings.warn(
+ 'autocommit support will be removed in SQLAlchemy 2.0 and '
+ 'should not be relied on; please rework your code to remove '
+ 'reliance on this feature',
+ warning.OsloDBDeprecationWarning,
+ stacklevel=2)
+
if _factory:
self._factory = _factory
else:
@@ -1355,7 +1364,7 @@ class LegacyEngineFacade(object):
@classmethod
def from_config(cls, conf,
- sqlite_fk=False, autocommit=True, expire_on_commit=False):
+ sqlite_fk=False, autocommit=False, expire_on_commit=False):
"""Initialize EngineFacade using oslo.config config instance options.
:param conf: oslo.config config instance
diff --git a/oslo_db/sqlalchemy/orm.py b/oslo_db/sqlalchemy/orm.py
index b1ca00a..a5ec4c4 100644
--- a/oslo_db/sqlalchemy/orm.py
+++ b/oslo_db/sqlalchemy/orm.py
@@ -57,7 +57,7 @@ class Session(sqlalchemy.orm.session.Session):
"""oslo.db-specific Session subclass."""
-def get_maker(engine, autocommit=True, expire_on_commit=False):
+def get_maker(engine, autocommit=False, expire_on_commit=False):
"""Return a SQLAlchemy sessionmaker using the given engine."""
return sqlalchemy.orm.sessionmaker(bind=engine,
class_=Session,
diff --git a/oslo_db/sqlalchemy/test_migrations.py b/oslo_db/sqlalchemy/test_migrations.py
index 74181db..a0b5591 100644
--- a/oslo_db/sqlalchemy/test_migrations.py
+++ b/oslo_db/sqlalchemy/test_migrations.py
@@ -77,7 +77,8 @@ class WalkVersionsMixin(object, metaclass=abc.ABCMeta):
"""
- @abc.abstractproperty
+ @property
+ @abc.abstractmethod
def INIT_VERSION(self):
"""Initial version of a migration repository.
@@ -87,7 +88,8 @@ class WalkVersionsMixin(object, metaclass=abc.ABCMeta):
"""
pass
- @abc.abstractproperty
+ @property
+ @abc.abstractmethod
def REPOSITORY(self):
"""Allows basic manipulation with migration repository.
@@ -95,7 +97,8 @@ class WalkVersionsMixin(object, metaclass=abc.ABCMeta):
"""
pass
- @abc.abstractproperty
+ @property
+ @abc.abstractmethod
def migration_api(self):
"""Provides API for upgrading, downgrading and version manipulations.
@@ -103,7 +106,8 @@ class WalkVersionsMixin(object, metaclass=abc.ABCMeta):
"""
pass
- @abc.abstractproperty
+ @property
+ @abc.abstractmethod
def migrate_engine(self):
"""Provides engine instance.
diff --git a/oslo_db/sqlalchemy/types.py b/oslo_db/sqlalchemy/types.py
index b30afce..891c73d 100644
--- a/oslo_db/sqlalchemy/types.py
+++ b/oslo_db/sqlalchemy/types.py
@@ -70,6 +70,8 @@ class JsonEncodedDict(JsonEncodedType):
"""
type = dict
+ cache_ok = True
+ """This type is safe to cache."""
class JsonEncodedList(JsonEncodedType):
@@ -82,6 +84,8 @@ class JsonEncodedList(JsonEncodedType):
"""
type = list
+ cache_ok = True
+ """This type is safe to cache."""
class SoftDeleteInteger(TypeDecorator):
@@ -133,9 +137,11 @@ class String(_String):
mysql_ndb_type is used to override the String with another data type.
mysql_ndb_size is used to adjust the length of the String.
-
"""
+ cache_ok = True
+ """This type is safe to cache."""
+
def __init__(
self,
length,
diff --git a/oslo_db/sqlalchemy/utils.py b/oslo_db/sqlalchemy/utils.py
index 09156f5..83a2cbd 100644
--- a/oslo_db/sqlalchemy/utils.py
+++ b/oslo_db/sqlalchemy/utils.py
@@ -39,6 +39,7 @@ from sqlalchemy import Index
from sqlalchemy import inspect
from sqlalchemy import Integer
from sqlalchemy import MetaData
+from sqlalchemy import PrimaryKeyConstraint
from sqlalchemy.sql.expression import cast
from sqlalchemy.sql.expression import literal_column
from sqlalchemy.sql import text
@@ -608,7 +609,14 @@ def _change_deleted_column_type_to_boolean_sqlite(engine, table_name,
# FIXME(stephenfin): We shouldn't be using this private API;
# figure out how else to copy an arbitrary column schema
- constraints = [constraint._copy() for constraint in table.constraints]
+ # NOTE(stephenfin): We drop PrimaryKeyConstraint-type constraints since
# these duplicate the 'primary_key=True' attribute on the specified
+ # column(s). This technically breaks things when the primary key covers
+ # multiple columns but that's okay: these are deprecated APIs
+ constraints = [
+ constraint._copy() for constraint in table.constraints
+ if not isinstance(constraint, PrimaryKeyConstraint)
+ ]
with engine.connect() as conn:
meta = table.metadata
@@ -738,7 +746,10 @@ def _change_deleted_column_type_to_id_type_sqlite(engine, table_name,
constraints = []
for constraint in table.constraints:
- if not _is_deleted_column_constraint(constraint):
+ if not (
+ _is_deleted_column_constraint(constraint) or
+ isinstance(constraint, PrimaryKeyConstraint)
+ ):
# FIXME(stephenfin): We shouldn't be using this private API;
# figure out how else to copy an arbitrary constraint schema
constraints.append(constraint._copy())
@@ -749,7 +760,8 @@ def _change_deleted_column_type_to_id_type_sqlite(engine, table_name,
with conn.begin():
new_table = Table(
table_name + "__tmp__", meta,
- *(columns + constraints))
+ *(columns + constraints),
+ )
new_table.create(conn)
indexes = []
@@ -1169,7 +1181,9 @@ def get_non_ndbcluster_tables(connectable, skip_tables=None):
params['database'] = connectable.engine.url.database
query = text(query_str)
- nonndbcluster = connectable.execute(query, **params)
+ # TODO(stephenfin): What about if this is already a Connection?
+ with connectable.connect() as conn, conn.begin():
+ nonndbcluster = conn.execute(query, **params)
return [i[0] for i in nonndbcluster]