summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--doc/build/Makefile2
-rw-r--r--doc/build/changelog/index.rst8
-rw-r--r--doc/build/changelog/migration_20.rst1038
-rw-r--r--doc/build/conf.py2
-rw-r--r--doc/build/core/tutorial.rst27
-rw-r--r--doc/build/errors.rst37
-rw-r--r--lib/sqlalchemy/dialects/mssql/base.py2
-rw-r--r--lib/sqlalchemy/dialects/oracle/base.py6
-rw-r--r--lib/sqlalchemy/engine/default.py1
-rw-r--r--lib/sqlalchemy/exc.py17
-rw-r--r--lib/sqlalchemy/future/__init__.py15
-rw-r--r--lib/sqlalchemy/orm/query.py13
-rw-r--r--lib/sqlalchemy/sql/dml.py178
-rw-r--r--lib/sqlalchemy/sql/functions.py2
-rw-r--r--lib/sqlalchemy/sql/selectable.py125
-rw-r--r--lib/sqlalchemy/testing/warnings.py2
-rw-r--r--lib/sqlalchemy/util/__init__.py4
-rw-r--r--lib/sqlalchemy/util/deprecations.py86
-rw-r--r--test/orm/inheritance/test_single.py2
-rw-r--r--test/orm/test_deferred.py2
-rw-r--r--test/orm/test_deprecations.py14
-rw-r--r--test/orm/test_froms.py39
-rw-r--r--test/orm/test_query.py4
-rw-r--r--test/sql/test_compiler.py6
-rw-r--r--test/sql/test_deprecations.py21
-rw-r--r--test/sql/test_external_traversal.py2
-rw-r--r--test/sql/test_selectable.py21
-rw-r--r--test/sql/test_text.py3
-rw-r--r--test/sql/test_update.py61
29 files changed, 1583 insertions, 157 deletions
diff --git a/doc/build/Makefile b/doc/build/Makefile
index 96e7db019..2a0216b78 100644
--- a/doc/build/Makefile
+++ b/doc/build/Makefile
@@ -2,7 +2,7 @@
#
# You can set these variables from the command line.
-SPHINXOPTS = -v -j auto
+SPHINXOPTS = -j auto
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = output
diff --git a/doc/build/changelog/index.rst b/doc/build/changelog/index.rst
index 6e9189a74..1f5dec176 100644
--- a/doc/build/changelog/index.rst
+++ b/doc/build/changelog/index.rst
@@ -14,6 +14,14 @@ Current Migration Guide
migration_14
+SQLAlchemy 2.0 Overview and Status
+----------------------------------
+
+.. toctree::
+ :titlesonly:
+
+ migration_20
+
Change logs
-----------
diff --git a/doc/build/changelog/migration_20.rst b/doc/build/changelog/migration_20.rst
new file mode 100644
index 000000000..44099c49a
--- /dev/null
+++ b/doc/build/changelog/migration_20.rst
@@ -0,0 +1,1038 @@
+.. _migration_20_toplevel:
+
+=============================
+SQLAlchemy 2.0 Transition
+=============================
+
+.. admonition:: About this document
+
+ SQLAlchemy 2.0 is expected to be a major shift for a wide variety of key
+ SQLAlchemy usage patterns in both the Core and ORM components. The goal
+ of this release is to make a slight readjustment in some of the most
+ fundamental assumptions of SQLAlchemy since its early beginnings, and to
+ deliver a newly streamlined usage model that is hoped to be significantly
+ more minimalist and consistent between the Core and ORM components, as well
+ as more capable. The move of Python to be Python 3 only as well as the
+ emergence of static typing systems for Python 3 are the initial
+ inspirations for this shift, as is the changing nature of the Python
+ community which now includes not just hardcore database programmers but a
+ vast new community of data scientists and students of many different
+ disciplines.
+
+ With the benefit of fifteen years of widespread use and tens of thousands
+ of user questions and issues answered, SQLAlchemy has been ready to
+ reorganize some of its priorities for quite some time, and the "big shift"
+ to Python 3 only is seen as a great opportunity to put the deepest ones
+ into play. SQLAlchemy's first releases were for Python 2.3, which had no
+ context managers, no decorators, Unicode support as mostly an added-on
+ feature that was poorly understood, and a variety of other syntactical
+ shortcomings that would be unknown today. The vast majority of Python
+ packages that are today taken for granted did not exist. SQLAlchemy itself
+ struggled with major API adjustments through versions 0.1 to 0.5, with such
+ major concepts as :class:`.Connection`, :class:`.orm.query.Query`, and the
+ Declarative mapping approach only being conceived and added to releases
+    gradually over a period of several years.
+
+ The biggest changes in SQLAlchemy 2.0 are targeting the residual
+ assumptions left over from this early period in SQLAlchemy's development as
+ well as the leftover artifacts resulting from the incremental introduction
+ of key API features such as :class:`.orm.query.Query` and Declarative.
+    It also hopes to standardize some newer capabilities that have proven to be
+ very effective.
+
+ Within each section below, please note that individual changes are still
+ at differing degrees of certainty; some changes are definitely happening
+ while others are not yet clear, and may change based on the results of
+ further prototyping as well as community feedback.
+
+
+SQLAlchemy 1.x to 2.0 Transition
+================================
+
+.. admonition:: Certainty: definite
+
+ ::
+
+An extremely high priority of the SQLAlchemy 2.0 project is that transition
+from the 1.x to 2.0 series will be as straightforward as possible. The
+strategy will allow for any application to move gradually towards a SQLAlchemy
+2.0 model, first by running on Python 3 only, next running under SQLAlchemy 1.4
+without deprecation warnings, and then by making use of SQLAlchemy 2.0-style
+APIs that will be fully available in SQLAlchemy 1.4.
+
+The steps to achieve this are as follows:
+
+* All applications should ensure that they are fully ported to Python 3 and
+ that Python 2 compatibility can be dropped. This is the first prerequisite
+ to moving towards 2.0.
+
+* a significant portion of the internal architecture of SQLAlchemy 2.0
+ is expected to be made available in SQLAlchemy 1.4. It is hoped that
+ features such as the rework of statement execution and transparent caching
+ features, as well as deep refactorings of ``select()`` and ``Query()`` to
+ fully support the new execution and caching model will be included, pending
+ that continued prototyping of these features are successful. These new
+ architectures will work within the SQLAlchemy 1.4 release transparently with
+ little discernible effect, but will enable 2.0-style usage to be possible, as
+ well as providing for the initial real-world adoption of the new
+ architectures.
+
+* A new deprecation class :class:`.exc.RemovedIn20Warning` is added, which
+ subclasses :class:`.exc.SADeprecationWarning`. Applications and their test
+ suites can opt to enable or disable reporting of the
+ :class:`.exc.RemovedIn20Warning` warning as needed. To some extent, the
+ :class:`.exc.RemovedIn20Warning` deprecation class is analogous to the ``-3``
+ flag available on Python 2 which reports on future Python 3
+ incompatibilities.
+
+* APIs which emit :class:`.exc.RemovedIn20Warning` should always feature a new
+ 1.4-compatible usage pattern that applications can migrate towards. This
+ pattern will then be fully compatible with SQLAlchemy 2.0. In this way,
+ an application can gradually adjust all of its 1.4-style code to work fully
+ against 2.0 as well.
+
+* APIs which are explicitly incompatible with SQLAlchemy 1.x style will be
+ available in two new packages ``sqlalchemy.future`` and
+ ``sqlalchemy.orm.future``. The most prominent objects in these new packages
+ will be the :func:`sqlalchemy.future.select` object, which now features
+ a refined constructor, and additionally will be compatible with ORM
+ querying, as well as the new declarative base construct in
+ ``sqlalchemy.orm.future``.
+
+* SQLAlchemy 2.0 will include the same ``sqlalchemy.future`` and
+ ``sqlalchemy.orm.future`` packages; once an application only needs to run on
+ SQLAlchemy 2.0 (as well as Python 3 only of course :) ), the "future" imports
+ can be changed to refer to the canonical import, for example ``from
+ sqlalchemy.future import select`` becomes ``from sqlalchemy import select``.
+
+
+Python 3 Only
+=============
+
+.. admonition:: Certainty: definite
+
+ ::
+
+At the top level, Python 2 is now retired in 2020, and new Python development
+across the board is expected to be in Python 3. SQLAlchemy will maintain
+Python 2 support throughout the 1.4 series. It is not yet decided if there
+will be a 1.5 series as well and if this series would also continue to
+support Python 2 or not. However, SQLAlchemy 2.0 will be Python 3 only.
+
+It is hoped that introduction of :pep:`484` may proceed from that point forward
+over the course of subsequent major releases, including that SQLAlchemy's
+source will be fully annotated, as well as that ORM level integrations for
+:pep:`484` will be standard. However, :pep:`484` integration is not a goal of
+SQLAlchemy 2.0 itself, and support for this new system in full is expected
+to occur over the course of many major releases.
+
+"Autocommit" removed from both Core and ORM
+===========================================
+
+.. admonition:: Certainty: almost definitely
+
+ "autocommit" at the ORM level is already not a widely used pattern except to
+ the degree that the ``.begin()`` call is desirable, and a new flag
+ ``autobegin=False`` will suit that use case. For Core, the "autocommit"
+ pattern will lose most of its relevance as a result of "connectionless"
+ execution going away as well, so once applications make sure they are
+ checking out connections for their Core operations, they need only use
+ ``engine.begin()`` instead of ``engine.connect()``, which is already the
+ canonically documented pattern in the 1.x docs.
+
+SQLAlchemy's first releases were at odds with the spirit of the Python
+DBAPI (:pep:`249`) in that
+it tried to hide :pep:`249`'s emphasis on "implicit begin" and "explicit commit"
+of transactions. Fifteen years later we now see this was essentially a
+mistake, as SQLAlchemy's many patterns that attempt to "hide" the presence
+of a transaction make for a more complex API which works inconsistently and
+is extremely confusing to especially those users who are new to relational
+databases and ACID transactions in general. SQLAlchemy 2.0 will do away
+with all attempts to implicitly commit transactions, and usage patterns
+will always require that the user demarcate the "beginning" and the "end"
+of a transaction in some way, in the same way as reading or writing to a file
+in Python has a "beginning" and an "end".
+
+In SQLAlchemy 1.x, the following statements will automatically commit
+the underlying DBAPI transaction and then begin a new one, but in SQLAlchemy
+2.0 this will not occur::
+
+ conn = engine.connect()
+
+ # won't autocommit in 2.0
+ conn.execute(some_table.insert().values(foo='bar'))
+
+Nor will this autocommit::
+
+ conn = engine.connect()
+
+ # won't autocommit in 2.0
+ conn.execute("INSERT INTO table (foo) VALUES ('bar')")
+
+The options to force "autocommit" for specific connections or statements
+are also removed::
+
+ # "autocommit" execution option is removed in 2.0
+ conn.execution_options(autocommit=True).execute(stmt)
+
+ conn.execute(stmt.execution_options(autocommit=True))
+
+In the case of autocommit for a pure textual statement, there is actually a
+regular expression that parses every statement in order to detect autocommit!
+Not surprisingly, this regex is continuously failing to accommodate for various
+kinds of statements and stored procedures that imply a "write" to the
+database, leading to ongoing confusion as some statements produce results in
+the database and others don't. By preventing the user from being aware of the
+transactional concept, we get a lot of bug reports on this one because users
+don't understand that databases always use a transaction, whether or not some
+layer is autocommitting it.
+
+SQLAlchemy 2.0 will require that all database actions at every level be
+explicit as to how the transaction should be used. For the vast majority
+of Core use cases, it's the pattern that is already recommended::
+
+ with engine.begin() as conn:
+ conn.execute(some_table.insert().values(foo='bar'))
+
+For "commit as you go, or rollback instead" usage, which resembles how the
+:class:`.orm.Session` is normally used today, new ``.commit()`` and
+``.rollback()`` methods will also be added to :class:`.Connection` itself.
+These will typically be used in conjunction with the :meth:`.Engine.connect`
+method::
+
+ with engine.connect() as conn:
+ conn.execute(some_table.insert().values(foo='bar'))
+ conn.commit()
+
+ conn.execute("some other SQL")
+ conn.rollback()
+
+For the ORM, the above two patterns are already more or less how the
+:class:`.Session` is used already::
+
+ session = sessionmaker()
+
+ session.add(<things>)
+
+ session.execute(<things>)
+
+ session.commit()
+
+
+To complement the ``begin()`` use case of Core, the :class:`.Session` will
+also include a new mode of operation called ``autobegin=False``, which is
+intended to replace the ``autocommit=True`` mode. In this mode, the
+:class:`.Session` will require that :meth:`.Session.begin` is called in order
+to work with the database::
+
+ session = sessionmaker(autobegin=False)
+
+ with session.begin():
+ session.add(<things>)
+
+The difference between ``autobegin=False`` and ``autocommit=True`` is that
+the :class:`.Session` will not allow any database activity outside of the
+above transaction block. The 1.4 change :ref:`change_5074` is part of this
+architecture.
+
+In the case of both core :class:`.Connection` as well as orm :class:`.Session`,
+if neither ``.commit()`` nor ``.rollback()`` are called, the connection is
+returned to the pool normally where an implicit (yes, still need this one)
+rollback will occur. This is the case already for Core and ORM::
+
+ with engine.connect() as conn:
+ results = conn.execute("select * from some_table")
+ return results
+
+ # connection is returned to the pool, transaction is implicitly
+ # rolled back.
+
+ # or
+
+ session = sessionmaker()
+ results = session.execute(<some query>)
+
+ # connection is returned to the pool, transaction is implicitly
+ # rolled back.
+ session.close()
+
+"Implicit" and "Connectionless" execution, "bound metadata" removed
+====================================================================
+
+.. admonition:: Certainty: definite
+
+ The Core documentation has already standardized on the desired pattern here,
+ so it is likely that most modern applications would not have to change
+ much in any case, however there are probably a lot of apps that have
+ a lot of ``engine.execute()`` calls that will need to be adjusted.
+
+"Connectionless" execution refers to the still fairly popular pattern of
+invoking ``.execute()`` from the :class:`.Engine`::
+
+ result = engine.execute(some_statement)
+
+The above operation implicitly procures a :class:`.Connection` object,
+and runs the ``.execute()`` method on it. This seems like a pretty simple
+and intuitive method to have so that people who just need to invoke a few
+SQL statements don't need all the verbosity with connecting and all that.
+
+Fast forward fifteen years later and here is all that's wrong with that:
+
+* Programs that feature extended strings of ``engine.execute()`` calls, for
+ each statement getting a new connection from the connection pool (or
+ perhaps making a new database connection if the pool is in heavy use),
+ beginning a new transaction, invoking the statement, committing, returning
+ the connection to the pool. That is, the nuance that this was intended for
+ a few ad-hoc statements but not industrial strength database operations
+ is lost immediately. New users are confused as to the difference between
+ ``engine.execute()`` and ``connection.execute()``. Too many choices are
+ presented.
+
+* The above technique relies upon the "autocommit" feature, in order to work
+ as expected with any statement that implies a "write". Since autocommit
+ is already misleading, the above pattern is no longer feasible (the older
+ "threadlocal" engine strategy which provided for begin/commit on the engine
+ itself is also removed by SQLAlchemy 1.3).
+
+* The above pattern returns a result which is not yet consumed. So how
+ exactly does the connection that was used for the statement, as well as the
+ transaction necessarily begun for it, get handled, when there is still
+ an active cursor ? The answer is in multiple parts. First off, the
+ state of the cursor after the statement is invoked is inspected, to see if
+ the statement in fact has results to return, that is, the ``cursor.description``
+ attribute is non-None. If not, we assume this is a DML or DDL statement,
+ the cursor is closed immediately, and the result is returned after the
+ connection is closed. If there is a result, we leave the cursor and
+ connection open, the :class:`.ResultProxy` is then responsible for
+ autoclosing the cursor when the results are fully exhausted, and at that
+ point another special flag in the :class:`.ResultProxy` indicates that the
+ connection also needs to be returned to the pool.
+
+That last one especially sounds crazy right? That's why ``engine.execute()``
+is going away. It looks simple on the outside but it is unfortunately not,
+and also, it's unnecessary and is frequently mis-used. A whole series of
+intricate "autoclose" logic within the :class:`.ResultProxy` can be removed
+when this happens.
+
+With "connectionless" execution going away, we also take away a pattern that
+is even more legacy, which is that of "implicit, connectionless" execution::
+
+ result = some_statement.execute()
+
+The above pattern has all the issues of "connectionless" execution, plus it
+relies upon the "bound metadata" pattern, which SQLAlchemy has tried to
+de-emphasize for many years.
+
+Because implicit execution is removed, there's really no reason for "bound"
+metadata to exist. There are many internal structures that are involved with
+locating the "bind" for a particular statement, to see if an :class:`.Engine`
+is associated with some SQL statement, which necessarily involves an
+additional traversal of the statement, just to find the correct dialect with
+which to compile it. This complex and error-prone logic can be removed from
+Core by removing "bound" metadata.
+
+Overall, the above executional patterns were introduced in SQLAlchemy's
+very first 0.1 release before the :class:`.Connection` object even existed.
+After many years of de-emphasizing these patterns, "implicit, connectionless"
+execution and "bound metadata" are no longer as widely used so in 2.0 we seek
+to finally reduce the number of choices for how to execute a statement in
+Core from "many"::
+
+ # many choices
+
+ # bound metadata?
+ metadata = MetaData(engine)
+
+ # or not?
+ metadata = MetaData()
+
+ # execute from engine?
+ result = engine.execute(stmt)
+
+ # or execute the statement itself (but only if you did
+ # "bound metadata" above, which means you can't get rid of "bound" if any
+ # part of your program uses this form)
+ result = stmt.execute()
+
+ # execute from connection, but it autocommits?
+ conn = engine.connect()
+ conn.execute(stmt)
+
+ # execute from connection, but autocommit isn't working, so use the special
+ # option?
+ conn.execution_options(autocommit=True).execute(stmt)
+
+ # or on the statement ?!
+ conn.execute(stmt.execution_options(autocommit=True))
+
+ # or execute from connection, and we use explicit transaction?
+ with conn.begin():
+ conn.execute(stmt)
+
+to "one"::
+
+ # one choice!
+
+ with engine.begin() as conn:
+ result = conn.execute(stmt)
+
+
+ # OK one and a half choices:
+
+ with engine.connect() as conn:
+ result = conn.execute(stmt)
+ conn.commit()
+
+Slight Caveat - there still may need to be a "statement.execute()" kind of feature
+----------------------------------------------------------------------------------
+
+.. admonition:: Certainty: tentative
+
+ Things get a little tricky with "dynamic" ORM relationships as well as the
+ patterns that Flask uses so we have to figure something out.
+
+To suit the use case of ORM "dynamic" relationships as well as Flask-oriented
+ORM patterns, there still may be some semblance of "implicit" execution of
+a statement, however, it won't really be "connectionless". Likely, a statement
+can be directly bound to a :class:`.Connection` or :class:`.Session` once
+constructed::
+
+ stmt = select(some_table).where(criteria)
+
+ with engine.begin() as conn:
+ stmt = stmt.invoke_with(conn)
+
+ result = stmt.execute()
+
+The above pattern, if we do it, will not be a prominently encouraged public
+API; it will be used for particular extensions like "dynamic" relationships and
+Flask-style queries only.
+
+
+New "Invocation" Stage Between statement construction and Execution
+===================================================================
+
+.. admonition:: Certainty: tentative
+
+ Pending further prototyping, this is part of a larger plan that impacts
+ statement compilation, execution, and result processing.
+
+A new "invocation" object and invocation step is added to :class:`.Connection`
+as well as :class:`.orm.Session` that provides where options related to the
+execution of a statement should occur. This step is optional and handled
+automatically when :meth:`.Connection.execute` or :meth:`.Session.execute` is
+called, however is used explicitly for many new purposes::
+
+ with engine.begin() as conn:
+ stmt = table.insert()
+
+ conn.invoke(stmt).params(foo='bar').execute()
+
+ stmt = select(table)
+
+ result = conn.invoke(stmt).execution_options(stream_results=True).execute()
+ for chunk in result.chunks(size=1000):
+ process(chunk)
+
+
+ result = conn.invoke(stmt).execution_options(enable_cache=False).execute()
+
+Similarly for ORM use::
+
+ stmt = select(User).join(User.addresses).where(User.name == 'foo')
+
+ result = session.invoke(stmt).execution_options(keep_results=True).execute()
+
+ user = result.one()
+
+
+When an ``.execute()`` method is called directly from :class:`.Connection`
+or :class:`.Session`, the "invocation" object is returned, which then
+serves as the basis for retrieving result rows::
+
+ with engine.begin() as conn:
+
+ stmt = select(table).where(table.c.foo == 'bar')
+
+ # equivalent to conn.invoke(stmt).execute()
+ result = conn.execute(stmt)
+
+ rows = result.all()
+
+
+ResultProxy replaced with InvocationContext which has more refined methods
+==========================================================================
+
+.. admonition:: Certainty: tentative
+
+ This is again part of the rearchitecture of "execute()" internals and is
+ pending further prototyping.
+
+A major goal of SQLAlchemy 2.0 is to unify how "results" are handled between
+the ORM and Core.  Towards this goal, version 1.4 will already standardize
+both Core and ORM on a reworked notion of the ``RowProxy`` class, which
+is now much more of a "named tuple"-like object. Beyond that however,
+SQLAlchemy 2.0 seeks to unify the means by which a set of rows is called
+upon, where the more refined ORM-like methods ``.all()``, ``.one()`` and
+``.first()`` will now also be how Core retrieves rows, replacing the
+cursor-like ``.fetchall()``, ``.fetchone()`` methods. The notion of
+receiving "chunks" of a result at a time will be standardized across both
+systems using a method ``.chunks()`` which will behave similarly to
+``.fetchmany()``, but will work in terms of iterators.
+
+These new methods will be available from the "invocation" object introduced
+in the preceding section, which will be present both in Core and ORM equally::
+
+ with engine.begin() as conn:
+ stmt = table.insert()
+
+ result = conn.execute(stmt)
+
+ # InvocationContext against an INSERT DML
+ result.inserted_primary_key
+
+ stmt = select(table)
+
+ result = conn.execute(stmt) # statement is executed
+
+ result.all() # list
+ result.one() # first row, if doesn't exist or second row exists it raises
+ result.one_or_none() # first row or none, if second row exists it raises
+ result.first() # first row (warns if additional rows remain?)
+ result # iterator
+ result.chunks(size=1000) # iterator of lists of size N
+ result.chunk(size=1000) # list of rows of size N
+
+ # limiting columns
+
+ result.scalar() # first col of first row (warns if additional rows remain?)
+ result.scalars() # iterator of first col of each row
+ result.scalars().all() # same, as a list
+
+ result.columns('a', 'b').<anything> # limit column tuples
+ result.columns(table.c.a, table.c.b) # using Column (or ORM attribute) objects
+
+ result.columns('b', 'a') # order is maintained
+
+ # if the result is an ORM result, you could do:
+ result.columns(User, Address) # assuming these are available entities
+
+
+ # index access and slices ?
+ result[0].all() # same as result.scalars().all()
+ result[2:5].all() # same as result.columns('c', 'd', 'e').all()
+
+
+Declarative becomes a first class API
+=====================================
+
+.. admonition:: Certainty: almost definitely
+
+ Declarative is already what all the ORM documentation refers towards
+ so it doesn't even make sense that it's an "ext". The hardest part will
+ be integrating the declarative documentation appropriately.
+
+Declarative will now be part of ``sqlalchemy.orm`` in 2.0, and in 1.4 the
+new version will be present in ``sqlalchemy.orm.future``. The concept
+of the ``Base`` class will be there as it is now and do the same thing
+it already does, however it will also have some new capabilities.
+
+
+The original "mapper()" function removed; replaced with a Declarative compatibility function
+============================================================================================
+
+.. admonition:: Certainty: tentative
+
+ The proposal to have "mapper()" be a sub-function of declarative simplifies
+ the codepaths towards a class becoming mapped. The "classical mapping"
+ pattern doesn't really have that much usefulness, however as some users have
+ expressed their preference for it, the same code pattern will continue to
+ be available, just on top of declarative. Hopefully it should be a little
+ nicer even.
+
+Declarative has become very capable and in fact a mapping that is set up with
+declarative may have a superior configuration than one made with ``mapper()`` alone.
+Features that make a declarative mapping superior include:
+
+* The declarative mapping has a reference to the "class registry", which is a
+ local set of classes that can then be accessed configurationally via strings
+ when configuring inter-class relationships. Put another way, using declarative
+ you can say ``relationship("SomeClass")``, and the string name ``"SomeClass"``
+ is late-resolved to the actual mapped class ``SomeClass``.
+
+* Declarative provides convenience hooks on mapped classes such as
+ ``__declare_first__`` and ``__declare_last__``. It also allows for
+ mixins and ``__abstract__`` classes which provide for superior organization
+ of classes and attributes.
+
+* Declarative sets parameters on the underlying ``mapper()`` that allow for
+ better behaviors. A key example is when configuring single table
+ inheritance, and a particular table column is local to a subclass, Declarative
+ automatically sets up ``exclude_columns`` on the base class and other sibling
+ classes that don't include those columns.
+
+* Declarative also ensures that "inherits" is configured appropriately for
+ mappers against inherited classes and checks for several other conditions
+ that can only be determined by the fact that Declarative scans table information
+ from the mapped class itself.
+
+Some of the above Declarative capabilities are lost when one declares their
+mapping using ``__table__``, however the class registry and special hooks
+are still available. Declarative does not in fact depend on the use of
+a special base class or metaclass, this is just the API that is currently
+used. An alternative API that behaves just like ``mapper()`` can be defined
+right now as follows::
+
+ from sqlalchemy.ext.declarative import base
+ def declarative_mapper():
+ _decl_class_registry = {}
+
+ def mapper(cls, table, properties={}):
+ cls.__table__ = table
+ cls._decl_class_registry = _decl_class_registry
+ for key, value in properties.items():
+ setattr(cls, key, value)
+ base._as_declarative(cls, cls.__name__, cls.__dict__)
+
+ return mapper
+
+ # mapper here is the mapper() function
+ mapper = declarative_mapper()
+
+Above, the ``mapper()`` callable is using a class registry that's local
+to where the ``declarative_mapper()`` function was called. However, we
+can just as easily add the above ``mapper()`` function to any declarative base,
+to make for a pattern such as::
+
+ from sqlalchemy.orm import declarative_base
+
+ base = declarative_base()
+
+ class MyClass(object):
+ pass
+
+ my_table = Table("my_table", base.metadata, Column('id', Integer, primary_key=True))
+
+ # "classical" mapping:
+ base.mapper(MyClass, my_table)
+
+In 2.0, an application that still wishes to use a separate :class:`.Table` and
+does not want to use Declarative with ``__table__``, can instead use the above
+pattern which basically does the same thing.
+
+
+ORM Query Unified with Core Select
+==================================
+
+.. admonition:: Certainty: tentative
+
+   Tentative overall, however there will almost definitely be
+ architectural changes in :class:`.Query` that move it closer to
+ :meth:`.select`.
+
+ The ``session.query(<cls>)`` pattern itself will likely **not** be fully
+ removed. As this pattern is extremely prevalent and numerous within any
+ individual application, and that it does not intrinsically suggest an
+ "antipattern" from a development standpoint, at the moment we are hoping
+ that a transition to 2.0 won't require a rewrite of every ``session.query()``
+ call, however it will be a legacy pattern that may warn as such.
+
+Ever wonder why SQLAlchemy :func:`.select` uses :meth:`.Select.where` to add
+a WHERE clause and :class:`.Query` uses :meth:`.Query.filter` ? Same here!
+The :class:`.Query` object was not part of SQLAlchemy's original concept.
+Originally, the idea was that the :class:`.Mapper` construct itself would
+be able to select rows, and that :class:`.Table` objects, not classes,
+would be used to create the various criteria in a Core-style approach. The
+:class:`.Query` was basically an extension that was proposed by a user who
+quite plainly had a better idea of how to build up SQL queries. The
+"buildable" approach of :class:`.Query`, originally called ``SelectResults``,
+was also adapted to the Core SQL objects, so that :func:`.select` gained
+methods like :meth:`.Select.where`, rather than being an all-at-once composed
+object. Later on, ORM classes gained the ability to be used directly in
+constructing SQL criteria. :class:`.Query` evolved over many years to
+eventually support production of all the SQL that :func:`.select` does, to
+the point where having both forms has now become redundant.
+
+SQLAlchemy 2.0 will resolve the inconsistency here by promoting the concept
+of :func:`.select` to be the single way that one constructs a SELECT construct.
+For Core usage, the ``select()`` works mostly as it does now, except that it
+gains a real working ``.join()`` method that will append JOIN conditions to the
+statement in the same way as works for :meth:`.Query.join` right now.
+
+For ORM use however, one can construct a :func:`.select` using ORM objects, and
+then when delivered to the ``.invoke()`` or ``.execute()`` method of
+:class:`.Session`, it will be interpreted appropriately::
+
+ stmt = select(User).join(User.addresses).where(Address.email == 'foo@bar.com')
+
+ rows = session.execute(stmt).all()
+
+Similarly, methods like :meth:`.Query.update` and :meth:`.Query.delete` are now
+replaced by usage of the :func:`.update` and :func:`.delete` constructs directly::
+
+ stmt = update(User).where(User.name == 'foo').values(name='bar')
+
+ session.invoke(stmt).execution_options(synchronize_session=False).execute()
+
+ORM Query relationship patterns simplified
+==========================================
+
+.. admonition:: Certainty: definite
+
+ The patterns being removed here are enormously problematic internally,
+ represent an older, obsolete way of doing things and the more advanced
+ aspects of it are virtually never used
+
+Joining / loading on relationships uses attributes, not strings
+----------------------------------------------------------------
+
+This refers to patterns such as that of :meth:`.Query.join` as well as
+query options like :func:`.joinedload` which currently accept a mixture of
+string attribute names or actual class attributes. The string calling form
+leaves a lot more ambiguity and is also more complicated internally, so will
+be deprecated in 1.4 and removed by 2.0. This means the following won't work::
+
+ q = select(User).join("addresses")
+
+Instead, use the attribute::
+
+ q = select(User).join(User.addresses)
+
+Attributes are more explicit, such as if one were querying as follows::
+
+ u1 = aliased(User)
+ u2 = aliased(User)
+
+ q = select(u1, u2).where(u1.id > u2.id).join(u1.addresses)
+
+Above, the query knows that the join should be from the "u1" alias and
+not "u2".
+
+Similar changes will occur in all areas where strings are currently accepted::
+
+ # removed
+    q = select(User).options(joinedload("addresses"))
+
+ # use instead
+    q = select(User).options(joinedload(User.addresses))
+
+ # removed
+ q = select(Address).where(with_parent(u1, "addresses"))
+
+ # use instead
+ q = select(Address).where(with_parent(u1, User.addresses))
+
+Chaining using lists of attributes, rather than individual calls, removed
+--------------------------------------------------------------------------
+
+"Chained" forms of joining and loader options which accept multiple mapped
+attributes in a list will also be removed::
+
+ # removed
+ q = select(User).join("orders", "items", "keywords")
+
+ # use instead
+ q = select(User).join(User.orders).join(Order.items).join(Item.keywords)
+
+
+join(..., aliased=True), from_joinpoint removed
+-----------------------------------------------
+
+The ``aliased=True`` option on :meth:`.Query.join` is another feature that
+seems to be almost never used, based on extensive code searches to find
+actual use of this feature. The internal complexity that the ``aliased=True``
+flag requires is **enormous**, and will be going away in 2.0.
+
+Since most users aren't familiar with this flag, it allows for automatic
+aliasing of elements along a join, which then applies automatic aliasing
+to filter conditions. The original use case was to assist in long chains
+of self-referential joins, such as::
+
+ q = session.query(Node).\
+ join("children", "children", aliased=True).\
+ filter(Node.name == 'some sub child')
+
+Where above, there would be two JOINs between three instances of the "node"
+table assuming ``Node.children`` is a self-referential (e.g. adjacency list)
+relationship to the ``Node`` class itself. The "node" table would be aliased
+at each step and the final ``filter()`` call would adapt itself to the last
+"node" table in the chain.
+
+It is this automatic adaption of the filter criteria that is enormously
+complicated internally and almost never used in real world applications. The
+above pattern also leads to issues such as if filter criteria need to be added
+at each link in the chain; the pattern then must use the ``from_joinpoint``
+flag, for which SQLAlchemy developers could absolutely find no occurrence of
+this parameter ever being used in real world applications::
+
+ q = session.query(Node).\
+ join("children", aliased=True).filter(Node.name == 'some child').\
+ join("children", aliased=True, from_joinpoint=True).\
+ filter(Node.name == 'some sub child')
+
+The ``aliased=True`` and ``from_joinpoint`` parameters were developed at a time
+when the :class:`.Query` object didn't yet have good capabilities regarding
+joining along relationship attributes, functions like
+:meth:`.PropComparator.of_type` did not exist, and the :func:`.aliased`
+construct itself didn't exist early on.
+
+The above patterns are all suited by standard use of the :func:`.aliased`
+construct, resulting in a much clearer query as well as removing hundreds of
+lines of complexity from the internals of :class:`.Query` (or whatever it is
+to be called in 2.0 :) ) ::
+
+ n1 = aliased(Node)
+ n2 = aliased(Node)
+ q = select(Node).join(Node.children.of_type(n1)).\
+ join(n1.children.of_type(n2)).\
+ where(n1.name == "some child").\
+ where(n2.name == "some sub child")
+
+As was the case earlier, the ``.join()`` method will still allow arguments
+of the form ``(target, onclause)`` as well::
+
+ n1 = aliased(Node)
+ n2 = aliased(Node)
+
+ # still a little bit of "more than one way to do it" :)
+ # but way better than before! We'll be OK
+
+ q = select(Node).join(n1, Node.children).\
+ join(n2, n1.children).\
+ where(n1.name == "some child").\
+ where(n2.name == "some sub child")
+
+
+
+By using attributes instead of strings above, the :meth:`.Query.join` method
+no longer needs the almost never-used option of ``from_joinpoint``.
+
+Transparent Statement Compilation Caching replaces "Baked" queries, works in Core
+==================================================================================
+
+.. admonition:: Certainty: tentative
+
+ Pending further architectural prototyping and performance testing
+
+A major restructuring of the Core internals as well as of that of the ORM
+:class:`.Query` will be reorganizing the major statement objects to have very
+simplified "builder" internals, that is, when you construct an object like
+``select(table).where(criteria).join(some_table)``, the arguments passed are
+simply stored and as little processing as possible will occur. Then there is
+a new mechanism by which a cache key can be generated from all of the state
+passed into the object at this point. The Core execution system will make use
+of this cache key when seeking to compile a statement, using a pre-compiled
+object if one is available. If a compiled object needs to be constructed, the
+additional work of interpreting things like the "where" clause, interpreting
+``.join()``, etc. into SQL elements will occur at this point, in contrast to the
+1.3.x and earlier series of SQLAlchemy, where it occurs during
+construction.
+
+The Core execution system will also initiate this same task on behalf of the
+"ORM" version of ``select()``; the "post-construction" worker is pluggable,
+so in the context of the ORM, an object similar to the :class:`.QueryContext`
+will perform this work. While :class:`.QueryContext` is currently invoked
+when one emits a call like ``query.all()``, constructing a ``select()``
+object which is passed to the Core for execution, the new flow will be that
+the ``select()`` object that was built up with ORM state will be sent to Core,
+where the "post-construction" task invoked when no cached object is
+present will invoke :class:`.QueryContext` which then processes all the
+state of the ``select()`` in terms of the ORM, and then invokes it
+like any other Core statement. A similar "pre-result" step is associated
+with the execution which is where the plain result rows will be filtered
+into ORM rows.
+
+This is in contrast to the 1.3.x and earlier series of SQLAlchemy where the
+"post-construction" of the query and "pre-result" steps are instead
+"pre-execution" and "post-result", that is, they occur outside of where Core
+would be able to cache the results of the work performed. The new
+architecture integrates the work done by the ORM into a new flow supported by
+Core.
+
+To complete the above system, a new "lambda" based SQL construction system will
+also be added, so that construction of ``select()`` and other constructs is
+even faster outside of that which is cached; this "lambda" based system is
+based on a similar concept as that of the "baked" query but is more
+sophisticated and refined so that it is easier to use. It also will be
+completely optional, as the caching will still work without the use of lambda
+constructs.
+
+All SQLAlchemy applications will have access to a large portion of the
+performance gains that are offered by the "baked" query system now, and it will
+apply to all statements, Core / ORM, select/insert/update/delete/other, and
+it will be fully transparent. Applications that wish to reduce statement
+building latency even further to the levels currently offered by the "baked"
+system can opt to use the "lambda" constructs.
+
+Uniquifying ORM Rows
+====================
+
+.. admonition:: Certainty: tentative
+
+ However this is a widely requested behavior so
+ it's likely something will have to happen in this regard
+
+ORM rows returned by ``session.execute(stmt)`` are no longer automatically
+"uniqued"; this must be called explicitly::
+
+ stmt = select(User).options(joinedload(User.addresses))
+
+ # statement will raise if unique() is not used, due to joinedload()
+ # of a collection. in all other cases, unique() is not needed
+ rows = session.invoke(stmt).unique().execute().all()
+
+This includes when joined eager loading with collections is used. It is
+advised that for eager loading of collections, "selectin" loading is used
+instead. When collections that are set up to load as joined eager are present
+and ``unique()`` is not used, an exception is raised, as this will produce many
+duplicate rows and is not what the user intends. Joined eager loading of
+many-to-one relationships does not present any issue, however.
+
+This change will also end the ancient issue of users being confused why
+``session.query(User).join(User.addresses).count()`` returns a different number
+than that of ``session.query(User).join(User.addresses).all()``. The results
+will now be the same.
+
+
+Tuples, Scalars, single-row results with ORM / Core results made consistent
+============================================================================
+
+.. admonition:: Certainty: tentative
+
+ Again this is an often requested behavior
+ at the ORM level so something will have to happen in this regard
+
+The :meth:`.InvocationContext.all` method now delivers named-tuple results
+in all cases, even for an ORM select that is against a single entity. This
+is for consistency in the return type.
+
+TODO description::
+
+ # iterator
+ for user in session.execute(stmt).scalars():
+
+TODO description::
+
+ users = session.execute(stmt).scalars().all()
+
+TODO description::
+
+ # first() no longer applies a limit
+ users = session.execute(stmt.limit(1)).first()
+
+
+ # first() when there are rows remaining warns
+ users = session.execute(stmt).first()
+ Warning: additional rows discarded; apply .limit(1) to the statement when
+ using first()
+
+How Do Magic Flask patterns etc work?!?!
+-----------------------------------------
+
+.. admonition:: Certainty: tentative
+
+ This is where the "remove Query and replace with
+ ``session.execute(select(User))``" pattern starts to hit a lot of friction,
+ so there may still have to be some older-style patterns in place. it's not
+ clear if the ``.execute()`` step will be required, for example.
+
+
+::
+
+ session = scoped_session(...)
+
+ class User(magic_flask_thing_that_links_to_scoped_session):
+ # ...
+
+
+ # old:
+
+ users = User.query.filter(User.name.like('%foo%')).all()
+
+ # new:
+
+ <drumroll>
+
+ users = User.select.where(User.name.like('%foo%')).execute().all()
+
+Above, we backtrack slightly on the "implicit execution removed" aspect,
+where Flask will be able to bind a query / select to the current Session.
+
+Same thing with lazy=dynamic....
+---------------------------------
+
+The same pattern is needed for "dynamic" relationships::
+
+    user.addresses.where(Address.id > 10).execute().all()
+
+
+What about asyncio???
+=====================
+
+.. admonition:: Certainty: tentative
+
+ Not much is really being proposed here except a willingness to continue
+ working with third-party extensions and contributors who want to work on
+ the problem, as well as hopefully making the task of integration a little
+ bit more straightforward.
+
+How can SQLAlchemy do a whole re-think for Python 3 only and not take into
+account asyncio? The current thinking here is going to be mixed for fans
+of asyncio-everything, here are the bulletpoints:
+
+* As is likely well known SQLAlchemy developers maintain that `asyncio with
+  SQL queries is usually not that compelling of an
+ idea <https://techspot.zzzeek.org/2015/02/15/asynchronous-python-and-databases/>`_
+
+* There's almost no actual advantage to having an "asyncio" version of
+ SQLAlchemy other than personal preference and arguably interoperability
+ with existing asyncio code (however thread executors remain probably a
+ better option). Database connections do not
+ usually fit the criteria of the kind of socket connection that benefits
+ by being accessed in a non-blocking way, since they are usually local,
+ fast services that are accessed on a connection-limited scale. This is
+ in complete contrast to the use case for non-blocking IO which is massively
+ scaled connections to sockets that are arbitrarily slow and/or sleepy.
+
+* Nevertheless, lots of Python programmers like the asyncio approach and feel
+  more comfortable working with requests in the inherently "callback"
+ style of event-based programming. SQLAlchemy has every desire for these
+ people to be happy.
+
+* Making things complicated is that Python doesn't have a `spec for an asyncio
+ DBAPI <https://discuss.python.org/t/asynchronous-dbapi/2206/>`_ as of yet, which
+ makes it pretty tough for DBAPIs to exist without them all being dramatically
+ different in how they work and would be integrated.
+
+* There are however a few DBAPIs for PostgreSQL that are truly non-blocking,
+ as well as at least one for MySQL that works with non-blocking IO. It's not
+ known if any such system exists for SQLite, Oracle, ODBC datasources, SQL
+ Server, etc.
+
+* There are (more than one?) extensions of SQLAlchemy right now which basically
+ pick and choose a few parts of the compilation APIs and then reimplement
+ their own engine implementation completely, such as `aiopg <https://github.com/aio-libs/aiopg/blob/master/aiopg/sa/connection.py>`_.
+
+* These implementations appear to be useful for users however they aren't able
+ to keep up with SQLAlchemy's own capabilities and they likely don't really
+ work for lots of existing use cases either.
+
+* Essentially, it is hoped that the re-architecting of :class:`.Connection`
+ to no longer support things like "autocommit" and "connectionless"
+ execution, as well as the changes to how result fetching will work with the
+ ``InvocationContext`` which is hoped to be simpler in how it interacts with
+ the cursor, will make it **much easier** to build async versions of
+ SQLAlchemy's :class:`.Connection`. The simplified model of
+ ``Connection.execute()`` and ``Session.execute()`` as the single point of
+ invocation of queries should also make things easier.
+
+* SQLAlchemy has always remained `fully open
+ <https://github.com/sqlalchemy/sqlalchemy/issues/3414>`_ to having a real
+ asyncio extension present as part of SQLAlchemy itself. However this would
+ require **dedicated, long term maintainers** in order for it to be a thing.
+
+* It's probably better that such approaches remain third party, however it
+ is hoped that architectural changes in SQLAlchemy will make such approaches
+ more straightforward to implement and track SQLAlchemy's capabilities.
+
+
diff --git a/doc/build/conf.py b/doc/build/conf.py
index 7d6c444ff..805290a89 100644
--- a/doc/build/conf.py
+++ b/doc/build/conf.py
@@ -43,7 +43,7 @@ extensions = [
# have reported this.
templates_path = [os.path.abspath("templates")]
-nitpicky = True
+nitpicky = False
# The suffix of source filenames.
source_suffix = ".rst"
diff --git a/doc/build/core/tutorial.rst b/doc/build/core/tutorial.rst
index 2537f92b5..77bf2ace0 100644
--- a/doc/build/core/tutorial.rst
+++ b/doc/build/core/tutorial.rst
@@ -2209,8 +2209,8 @@ method as Python dictionary keys, there is no other fixed ordering
available.
However in some cases, the order of parameters rendered in the SET clause of an
-UPDATE statement can be significant. The main example of this is when using
-MySQL and providing updates to column values based on that of other
+UPDATE statement may need to be explicitly stated. The main example of this is
+when using MySQL and providing updates to column values based on that of other
column values. The end result of the following statement::
UPDATE some_table SET x = y + 10, y = 20
@@ -2224,20 +2224,21 @@ a per-value basis, as opposed to on a per-row basis, and as each SET clause
is evaluated, the values embedded in the row are changing.
To suit this specific use case, the
-:paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order`
-flag may be used. When using this flag, we supply a **Python list of 2-tuples**
-as the argument to the :meth:`.Update.values` method::
+:meth:`.update.ordered_values` method may be used. When using this method,
+we supply a **series of 2-tuples**
+as the argument to the method::
- stmt = some_table.update(preserve_parameter_order=True).\
- values([(some_table.c.y, 20), (some_table.c.x, some_table.c.y + 10)])
+ stmt = some_table.update().\
+ ordered_values((some_table.c.y, 20), (some_table.c.x, some_table.c.y + 10))
-The list of 2-tuples is essentially the same structure as a Python dictionary
-except it is ordered. Using the above form, we are assured that the
-"y" column's SET clause will render first, then the "x" column's SET clause.
-
-.. versionadded:: 1.0.10 Added support for explicit ordering of UPDATE
- parameters using the :paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order` flag.
+The series of 2-tuples is essentially the same structure as a Python
+dictionary, except that it explicitly suggests a specific ordering. Using the
+above form, we are assured that the "y" column's SET clause will render first,
+then the "x" column's SET clause.
+.. versionchanged:: 1.4 Added the :meth:`.Update.ordered_values` method which
+ supersedes the :paramref:`.update.preserve_parameter_order` flag that will
+ be removed in SQLAlchemy 2.0.
.. seealso::
diff --git a/doc/build/errors.rst b/doc/build/errors.rst
index 547464636..105948dfc 100644
--- a/doc/build/errors.rst
+++ b/doc/build/errors.rst
@@ -33,6 +33,43 @@ Within this section, the goal is to try to provide background on some of the
most common runtime errors as well as programming time errors.
+Legacy API Features
+===================
+
+.. _error_b8d9:
+
+The <some function> in SQLAlchemy 2.0 will no longer <something>; use the "future" construct
+--------------------------------------------------------------------------------------------
+
+SQLAlchemy 2.0 is expected to be a major shift for a wide variety of key
+SQLAlchemy usage patterns in both the Core and ORM components. The goal
+of this release is to make a slight readjustment in some of the most
+fundamental assumptions of SQLAlchemy since its early beginnings, and
+to deliver a newly streamlined usage model that is hoped to be significantly
+more minimalist and consistent between the Core and ORM components, as well as
+more capable.
+
+Introduced at :ref:`migration_20_toplevel`, the SQLAlchemy 2.0 project includes
+a comprehensive future compatibility system that is to be integrated into the
+1.4 series of SQLAlchemy, such that applications will have a clear,
+unambiguous, and incremental upgrade path in order to migrate applications to
+being fully 2.0 compatible. The :class:`.exc.RemovedIn20Warning` deprecation
+warning is at the base of this system to provide guidance on what behaviors in
+an existing codebase will need to be modified.
+
+For some occurrences of this warning, an additional recommendation to use an
+API in either the ``sqlalchemy.future`` or ``sqlalchemy.orm.future`` packages
+may be present. This refers to two special future-compatibility packages that
+are part of SQLAlchemy 1.4 and are there to help migrate an application to the
+2.0 version.
+
+.. seealso::
+
+ :ref:`migration_20_toplevel` - An overview of the upgrade process from
+ the 1.x series, as well as the current goals and progress of SQLAlchemy
+ 2.0.
+
+
Connections and Transactions
============================
diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py
index 609a60f7c..1c47902ab 100644
--- a/lib/sqlalchemy/dialects/mssql/base.py
+++ b/lib/sqlalchemy/dialects/mssql/base.py
@@ -1715,7 +1715,7 @@ class MSSQLCompiler(compiler.SQLCompiler):
select = select._generate()
select._mssql_visit = True
select = (
- select.column(
+ select.add_columns(
sql.func.ROW_NUMBER()
.over(order_by=_order_by_clauses)
.label("mssql_rn")
diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py
index 87e0baa58..8c69bf097 100644
--- a/lib/sqlalchemy/dialects/oracle/base.py
+++ b/lib/sqlalchemy/dialects/oracle/base.py
@@ -986,7 +986,7 @@ class OracleCompiler(compiler.SQLCompiler):
for elem in for_update.of:
if not select.selected_columns.contains_column(elem):
- select = select.column(elem)
+ select = select.add_columns(elem)
# Wrap the middle select and add the hint
inner_subquery = select.alias()
@@ -1056,7 +1056,7 @@ class OracleCompiler(compiler.SQLCompiler):
limitselect._for_update_arg = for_update
select = limitselect
else:
- limitselect = limitselect.column(
+ limitselect = limitselect.add_columns(
sql.literal_column("ROWNUM").label("ora_rn")
)
limitselect._oracle_visit = True
@@ -1069,7 +1069,7 @@ class OracleCompiler(compiler.SQLCompiler):
limitselect_cols.corresponding_column(elem)
is None
):
- limitselect = limitselect.column(elem)
+ limitselect = limitselect.add_columns(elem)
limit_subquery = limitselect.alias()
origselect_cols = orig_select.selected_columns
diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py
index 378890444..d900a74b8 100644
--- a/lib/sqlalchemy/engine/default.py
+++ b/lib/sqlalchemy/engine/default.py
@@ -670,6 +670,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
# a hook for SQLite's translation of
# result column names
+ # NOTE: pyhive is using this hook, can't remove it :(
_translate_colname = None
_expanded_parameters = util.immutabledict()
diff --git a/lib/sqlalchemy/exc.py b/lib/sqlalchemy/exc.py
index c8e71dda5..cc096ad03 100644
--- a/lib/sqlalchemy/exc.py
+++ b/lib/sqlalchemy/exc.py
@@ -587,11 +587,24 @@ class NotSupportedError(DatabaseError):
class SADeprecationWarning(DeprecationWarning):
- """Issued once per usage of a deprecated API."""
+ """Issued for usage of deprecated APIs."""
+
+
+class RemovedIn20Warning(SADeprecationWarning):
+ """Issued for usage of APIs specifically deprecated in SQLAlchemy 2.0.
+
+ .. seealso::
+
+ :ref:`error_b8d9`.
+
+ """
class SAPendingDeprecationWarning(PendingDeprecationWarning):
- """Issued once per usage of a deprecated API."""
+ """A similar warning as :class:`.SADeprecationWarning`, this warning
+ is not used in modern versions of SQLAlchemy.
+
+ """
class SAWarning(RuntimeWarning):
diff --git a/lib/sqlalchemy/future/__init__.py b/lib/sqlalchemy/future/__init__.py
new file mode 100644
index 000000000..808ef076a
--- /dev/null
+++ b/lib/sqlalchemy/future/__init__.py
@@ -0,0 +1,15 @@
+# sql/future/__init__.py
+# Copyright (C) 2005-2020 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Future 2.0 API features for Core.
+
+"""
+
+from ..sql.selectable import Select
+from ..util.langhelpers import public_factory
+
+select = public_factory(Select._create_select, ".expression.select")
diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py
index 15319e049..f19ec5673 100644
--- a/lib/sqlalchemy/orm/query.py
+++ b/lib/sqlalchemy/orm/query.py
@@ -642,8 +642,6 @@ class Query(Generative):
"""Return the full SELECT statement represented by this
:class:`.Query`, converted to a scalar subquery.
- Analogous to :meth:`sqlalchemy.sql.expression.SelectBase.as_scalar`.
-
"""
return self.scalar_subquery()
@@ -1477,18 +1475,15 @@ class Query(Generative):
for c in column:
_ColumnEntity(self, c)
- @util.pending_deprecation(
- "0.7",
- ":meth:`.add_column` is superseded " "by :meth:`.add_columns`",
- False,
+ @util.deprecated(
+ "1.4",
+ ":meth:`.Query.add_column` is deprecated and will be removed in a "
+ "future release. Please use :meth:`.Query.add_columns`",
)
def add_column(self, column):
"""Add a column expression to the list of result columns to be
returned.
- Pending deprecation: :meth:`.add_column` will be superseded by
- :meth:`.add_columns`.
-
"""
return self.add_columns(column)
diff --git a/lib/sqlalchemy/sql/dml.py b/lib/sqlalchemy/sql/dml.py
index dddbadd63..6bada51dd 100644
--- a/lib/sqlalchemy/sql/dml.py
+++ b/lib/sqlalchemy/sql/dml.py
@@ -46,19 +46,82 @@ class UpdateBase(
_prefixes = ()
named_with_column = False
+ @classmethod
+ def _constructor_20_deprecations(cls, fn_name, clsname, names):
+
+ param_to_method_lookup = dict(
+ whereclause=(
+ "The :paramref:`.%(func)s.whereclause` parameter "
+ "will be removed "
+ "in SQLAlchemy 2.0. Please refer to the "
+ ":meth:`.%(classname)s.where` method."
+ ),
+ values=(
+ "The :paramref:`.%(func)s.values` parameter will be removed "
+ "in SQLAlchemy 2.0. Please refer to the "
+ ":meth:`.%(classname)s.values` method."
+ ),
+ bind=(
+ "The :paramref:`.%(func)s.bind` parameter will be removed in "
+ "SQLAlchemy 2.0. Please use explicit connection execution."
+ ),
+ inline=(
+ "The :paramref:`.%(func)s.inline` parameter will be "
+ "removed in "
+ "SQLAlchemy 2.0. Please use the "
+ ":meth:`.%(classname)s.inline` method."
+ ),
+ prefixes=(
+ "The :paramref:`.%(func)s.prefixes parameter will be "
+ "removed in "
+ "SQLAlchemy 2.0. Please use the "
+ ":meth:`.%(classname)s.prefix_with` "
+ "method."
+ ),
+ return_defaults=(
+ "The :paramref:`.%(func)s.return_defaults` parameter will be "
+ "removed in SQLAlchemy 2.0. Please use the "
+ ":meth:`.%(classname)s.return_defaults` method."
+ ),
+ returning=(
+ "The :paramref:`.%(func)s.returning` parameter will be "
+ "removed in SQLAlchemy 2.0. Please use the "
+ ":meth:`.%(classname)s.returning`` method."
+ ),
+ preserve_parameter_order=(
+ "The :paramref:`%(func)s.preserve_parameter_order` parameter "
+ "will be removed in SQLAlchemy 2.0. Use the "
+ ":meth:`.%(classname)s.ordered_values` method with a list "
+ "of tuples. "
+ ),
+ )
+
+ return util.deprecated_params(
+ **{
+ name: (
+ "2.0",
+ param_to_method_lookup[name]
+ % {"func": fn_name, "classname": clsname},
+ )
+ for name in names
+ }
+ )
+
def _generate_fromclause_column_proxies(self, fromclause):
fromclause._columns._populate_separate_keys(
col._make_proxy(fromclause) for col in self._returning
)
- def _process_colparams(self, parameters):
+ def _process_colparams(self, parameters, preserve_parameter_order=False):
def process_single(p):
if isinstance(p, (list, tuple)):
return dict((c.key, pval) for c, pval in zip(self.table.c, p))
else:
return p
- if self._preserve_parameter_order and parameters is not None:
+ if (
+ preserve_parameter_order or self._preserve_parameter_order
+ ) and parameters is not None:
if not isinstance(parameters, list) or (
parameters and not isinstance(parameters[0], tuple)
):
@@ -492,6 +555,18 @@ class Insert(ValuesBase):
_supports_multi_parameters = True
+ @ValuesBase._constructor_20_deprecations(
+ "insert",
+ "Insert",
+ [
+ "values",
+ "inline",
+ "bind",
+ "prefixes",
+ "returning",
+ "return_defaults",
+ ],
+ )
def __init__(
self,
table,
@@ -549,7 +624,7 @@ class Insert(ValuesBase):
:ref:`inserts_and_updates` - SQL Expression Tutorial
"""
- ValuesBase.__init__(self, table, values, prefixes)
+ super(Insert, self).__init__(table, values, prefixes)
self._bind = bind
self.select = self.select_names = None
self.include_insert_from_select_defaults = False
@@ -565,6 +640,25 @@ class Insert(ValuesBase):
return ()
@_generative
+ def inline(self):
+ """Make this :class:`.Insert` construct "inline" .
+
+ When set, no attempt will be made to retrieve the
+ SQL-generated default values to be provided within the statement;
+ in particular,
+ this allows SQL expressions to be rendered 'inline' within the
+ statement without the need to pre-execute them beforehand; for
+ backends that support "returning", this turns off the "implicit
+ returning" feature for the statement.
+
+
+ .. versionchanged:: 1.4 the :paramref:`.Insert.inline` parameter
+ is now superseded by the :meth:`.Insert.inline` method.
+
+ """
+ self.inline = True
+
+ @_generative
def from_select(self, names, select, include_defaults=True):
"""Return a new :class:`.Insert` construct which represents
an ``INSERT...FROM SELECT`` statement.
@@ -636,6 +730,20 @@ class Update(ValuesBase):
__visit_name__ = "update"
+ @ValuesBase._constructor_20_deprecations(
+ "update",
+ "Update",
+ [
+ "whereclause",
+ "values",
+ "inline",
+ "bind",
+ "prefixes",
+ "returning",
+ "return_defaults",
+ "preserve_parameter_order",
+ ],
+ )
def __init__(
self,
table,
@@ -761,8 +869,9 @@ class Update(ValuesBase):
"""
+
self._preserve_parameter_order = preserve_parameter_order
- ValuesBase.__init__(self, table, values, prefixes)
+ super(Update, self).__init__(table, values, prefixes)
self._bind = bind
self._returning = returning
if whereclause is not None:
@@ -782,6 +891,62 @@ class Update(ValuesBase):
return ()
@_generative
+ def ordered_values(self, *args):
+ """Specify the VALUES clause of this UPDATE statement with an explicit
+ parameter ordering that will be maintained in the SET clause of the
+ resulting UPDATE statement.
+
+ E.g.::
+
+ stmt = table.update().ordered_values(
+                ("name", "ed"), ("ident", "foo")
+ )
+
+ .. seealso::
+
+ :ref:`updates_order_parameters` - full example of the
+ :paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order`
+ flag
+
+ .. versionchanged:: 1.4 The :meth:`.Update.ordered_values` method
+ supersedes the :paramref:`.update.preserve_parameter_order`
+ parameter, which will be removed in SQLAlchemy 2.0.
+
+ """
+ if self.select is not None:
+ raise exc.InvalidRequestError(
+ "This construct already inserts from a SELECT"
+ )
+
+ if self.parameters is None:
+ (
+ self.parameters,
+ self._has_multi_parameters,
+ ) = self._process_colparams(
+ list(args), preserve_parameter_order=True
+ )
+ else:
+ raise exc.ArgumentError(
+ "This statement already has values present"
+ )
+
+ @_generative
+ def inline(self):
+ """Make this :class:`.Update` construct "inline" .
+
+ When set, SQL defaults present on :class:`.Column` objects via the
+ ``default`` keyword will be compiled 'inline' into the statement and
+ not pre-executed. This means that their values will not be available
+ in the dictionary returned from
+ :meth:`.ResultProxy.last_updated_params`.
+
+ .. versionchanged:: 1.4 the :paramref:`.update.inline` parameter
+ is now superseded by the :meth:`.Update.inline` method.
+
+ """
+ self.inline = True
+
+ @_generative
def where(self, whereclause):
"""return a new update() construct with the given expression added to
its WHERE clause, joined to the existing clause via AND, if any.
@@ -821,6 +986,11 @@ class Delete(UpdateBase):
__visit_name__ = "delete"
+ @ValuesBase._constructor_20_deprecations(
+ "delete",
+ "Delete",
+ ["whereclause", "values", "bind", "prefixes", "returning"],
+ )
def __init__(
self,
table,
diff --git a/lib/sqlalchemy/sql/functions.py b/lib/sqlalchemy/sql/functions.py
index 068fc6809..137b1605a 100644
--- a/lib/sqlalchemy/sql/functions.py
+++ b/lib/sqlalchemy/sql/functions.py
@@ -341,7 +341,7 @@ class FunctionElement(Executable, ColumnElement, FromClause):
s = select([function_element])
"""
- s = Select([self])
+ s = Select._create_select(self)
if self._execution_options:
s = s.execution_options(**self._execution_options)
return s
diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py
index db743f408..b2ec32c13 100644
--- a/lib/sqlalchemy/sql/selectable.py
+++ b/lib/sqlalchemy/sql/selectable.py
@@ -369,7 +369,8 @@ class FromClause(HasMemoized, roles.AnonymizedFromClauseRole, Selectable):
col = list(self.primary_key)[0]
else:
col = list(self.columns)[0]
- return Select(
+ return Select._create_select_from_fromclause(
+ self,
[functions.func.count(col).label("tbl_row_count")],
whereclause,
from_obj=[self],
@@ -1018,7 +1019,11 @@ class Join(FromClause):
"""
collist = [self.left, self.right]
- return Select(collist, whereclause, from_obj=[self], **kwargs)
+ if whereclause is not None:
+ kwargs["whereclause"] = whereclause
+ return Select._create_select_from_fromclause(
+ self, collist, **kwargs
+ ).select_from(self)
@property
def bind(self):
@@ -1142,7 +1147,7 @@ class Join(FromClause):
full=self.full,
)
else:
- return self.select(use_labels=True, correlate=False).alias(name)
+ return self.select().apply_labels().correlate(None).alias(name)
@property
def _hide_froms(self):
@@ -1155,6 +1160,21 @@ class Join(FromClause):
return [self] + self.left._from_objects + self.right._from_objects
+class NoInit(object):
+ def __init__(self, *arg, **kw):
+ raise NotImplementedError(
+ "The %s class is not intended to be constructed "
+ "directly. Please use the %s() standalone "
+ "function or the %s() method available from appropriate "
+ "selectable objects."
+ % (
+ self.__class__.__name__,
+ self.__class__.__name__.lower(),
+ self.__class__.__name__.lower(),
+ )
+ )
+
+
# FromClause ->
# AliasedReturnsRows
# -> Alias only for FromClause
@@ -1163,7 +1183,7 @@ class Join(FromClause):
# -> Lateral -> FromClause, but we accept SelectBase
# w/ non-deprecated coercion
# -> TableSample -> only for FromClause
-class AliasedReturnsRows(FromClause):
+class AliasedReturnsRows(NoInit, FromClause):
"""Base class of aliases against tables, subqueries, and other
selectables."""
@@ -1175,19 +1195,6 @@ class AliasedReturnsRows(FromClause):
("name", InternalTraversal.dp_anon_name),
]
- def __init__(self, *arg, **kw):
- raise NotImplementedError(
- "The %s class is not intended to be constructed "
- "directly. Please use the %s() standalone "
- "function or the %s() method available from appropriate "
- "selectable objects."
- % (
- self.__class__.__name__,
- self.__class__.__name__.lower(),
- self.__class__.__name__.lower(),
- )
- )
-
@classmethod
def _construct(cls, *arg, **kw):
obj = cls.__new__(cls)
@@ -3046,7 +3053,7 @@ class DeprecatedSelectGenerations(object):
:class:`.Select` object.
"""
- self.column.non_generative(self, column)
+ self.add_columns.non_generative(self, column)
@util.deprecated(
"1.4",
@@ -3171,6 +3178,38 @@ class Select(
+ SupportsCloneAnnotations._traverse_internals
)
+ @classmethod
+ def _create_select(cls, *entities):
+ self = cls.__new__(cls)
+ self._raw_columns = [
+ coercions.expect(roles.ColumnsClauseRole, ent)
+ for ent in util.to_list(entities)
+ ]
+
+ # this should all go away once Select is converted to have
+ # default state at the class level
+ self._auto_correlate = True
+ self._from_obj = util.OrderedSet()
+ self._whereclause = None
+ self._having = None
+
+ GenerativeSelect.__init__(self)
+
+ return self
+
+ @classmethod
+ def _create_select_from_fromclause(cls, target, entities, *arg, **kw):
+ if arg or kw:
+ util.warn_deprecated_20(
+ "Passing arguments to %s.select() is deprecated and "
+ "will be removed in SQLAlchemy 2.0. Please use generative "
+ "methods such as select().where(), etc."
+ % (target.__class__.__name__,)
+ )
+ return Select(entities, *arg, **kw)
+ else:
+ return Select._create_select(*entities)
+
@util.deprecated_params(
autocommit=(
"0.6",
@@ -3201,7 +3240,7 @@ class Select(
suffixes=None,
**kwargs
):
- """Construct a new :class:`.Select`.
+ """Construct a new :class:`.Select` using the 1.x style API.
Similar functionality is also available via the
:meth:`.FromClause.select` method on any :class:`.FromClause`.
@@ -3387,6 +3426,13 @@ class Select(
:meth:`.Select.apply_labels`
"""
+ util.warn_deprecated_20(
+ "The select() function in SQLAlchemy 2.0 will accept a "
+ "series of columns / tables and other entities only, "
+ "passed positionally. For forwards compatibility, use the "
+ "sqlalchemy.future.select() construct."
+ )
+
self._auto_correlate = correlate
if distinct is not False:
self._distinct = True
@@ -3418,8 +3464,6 @@ class Select(
self._raw_columns = []
for c in columns:
c = coercions.expect(roles.ColumnsClauseRole, c)
- if isinstance(c, ScalarSelect):
- c = c.self_group(against=operators.comma_op)
self._raw_columns.append(c)
else:
self._raw_columns = []
@@ -3446,7 +3490,6 @@ class Select(
GenerativeSelect.__init__(self, **kwargs)
- # @_memoized_property
@property
def _froms(self):
# current roadblock to caching is two tests that test that the
@@ -3741,13 +3784,13 @@ class Select(
)
@_generative
- def column(self, column):
- """return a new select() construct with the given column expression
+ def add_columns(self, *columns):
+ """return a new select() construct with the given column expressions
added to its columns clause.
E.g.::
- my_select = my_select.column(table.c.new_column)
+ my_select = my_select.add_columns(table.c.new_column)
See the documentation for :meth:`.Select.with_only_columns`
for guidelines on adding /replacing the columns of a
@@ -3755,12 +3798,32 @@ class Select(
"""
self._reset_memoizations()
- column = coercions.expect(roles.ColumnsClauseRole, column)
- if isinstance(column, ScalarSelect):
- column = column.self_group(against=operators.comma_op)
+ self._raw_columns = self._raw_columns + [
+ coercions.expect(roles.ColumnsClauseRole, column)
+ for column in columns
+ ]
+
+ @util.deprecated(
+ "1.4",
+ "The :meth:`.Select.column` method is deprecated and will "
+ "be removed in a future release. Please use "
+ ":meth:`.Select.add_columns`",
+ )
+ def column(self, column):
+ """return a new select() construct with the given column expression
+ added to its columns clause.
+
+ E.g.::
+
+ my_select = my_select.column(table.c.new_column)
+
+ See the documentation for :meth:`.Select.with_only_columns`
+ for guidelines on adding /replacing the columns of a
+ :class:`.Select` object.
- self._raw_columns = self._raw_columns + [column]
+ """
+ return self.add_columns(column)
@util.dependencies("sqlalchemy.sql.util")
def reduce_columns(self, sqlutil, only_synonyms=True):
@@ -4340,7 +4403,9 @@ class Exists(UnaryExpression):
return element.self_group(against=operators.exists)
def select(self, whereclause=None, **params):
- return Select([self], whereclause, **params)
+ if whereclause is not None:
+ params["whereclause"] = whereclause
+ return Select._create_select_from_fromclause(self, [self], **params)
def correlate(self, *fromclause):
e = self._clone()
diff --git a/lib/sqlalchemy/testing/warnings.py b/lib/sqlalchemy/testing/warnings.py
index 889ae27e3..08f543b47 100644
--- a/lib/sqlalchemy/testing/warnings.py
+++ b/lib/sqlalchemy/testing/warnings.py
@@ -31,6 +31,8 @@ def setup_filters():
"ignore", category=DeprecationWarning, message=".*inspect.get.*argspec"
)
+ warnings.filterwarnings("ignore", category=sa_exc.RemovedIn20Warning)
+
def assert_warnings(fn, warning_msgs, regex=False):
"""Assert that each of the given warnings are emitted by fn.
diff --git a/lib/sqlalchemy/util/__init__.py b/lib/sqlalchemy/util/__init__.py
index 30e384027..d2428bf75 100644
--- a/lib/sqlalchemy/util/__init__.py
+++ b/lib/sqlalchemy/util/__init__.py
@@ -88,12 +88,12 @@ from .compat import win32 # noqa
from .compat import with_metaclass # noqa
from .compat import zip_longest # noqa
from .deprecations import deprecated # noqa
+from .deprecations import deprecated_20 # noqa
from .deprecations import deprecated_cls # noqa
from .deprecations import deprecated_params # noqa
from .deprecations import inject_docstring_text # noqa
-from .deprecations import pending_deprecation # noqa
from .deprecations import warn_deprecated # noqa
-from .deprecations import warn_pending_deprecation # noqa
+from .deprecations import warn_deprecated_20 # noqa
from .langhelpers import add_parameter_text # noqa
from .langhelpers import as_interface # noqa
from .langhelpers import asbool # noqa
diff --git a/lib/sqlalchemy/util/deprecations.py b/lib/sqlalchemy/util/deprecations.py
index 058fe0c71..0db2c72ae 100644
--- a/lib/sqlalchemy/util/deprecations.py
+++ b/lib/sqlalchemy/util/deprecations.py
@@ -22,8 +22,10 @@ def warn_deprecated(msg, stacklevel=3):
warnings.warn(msg, exc.SADeprecationWarning, stacklevel=stacklevel)
-def warn_pending_deprecation(msg, stacklevel=3):
- warnings.warn(msg, exc.SAPendingDeprecationWarning, stacklevel=stacklevel)
+def warn_deprecated_20(msg, stacklevel=3):
+ msg += " (Background on SQLAlchemy 2.0 at: http://sqlalche.me/e/b8d9)"
+
+ warnings.warn(msg, exc.RemovedIn20Warning, stacklevel=stacklevel)
def deprecated_cls(version, message, constructor="__init__"):
@@ -41,7 +43,9 @@ def deprecated_cls(version, message, constructor="__init__"):
return decorate
-def deprecated(version, message=None, add_deprecation_to_docstring=True):
+def deprecated(
+ version, message=None, add_deprecation_to_docstring=True, warning=None
+):
"""Decorates a function and issues a deprecation warning on use.
:param version:
@@ -66,17 +70,33 @@ def deprecated(version, message=None, add_deprecation_to_docstring=True):
if message is None:
message = "Call to deprecated function %(func)s"
+ if warning is None:
+ warning = exc.SADeprecationWarning
+
def decorate(fn):
return _decorate_with_warning(
- fn,
- exc.SADeprecationWarning,
- message % dict(func=fn.__name__),
- header,
+ fn, warning, message % dict(func=fn.__name__), header
)
return decorate
+def deprecated_20(api_name, alternative=None, **kw):
+ message = (
+ "The %s() function/method is considered legacy as of the "
+ "1.x series of SQLAlchemy and will be removed in 2.0." % api_name
+ )
+
+ if alternative:
+ message += " " + alternative
+
+ message += " (Background on SQLAlchemy 2.0 at: http://sqlalche.me/e/b8d9)"
+
+ return deprecated(
+ "2.0", message=message, warning=exc.RemovedIn20Warning, **kw
+ )
+
+
def deprecated_params(**specs):
"""Decorates a function to warn on use of certain parameters.
@@ -94,8 +114,14 @@ def deprecated_params(**specs):
"""
messages = {}
+ version_warnings = {}
for param, (version, message) in specs.items():
messages[param] = _sanitize_restructured_text(message)
+ version_warnings[param] = (
+ exc.RemovedIn20Warning
+ if version == "2.0"
+ else exc.SADeprecationWarning
+ )
def decorate(fn):
spec = compat.inspect_getfullargspec(fn)
@@ -115,14 +141,16 @@ def deprecated_params(**specs):
@decorator
def warned(fn, *args, **kwargs):
for m in check_defaults:
- if kwargs[m] != defaults[m]:
+ if (defaults[m] is None and kwargs[m] is not None) or (
+ defaults[m] is not None and kwargs[m] != defaults[m]
+ ):
warnings.warn(
- messages[m], exc.SADeprecationWarning, stacklevel=3
+ messages[m], version_warnings[m], stacklevel=3
)
for m in check_kw:
if m in kwargs:
warnings.warn(
- messages[m], exc.SADeprecationWarning, stacklevel=3
+ messages[m], version_warnings[m], stacklevel=3
)
return fn(*args, **kwargs)
@@ -143,44 +171,6 @@ def deprecated_params(**specs):
return decorate
-def pending_deprecation(
- version, message=None, add_deprecation_to_docstring=True
-):
- """Decorates a function and issues a pending deprecation warning on use.
-
- :param version:
- An approximate future version at which point the pending deprecation
- will become deprecated. Not used in messaging.
-
- :param message:
- If provided, issue message in the warning. A sensible default
- is used if not provided.
-
- :param add_deprecation_to_docstring:
- Default True. If False, the wrapped function's __doc__ is left
- as-is. If True, the 'message' is prepended to the docs if
- provided, or sensible default if message is omitted.
- """
-
- if add_deprecation_to_docstring:
- header = ".. deprecated:: %s (pending) %s" % (version, (message or ""))
- else:
- header = None
-
- if message is None:
- message = "Call to deprecated function %(func)s"
-
- def decorate(fn):
- return _decorate_with_warning(
- fn,
- exc.SAPendingDeprecationWarning,
- message % dict(func=fn.__name__),
- header,
- )
-
- return decorate
-
-
def deprecated_option_value(parameter_value, default_value, warning_text):
if parameter_value is None:
return default_value
diff --git a/test/orm/inheritance/test_single.py b/test/orm/inheritance/test_single.py
index 25349157c..9426847ba 100644
--- a/test/orm/inheritance/test_single.py
+++ b/test/orm/inheritance/test_single.py
@@ -199,7 +199,7 @@ class SingleInheritanceTest(testing.AssertsCompiledSQL, fixtures.MappedTest):
eq_(
session.query(Manager.name)
- .add_column(ealias.name)
+ .add_columns(ealias.name)
.join(ealias, true())
.all(),
[("Tom", "Kurt"), ("Tom", "Ed")],
diff --git a/test/orm/test_deferred.py b/test/orm/test_deferred.py
index f55eefc4a..5acfa3f79 100644
--- a/test/orm/test_deferred.py
+++ b/test/orm/test_deferred.py
@@ -834,7 +834,7 @@ class DeferredOptionsTest(AssertsCompiledSQL, _fixtures.FixtureTest):
o1 = (
sess.query(Order)
.order_by(Order.id)
- .add_column(orders.c.description)
+ .add_columns(orders.c.description)
.first()
)[0]
diff --git a/test/orm/test_deprecations.py b/test/orm/test_deprecations.py
index d5a46e9ea..cb41b384c 100644
--- a/test/orm/test_deprecations.py
+++ b/test/orm/test_deprecations.py
@@ -416,6 +416,20 @@ class DeprecatedQueryTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"subquery object."
)
+ def test_invalid_column(self):
+ User = self.classes.User
+
+ s = create_session()
+ q = s.query(User.id)
+
+ with testing.expect_deprecated(r"Query.add_column\(\) is deprecated"):
+ q = q.add_column(User.name)
+
+ self.assert_compile(
+ q,
+ "SELECT users.id AS users_id, users.name AS users_name FROM users",
+ )
+
def test_via_textasfrom_select_from(self):
User = self.classes.User
s = create_session()
diff --git a/test/orm/test_froms.py b/test/orm/test_froms.py
index 7195f53cb..08b68232b 100644
--- a/test/orm/test_froms.py
+++ b/test/orm/test_froms.py
@@ -252,7 +252,7 @@ class QueryCorrelatesLikeSelect(QueryTest, AssertsCompiledSQL):
orm_subq = sess.query(u_alias).filter(u_alias.id > User.id).exists()
self.assert_compile(
- q.add_column(raw_subq),
+ q.add_columns(raw_subq),
"SELECT anon_1.users_id AS anon_1_users_id, "
"anon_1.users_name AS anon_1_users_name, "
"EXISTS (SELECT * FROM users AS users_1 "
@@ -265,7 +265,7 @@ class QueryCorrelatesLikeSelect(QueryTest, AssertsCompiledSQL):
# only difference is "1" vs. "*" (not sure why that is)
self.assert_compile(
- q.add_column(orm_subq),
+ q.add_columns(orm_subq),
"SELECT anon_1.users_id AS anon_1_users_id, "
"anon_1.users_name AS anon_1_users_name, "
"EXISTS (SELECT 1 FROM users AS users_1 "
@@ -545,7 +545,7 @@ class FromSelfTest(QueryTest, AssertsCompiledSQL):
eq_(
sess.query(User.id)
.from_self()
- .add_column(func.count().label("foo"))
+ .add_columns(func.count().label("foo"))
.group_by(User.id)
.order_by(User.id)
.from_self()
@@ -1835,7 +1835,7 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
sess.query(User)
.limit(1)
.options(joinedload("addresses"))
- .add_column(User.name)
+ .add_columns(User.name)
.all()
)
eq_(results, [(User(name="jack"), "jack")])
@@ -2109,11 +2109,11 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
expected = [(u, u.name) for u in sess.query(User).all()]
for add_col in (User.name, users.c.name):
- assert sess.query(User).add_column(add_col).all() == expected
+ assert sess.query(User).add_columns(add_col).all() == expected
sess.expunge_all()
assert_raises(
- sa_exc.ArgumentError, sess.query(User).add_column, object()
+ sa_exc.ArgumentError, sess.query(User).add_columns, object()
)
def test_add_multi_columns(self):
@@ -2124,12 +2124,12 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
sess = create_session()
eq_(
- sess.query(User.id).add_column(users).all(),
+ sess.query(User.id).add_columns(users).all(),
[(7, 7, "jack"), (8, 8, "ed"), (9, 9, "fred"), (10, 10, "chuck")],
)
def test_multi_columns_2(self):
- """test aliased/nonalised joins with the usage of add_column()"""
+ """test aliased/nonaliased joins with the usage of add_columns()"""
User, Address, addresses, users = (
self.classes.User,
@@ -2148,7 +2148,7 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
q.group_by(users)
.order_by(User.id)
.outerjoin("addresses")
- .add_column(func.count(Address.id).label("count"))
+ .add_columns(func.count(Address.id).label("count"))
)
eq_(q.all(), expected)
sess.expunge_all()
@@ -2159,7 +2159,7 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
q.group_by(users)
.order_by(User.id)
.outerjoin(adalias, "addresses")
- .add_column(func.count(adalias.id).label("count"))
+ .add_columns(func.count(adalias.id).label("count"))
)
eq_(q.all(), expected)
sess.expunge_all()
@@ -2173,7 +2173,9 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
.order_by(User.id)
)
q = sess.query(User)
- result = q.add_column(s.selected_columns.count).from_statement(s).all()
+ result = (
+ q.add_columns(s.selected_columns.count).from_statement(s).all()
+ )
assert result == expected
def test_raw_columns(self):
@@ -2196,8 +2198,7 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
q = (
create_session()
.query(User)
- .add_column(func.count(adalias.c.id))
- .add_column(("Name:" + users.c.name))
+ .add_columns(func.count(adalias.c.id), ("Name:" + users.c.name))
.outerjoin(adalias, "addresses")
.group_by(users)
.order_by(users.c.id)
@@ -2218,8 +2219,7 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
)
q = create_session().query(User)
result = (
- q.add_column(s.selected_columns.count)
- .add_column(s.selected_columns.concat)
+ q.add_columns(s.selected_columns.count, s.selected_columns.concat)
.from_statement(s)
.all()
)
@@ -2231,8 +2231,7 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
q = (
create_session()
.query(User)
- .add_column(func.count(addresses.c.id))
- .add_column(("Name:" + users.c.name))
+ .add_columns(func.count(addresses.c.id), ("Name:" + users.c.name))
.select_entity_from(users.outerjoin(addresses))
.group_by(users)
.order_by(users.c.id)
@@ -2244,8 +2243,7 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
q = (
create_session()
.query(User)
- .add_column(func.count(addresses.c.id))
- .add_column(("Name:" + users.c.name))
+ .add_columns(func.count(addresses.c.id), ("Name:" + users.c.name))
.outerjoin("addresses")
.group_by(users)
.order_by(users.c.id)
@@ -2257,8 +2255,7 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
q = (
create_session()
.query(User)
- .add_column(func.count(adalias.c.id))
- .add_column(("Name:" + users.c.name))
+ .add_columns(func.count(adalias.c.id), ("Name:" + users.c.name))
.outerjoin(adalias, "addresses")
.group_by(users)
.order_by(users.c.id)
diff --git a/test/orm/test_query.py b/test/orm/test_query.py
index 55809ad38..882255cc8 100644
--- a/test/orm/test_query.py
+++ b/test/orm/test_query.py
@@ -1045,14 +1045,14 @@ class InvalidGenerationsTest(QueryTest, AssertsCompiledSQL):
s = create_session()
q = s.query(User)
- assert_raises(sa_exc.ArgumentError, q.add_column, object())
+ assert_raises(sa_exc.ArgumentError, q.add_columns, object())
def test_invalid_column_tuple(self):
User = self.classes.User
s = create_session()
q = s.query(User)
- assert_raises(sa_exc.ArgumentError, q.add_column, (1, 1))
+ assert_raises(sa_exc.ArgumentError, q.add_columns, (1, 1))
def test_distinct(self):
"""test that a distinct() call is not valid before 'clauseelement'
diff --git a/test/sql/test_compiler.py b/test/sql/test_compiler.py
index b53acf61e..a7e9916cd 100644
--- a/test/sql/test_compiler.py
+++ b/test/sql/test_compiler.py
@@ -963,7 +963,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
s = select(
[], exists([1], table2.c.otherid == table1.c.myid), from_obj=table1
)
- s.column.non_generative(s, table1)
+ s.add_columns.non_generative(s, table1)
self.assert_compile(
s,
"SELECT mytable.myid, mytable.name, "
@@ -4932,7 +4932,7 @@ class ResultMapTest(fixtures.TestBase):
orig = [t.c.x, t.c.y, l1, l2, l3]
stmt = select(orig)
wrapped = stmt._generate()
- wrapped = wrapped.column(
+ wrapped = wrapped.add_columns(
func.ROW_NUMBER().over(order_by=t.c.z)
).alias()
@@ -4968,7 +4968,7 @@ class ResultMapTest(fixtures.TestBase):
eq_(len(stmt._columns_plus_names), 7)
wrapped = stmt._generate()
- wrapped = wrapped.column(
+ wrapped = wrapped.add_columns(
func.ROW_NUMBER().over(order_by=t.c.z)
).alias()
diff --git a/test/sql/test_deprecations.py b/test/sql/test_deprecations.py
index 4e88dcdb8..b2b1f470b 100644
--- a/test/sql/test_deprecations.py
+++ b/test/sql/test_deprecations.py
@@ -132,6 +132,17 @@ class DeprecationWarningsTest(fixtures.TestBase, AssertsCompiledSQL):
):
self.assert_compile(or_(and_()), "")
+ def test_fromclause_count(self):
+ with testing.expect_deprecated(
+ r"The FromClause.count\(\) method is deprecated, and will be "
+ r"removed in a future release."
+ ):
+ self.assert_compile(
+ table("q", column("x")).count(),
+ "SELECT count(q.x) AS tbl_row_count FROM q",
+ dialect="default",
+ )
+
class ConvertUnicodeDeprecationTest(fixtures.TestBase):
@@ -645,6 +656,16 @@ class SelectableTest(fixtures.TestBase, AssertsCompiledSQL):
"SELECT anon_1.a FROM (SELECT 1 AS a ORDER BY 1) AS anon_1",
)
+ def test_column(self):
+ stmt = select([column("x")])
+ with testing.expect_deprecated(
+ r"The Select.column\(\) method is deprecated and will be "
+ "removed in a future release."
+ ):
+ stmt = stmt.column(column("q"))
+
+ self.assert_compile(stmt, "SELECT x, q")
+
def test_append_column_after_replace_selectable(self):
basesel = select([literal_column("1").label("a")])
tojoin = select(
diff --git a/test/sql/test_external_traversal.py b/test/sql/test_external_traversal.py
index 7001f757f..84d99d886 100644
--- a/test/sql/test_external_traversal.py
+++ b/test/sql/test_external_traversal.py
@@ -1831,7 +1831,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
self.assert_compile(
s, "SELECT table1.col1, table1.col2, " "table1.col3 FROM table1"
)
- select_copy = s.column(column("yyy"))
+ select_copy = s.add_columns(column("yyy"))
self.assert_compile(
select_copy,
"SELECT table1.col1, table1.col2, " "table1.col3, yyy FROM table1",
diff --git a/test/sql/test_selectable.py b/test/sql/test_selectable.py
index c3655efd2..cc2df16a9 100644
--- a/test/sql/test_selectable.py
+++ b/test/sql/test_selectable.py
@@ -346,8 +346,10 @@ class SelectableTest(
sel = select([literal_column("1").label("a")])
eq_(list(sel.selected_columns.keys()), ["a"])
cloned = visitors.ReplacingCloningVisitor().traverse(sel)
- cloned.column.non_generative(cloned, literal_column("2").label("b"))
- cloned.column.non_generative(cloned, func.foo())
+ cloned.add_columns.non_generative(
+ cloned, literal_column("2").label("b")
+ )
+ cloned.add_columns.non_generative(cloned, func.foo())
eq_(list(cloned.selected_columns.keys()), ["a", "b", "foo()"])
def test_clone_col_list_changes_then_proxy(self):
@@ -355,7 +357,7 @@ class SelectableTest(
stmt = select([t.c.q]).subquery()
def add_column(stmt):
- stmt.column.non_generative(stmt, t.c.p)
+ stmt.add_columns.non_generative(stmt, t.c.p)
stmt2 = visitors.cloned_traverse(stmt, {}, {"select": add_column})
eq_(list(stmt.c.keys()), ["q"])
@@ -366,7 +368,7 @@ class SelectableTest(
stmt = select([t.c.q]).subquery()
def add_column(stmt):
- stmt.column.non_generative(stmt, t.c.p)
+ stmt.add_columns.non_generative(stmt, t.c.p)
stmt2 = visitors.cloned_traverse(stmt, {}, {"select": add_column})
eq_(list(stmt.c.keys()), ["q"])
@@ -396,7 +398,7 @@ class SelectableTest(
"JOIN (SELECT 1 AS a, 2 AS b) AS joinfrom "
"ON basefrom.a = joinfrom.a",
)
- replaced.column.non_generative(replaced, joinfrom.c.b)
+ replaced.add_columns.non_generative(replaced, joinfrom.c.b)
self.assert_compile(
replaced,
"SELECT basefrom.a, joinfrom.b FROM (SELECT 1 AS a) AS basefrom "
@@ -859,7 +861,7 @@ class SelectableTest(
def test_join(self):
a = join(table1, table2)
- print(str(a.select(use_labels=True)))
+ print(str(a.select().apply_labels()))
b = table2.alias("b")
j = join(a, b)
print(str(j))
@@ -874,7 +876,7 @@ class SelectableTest(
self.assert_(criterion.compare(j.onclause))
def test_subquery_labels_join(self):
- a = table1.select(use_labels=True).subquery()
+ a = table1.select().apply_labels().subquery()
j = join(a, table2)
criterion = a.c.table1_col1 == table2.c.col2
@@ -1580,7 +1582,7 @@ class JoinConditionTest(fixtures.TestBase, AssertsCompiledSQL):
t1t2 = t1.join(t2)
t2t3 = t2.join(t3)
- st2t3 = t2t3.select(use_labels=True).subquery()
+ st2t3 = t2t3.select().apply_labels().subquery()
j = t1t2.join(st2t3)
assert j.onclause.compare(t2.c.id == st2t3.c.t3_t2id)
self.assert_compile(
@@ -2016,7 +2018,8 @@ class ReduceTest(fixtures.TestBase, AssertsExecutionResults):
pjoin = (
people.outerjoin(engineers)
.outerjoin(managers)
- .select(use_labels=True)
+ .select()
+ .apply_labels()
.alias("pjoin")
)
eq_(
diff --git a/test/sql/test_text.py b/test/sql/test_text.py
index ae4be65ad..58de41709 100644
--- a/test/sql/test_text.py
+++ b/test/sql/test_text.py
@@ -91,8 +91,7 @@ class SelectCompositionTest(fixtures.TestBase, AssertsCompiledSQL):
def test_select_composition_two(self):
s = select()
- s = s.column(column("column1"))
- s = s.column(column("column2"))
+ s = s.add_columns(column("column1"), column("column2"))
s = s.where(text("column1=12"))
s = s.where(text("column2=19"))
s = s.order_by("column1")
diff --git a/test/sql/test_update.py b/test/sql/test_update.py
index 0313db832..68db6270f 100644
--- a/test/sql/test_update.py
+++ b/test/sql/test_update.py
@@ -505,7 +505,7 @@ class UpdateTest(_UpdateFromTestBase, fixtures.TablesTest, AssertsCompiledSQL):
column_keys=["j"],
)
- def test_update_ordered_parameters_1(self):
+ def test_update_ordered_parameters_oldstyle_1(self):
table1 = self.tables.mytable
# Confirm that we can pass values as list value pairs
@@ -534,7 +534,35 @@ class UpdateTest(_UpdateFromTestBase, fixtures.TablesTest, AssertsCompiledSQL):
"mytable.name = :param_2 || mytable.name || :param_3",
)
- def test_update_ordered_parameters_2(self):
+ def test_update_ordered_parameters_newstyle_1(self):
+ table1 = self.tables.mytable
+
+ # Confirm that we can pass values as list value pairs
+ # note these are ordered *differently* from table.c
+ values = [
+ (table1.c.name, table1.c.name + "lala"),
+ (table1.c.myid, func.do_stuff(table1.c.myid, literal("hoho"))),
+ ]
+ self.assert_compile(
+ update(table1)
+ .where(
+ (table1.c.myid == func.hoho(4))
+ & (
+ table1.c.name
+ == literal("foo") + table1.c.name + literal("lala")
+ )
+ )
+ .ordered_values(*values),
+ "UPDATE mytable "
+ "SET "
+ "name=(mytable.name || :name_1), "
+ "myid=do_stuff(mytable.myid, :param_1) "
+ "WHERE "
+ "mytable.myid = hoho(:hoho_1) AND "
+ "mytable.name = :param_2 || mytable.name || :param_3",
+ )
+
+ def test_update_ordered_parameters_oldstyle_2(self):
table1 = self.tables.mytable
# Confirm that we can pass values as list value pairs
@@ -564,6 +592,35 @@ class UpdateTest(_UpdateFromTestBase, fixtures.TablesTest, AssertsCompiledSQL):
"mytable.name = :param_2 || mytable.name || :param_3",
)
+ def test_update_ordered_parameters_newstyle_2(self):
+ table1 = self.tables.mytable
+
+ # Confirm that we can pass values as list value pairs
+ # note these are ordered *differently* from table.c
+ values = [
+ (table1.c.name, table1.c.name + "lala"),
+ ("description", "some desc"),
+ (table1.c.myid, func.do_stuff(table1.c.myid, literal("hoho"))),
+ ]
+ self.assert_compile(
+ update(
+ table1,
+ (table1.c.myid == func.hoho(4))
+ & (
+ table1.c.name
+ == literal("foo") + table1.c.name + literal("lala")
+ ),
+ ).ordered_values(*values),
+ "UPDATE mytable "
+ "SET "
+ "name=(mytable.name || :name_1), "
+ "description=:description, "
+ "myid=do_stuff(mytable.myid, :param_1) "
+ "WHERE "
+ "mytable.myid = hoho(:hoho_1) AND "
+ "mytable.name = :param_2 || mytable.name || :param_3",
+ )
+
def test_update_ordered_parameters_fire_onupdate(self):
table = self.tables.update_w_default