summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorMike Bayer <mike_mp@zzzcomputing.com>2013-04-21 17:18:49 -0400
committerMike Bayer <mike_mp@zzzcomputing.com>2013-04-21 17:18:49 -0400
commit3aff498e4a96eda06f09f09f98e73e135719b388 (patch)
treef1ca2029cfd147478447d3cb98bae587a8ccb3c2
parent1f6528ed8581ba63721bdc2a0593a5d39b9c27e0 (diff)
parentfbcdba12f88d88c509fc34eb8aab3f501d1b705b (diff)
downloadsqlalchemy-3aff498e4a96eda06f09f09f98e73e135719b388.tar.gz
merge into cymysql branch...
-rw-r--r--.hgtags1
-rw-r--r--README.dialects.rst8
-rw-r--r--doc/build/builder/changelog.py10
-rw-r--r--doc/build/changelog/changelog_02.rst2
-rw-r--r--doc/build/changelog/changelog_03.rst2
-rw-r--r--doc/build/changelog/changelog_04.rst6
-rw-r--r--doc/build/changelog/changelog_06.rst4
-rw-r--r--doc/build/changelog/changelog_07.rst65
-rw-r--r--doc/build/changelog/changelog_08.rst321
-rw-r--r--doc/build/changelog/migration_08.rst73
-rw-r--r--doc/build/conf.py4
-rw-r--r--doc/build/core/tutorial.rst972
-rw-r--r--doc/build/core/types.rst2
-rw-r--r--doc/build/dialects/index.rst4
-rw-r--r--doc/build/dialects/mysql.rst5
-rw-r--r--doc/build/glossary.rst64
-rw-r--r--doc/build/intro.rst2
-rw-r--r--doc/build/orm/session.rst2
-rw-r--r--doc/build/testdocs.py3
-rw-r--r--examples/dogpile_caching/caching_query.py37
-rw-r--r--lib/sqlalchemy/__init__.py2
-rw-r--r--lib/sqlalchemy/dialects/__init__.py1
-rw-r--r--lib/sqlalchemy/dialects/firebird/kinterbasdb.py2
-rw-r--r--lib/sqlalchemy/dialects/mssql/information_schema.py4
-rw-r--r--lib/sqlalchemy/dialects/mssql/pymssql.py4
-rw-r--r--lib/sqlalchemy/dialects/mssql/pyodbc.py2
-rw-r--r--lib/sqlalchemy/dialects/mysql/base.py3
-rw-r--r--lib/sqlalchemy/dialects/mysql/cymysql.py1
-rw-r--r--lib/sqlalchemy/dialects/mysql/gaerdbms.py4
-rw-r--r--lib/sqlalchemy/dialects/oracle/cx_oracle.py2
-rw-r--r--lib/sqlalchemy/dialects/postgresql/base.py12
-rw-r--r--lib/sqlalchemy/dialects/postgresql/hstore.py5
-rw-r--r--lib/sqlalchemy/dialects/postgresql/pg8000.py2
-rw-r--r--lib/sqlalchemy/dialects/postgresql/psycopg2.py2
-rw-r--r--lib/sqlalchemy/dialects/sybase/pyodbc.py2
-rw-r--r--lib/sqlalchemy/engine/base.py120
-rw-r--r--lib/sqlalchemy/engine/ddl.py22
-rw-r--r--lib/sqlalchemy/engine/default.py8
-rw-r--r--lib/sqlalchemy/engine/result.py8
-rw-r--r--lib/sqlalchemy/ext/declarative/api.py2
-rw-r--r--lib/sqlalchemy/orm/attributes.py70
-rw-r--r--lib/sqlalchemy/orm/instrumentation.py7
-rw-r--r--lib/sqlalchemy/orm/interfaces.py14
-rw-r--r--lib/sqlalchemy/orm/mapper.py35
-rw-r--r--lib/sqlalchemy/orm/persistence.py2
-rw-r--r--lib/sqlalchemy/orm/properties.py106
-rw-r--r--lib/sqlalchemy/orm/relationships.py27
-rw-r--r--lib/sqlalchemy/orm/session.py103
-rw-r--r--lib/sqlalchemy/orm/state.py27
-rw-r--r--lib/sqlalchemy/orm/strategies.py14
-rw-r--r--lib/sqlalchemy/orm/unitofwork.py12
-rw-r--r--lib/sqlalchemy/orm/util.py37
-rw-r--r--lib/sqlalchemy/pool.py4
-rw-r--r--lib/sqlalchemy/schema.py19
-rw-r--r--lib/sqlalchemy/sql/compiler.py51
-rw-r--r--lib/sqlalchemy/sql/expression.py185
-rw-r--r--lib/sqlalchemy/sql/util.py7
-rw-r--r--lib/sqlalchemy/testing/assertsql.py2
-rw-r--r--lib/sqlalchemy/testing/plugin/noseplugin.py8
-rw-r--r--lib/sqlalchemy/testing/profiling.py4
-rw-r--r--lib/sqlalchemy/testing/runner.py10
-rw-r--r--lib/sqlalchemy/testing/schema.py27
-rw-r--r--lib/sqlalchemy/testing/suite/test_insert.py4
-rw-r--r--lib/sqlalchemy/testing/suite/test_reflection.py10
-rw-r--r--lib/sqlalchemy/testing/util.py3
-rw-r--r--lib/sqlalchemy/types.py18
-rw-r--r--lib/sqlalchemy/util/__init__.py19
-rw-r--r--lib/sqlalchemy/util/_collections.py40
-rw-r--r--lib/sqlalchemy/util/compat.py184
-rw-r--r--lib/sqlalchemy/util/langhelpers.py60
-rw-r--r--lib/sqlalchemy/util/topological.py3
-rw-r--r--setup.cfg4
-rw-r--r--test/aaa_profiling/test_compiler.py12
-rw-r--r--test/aaa_profiling/test_memusage.py3
-rw-r--r--test/dialect/test_mssql.py55
-rw-r--r--test/dialect/test_oracle.py5
-rw-r--r--test/dialect/test_postgresql.py19
-rw-r--r--test/engine/test_ddlemit.py184
-rw-r--r--test/engine/test_execute.py2
-rw-r--r--test/engine/test_reconnect.py160
-rw-r--r--test/ext/declarative/test_inheritance.py43
-rw-r--r--test/ext/test_serializer.py2
-rw-r--r--test/orm/inheritance/test_basic.py118
-rw-r--r--test/orm/inheritance/test_polymorphic_rel.py1
-rw-r--r--test/orm/test_attributes.py41
-rw-r--r--test/orm/test_cascade.py43
-rw-r--r--test/orm/test_compile.py32
-rw-r--r--test/orm/test_default_strategies.py1
-rw-r--r--test/orm/test_froms.py67
-rw-r--r--test/orm/test_instrumentation.py14
-rw-r--r--test/orm/test_joins.py2
-rw-r--r--test/orm/test_mapper.py51
-rw-r--r--test/orm/test_query.py29
-rw-r--r--test/orm/test_rel_fn.py104
-rw-r--r--test/orm/test_session.py144
-rw-r--r--test/orm/test_subquery_relations.py160
-rw-r--r--test/orm/test_transaction.py70
-rw-r--r--test/orm/test_unitofwork.py16
-rw-r--r--test/perf/stress_all.py2
-rw-r--r--test/profiles.txt6
-rw-r--r--test/sql/test_compiler.py645
-rw-r--r--test/sql/test_constraints.py13
-rw-r--r--test/sql/test_delete.py86
-rw-r--r--test/sql/test_functions.py2
-rw-r--r--test/sql/test_generative.py293
-rw-r--r--test/sql/test_insert.py312
-rw-r--r--test/sql/test_labels.py102
-rw-r--r--test/sql/test_metadata.py86
-rw-r--r--test/sql/test_query.py35
-rw-r--r--test/sql/test_returning.py20
-rw-r--r--test/sql/test_selectable.py153
-rw-r--r--test/sql/test_types.py1
-rw-r--r--test/sql/test_update.py646
113 files changed, 4648 insertions, 2043 deletions
diff --git a/.hgtags b/.hgtags
index 85921a7e2..402707e37 100644
--- a/.hgtags
+++ b/.hgtags
@@ -89,3 +89,4 @@ ebe9514a69a4b4ec6209f0e9aa43053ba28d080b rel_0_7_5
d557287431986274a796348750f1c6ce885b196c rel_0_7_6
6495bcf87e10461675d8905d62f5632e634ec33c rel_0_8_0b1
8d82961d34643c8d53d865ddf76911807a36fde6 rel_0_8_8b2
+662aaaa7bc65c47db7ecd2e0269f8a8fbb613acd rel_0_8_0
diff --git a/README.dialects.rst b/README.dialects.rst
index 2e1d20db8..26bc1edaf 100644
--- a/README.dialects.rst
+++ b/README.dialects.rst
@@ -112,7 +112,13 @@ Key aspects of this file layout include:
from sqlalchemy.testing import runner
- runner.main()
+ # use this in setup.py 'test_suite':
+ # test_suite="run_tests.setup_py_test"
+ def setup_py_test():
+ runner.setup_py_test()
+
+ if __name__ == '__main__':
+ runner.main()
Where above, the ``registry`` module, introduced in SQLAlchemy 0.8, provides
an in-Python means of installing the dialect entrypoints without the use
diff --git a/doc/build/builder/changelog.py b/doc/build/builder/changelog.py
index 85ae6dba0..41a403ad0 100644
--- a/doc/build/builder/changelog.py
+++ b/doc/build/builder/changelog.py
@@ -142,6 +142,16 @@ class ChangeLogDirective(EnvDirective, Directive):
self._parsed_content['released']))
else:
topsection.append(nodes.Text("no release date"))
+
+ intro_para = nodes.paragraph('', '')
+ for len_, text in enumerate(self._parsed_content['text']):
+ if ".. change::" in text:
+ break
+ if len_:
+ self.state.nested_parse(self._parsed_content['text'][0:len_], 0,
+ intro_para)
+ topsection.append(intro_para)
+
return topsection
diff --git a/doc/build/changelog/changelog_02.rst b/doc/build/changelog/changelog_02.rst
index 3053659a8..600dcc6eb 100644
--- a/doc/build/changelog/changelog_02.rst
+++ b/doc/build/changelog/changelog_02.rst
@@ -678,7 +678,7 @@
modified version (works in py2.3/2.4!) that uses a threading.RLock
for a mutex. this is to fix a reported case where a ConnectionFairy's
__del__() method got called within the Queue's get() method, which
- then returns its connection to the Queue via the the put() method,
+ then returns its connection to the Queue via the put() method,
causing a reentrant hang unless threading.RLock is used.
.. change::
diff --git a/doc/build/changelog/changelog_03.rst b/doc/build/changelog/changelog_03.rst
index c1944c705..e47da340a 100644
--- a/doc/build/changelog/changelog_03.rst
+++ b/doc/build/changelog/changelog_03.rst
@@ -2092,7 +2092,7 @@
:tickets:
added a mutex to the mapper compilation step. ive been reluctant to add any
- kind of threading anything to SA but this is one spot that its its really
+ kind of threading anything to SA but this is one spot that its really
needed since mappers are typically "global", and while their state does not
change during normal operation, the initial compilation step does modify
internal state significantly, and this step usually occurs not at
diff --git a/doc/build/changelog/changelog_04.rst b/doc/build/changelog/changelog_04.rst
index 37b424df7..e1acfe4c0 100644
--- a/doc/build/changelog/changelog_04.rst
+++ b/doc/build/changelog/changelog_04.rst
@@ -2036,7 +2036,7 @@
new synonym() behavior: an attribute will be placed on the mapped
class, if one does not exist already, in all cases. if a property
already exists on the class, the synonym will decorate the property
- with the appropriate comparison operators so that it can be used in in
+ with the appropriate comparison operators so that it can be used in
column expressions just like any other mapped attribute (i.e. usable in
filter(), etc.) the "proxy=True" flag is deprecated and no longer means
anything. Additionally, the flag "map_column=True" will automatically
@@ -2872,7 +2872,7 @@
:tickets:
PG reflection, upon seeing the default schema name being used explicitly
- as the "schema" argument in a Table, will assume that this is the the
+ as the "schema" argument in a Table, will assume that this is the
user's desired convention, and will explicitly set the "schema" argument
in foreign-key-related reflected tables, thus making them match only
with Table constructors that also use the explicit "schema" argument
@@ -2929,7 +2929,7 @@
:tickets: 810
Fixed breakage with postgres and multiple two-phase transactions. Two-phase
- commits and and rollbacks didn't automatically end up with a new transaction
+ commits and rollbacks didn't automatically end up with a new transaction
as the usual dbapi commits/rollbacks do.
.. change::
diff --git a/doc/build/changelog/changelog_06.rst b/doc/build/changelog/changelog_06.rst
index 0ec7027ce..c7f4dcdea 100644
--- a/doc/build/changelog/changelog_06.rst
+++ b/doc/build/changelog/changelog_06.rst
@@ -943,7 +943,7 @@
:tickets: 1953
The cx_oracle "decimal detection" logic, which takes place
- for for result set columns with ambiguous numeric characteristics,
+ for result set columns with ambiguous numeric characteristics,
now uses the decimal point character determined by the locale/
NLS_LANG setting, using an on-first-connect detection of
this character. cx_oracle 5.0.3 or greater is also required
@@ -2851,7 +2851,7 @@
:tickets: 1071
Postgresql now reflects sequence names associated with
- SERIAL columns correctly, after the name of of the sequence
+ SERIAL columns correctly, after the name of the sequence
has been changed. Thanks to Kumar McMillan for the patch.
.. change::
diff --git a/doc/build/changelog/changelog_07.rst b/doc/build/changelog/changelog_07.rst
index f520caf34..c650e769f 100644
--- a/doc/build/changelog/changelog_07.rst
+++ b/doc/build/changelog/changelog_07.rst
@@ -3,6 +3,71 @@
0.7 Changelog
==============
+.. changelog::
+ :version: 0.7.11
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 2699
+
+ Fixed bug when a query of the form:
+ ``query(SubClass).options(subqueryload(Baseclass.attrname))``,
+ where ``SubClass`` is a joined inh of ``BaseClass``,
+ would fail to apply the ``JOIN`` inside the subquery
+ on the attribute load, producing a cartesian product.
+ The populated results still tended to be correct as additional
+ rows are just ignored, so this issue may be present as a
+ performance degradation in applications that are
+ otherwise working correctly.
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 2689
+
+ Fixed bug in unit of work whereby a joined-inheritance
+ subclass could insert the row for the "sub" table
+ before the parent table, if the two tables had no
+ ForeignKey constraints set up between them.
+
+ .. change::
+ :tags: feature, postgresql
+ :tickets: 2676
+
+ Added support for Postgresql's traditional SUBSTRING
+ function syntax, renders as "SUBSTRING(x FROM y FOR z)"
+ when regular ``func.substring()`` is used.
+ Courtesy Gunnlaugur Þór Briem.
+
+ .. change::
+ :tags: bug, tests
+ :tickets: 2669
+ :pullreq: 41
+
+ Fixed an import of "logging" in test_execute which was not
+ working on some linux platforms.
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 2674
+
+ Improved the error message emitted when a "backref loop" is detected,
+ that is when an attribute event triggers a bidirectional
+ assignment between two other attributes with no end.
+ This condition can occur not just when an object of the wrong
+ type is assigned, but also when an attribute is mis-configured
+ to backref into an existing backref pair.
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 2674
+
+ A warning is emitted when a MapperProperty is assigned to a mapper
+ that replaces an existing property, if the properties in question
+ aren't plain column-based properties. Replacement of relationship
+ properties is rarely (ever?) what is intended and usually refers to a
+ mapper mis-configuration. This will also warn if a backref configures
+ itself on top of an existing one in an inheritance relationship
+ (which is an error in 0.8).
.. changelog::
:version: 0.7.10
diff --git a/doc/build/changelog/changelog_08.rst b/doc/build/changelog/changelog_08.rst
index 218700ba8..2a2925726 100644
--- a/doc/build/changelog/changelog_08.rst
+++ b/doc/build/changelog/changelog_08.rst
@@ -4,7 +4,328 @@
==============
.. changelog::
+ :version: 0.8.1
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 2708
+
+ Improved the behavior of instance management regarding
+ the creation of strong references within the Session;
+ an object will no longer have an internal reference cycle
+ created if it's in the transient state or moves into the
+ detached state - the strong ref is created only when the
+ object is attached to a Session and is removed when the
+ object is detached. This makes it somewhat safer for an
+ object to have a `__del__()` method, even though this is
+ not recommended, as relationships with backrefs produce
+ cycles too. A warning has been added when a class with
+ a `__del__()` method is mapped.
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 2702
+
+ A major fix to the way in which a select() object produces
+ labeled columns when apply_labels() is used; this mode
+ produces a SELECT where each column is labeled as in
+ <tablename>_<columnname>, to remove column name collisions
+ for a multiple table select. The fix is that if two labels
+ collide when combined with the table name, i.e.
+ "foo.bar_id" and "foo_bar.id", anonymous aliasing will be
+ applied to one of the dupes. This allows the ORM to handle
+ both columns independently; previously, 0.7
+ would in some cases silently emit a second SELECT for the
+ column that was "duped", and in 0.8 an ambiguous column error
+ would be emitted. The "keys" applied to the .c. collection
+ of the select() will also be deduped, so that the "column
+ being replaced" warning will no longer emit for any select()
+ that specifies use_labels, though the dupe key will be given
+ an anonymous label which isn't generally user-friendly.
+
+ .. change::
+ :tags: bug, mysql
+ :pullreq: 54
+
+ Updated a regexp to correctly extract error code on
+ google app engine v1.7.5 and newer. Courtesy
+ Dan Ring.
+
+ .. change::
+ :tags: bug, examples
+
+ Fixed a long-standing bug in the caching example, where
+ the limit/offset parameter values wouldn't be taken into
+ account when computing the cache key. The
+ _key_from_query() function has been simplified to work
+ directly from the final compiled statement in order to get
+ at both the full statement as well as the fully processed
+ parameter list.
+
+ .. change::
+ :tags: bug, mssql
+ :tickets: 2355
+
+ Part of a longer series of fixes needed for pyodbc+
+ mssql, a CAST to NVARCHAR(max) has been added to the bound
+ parameter for the table name and schema name in all information schema
+ queries to avoid the issue of comparing NVARCHAR to NTEXT,
+ which seems to be rejected by the ODBC driver in some cases,
+ such as FreeTDS (0.91 only?) plus unicode bound parameters being passed.
+ The issue seems to be specific to the SQL Server information
+ schema tables and the workaround is harmless for those cases
+ where the problem doesn't exist in the first place.
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 2691
+
+ Fixed bug where disconnect detect on error would
+ raise an attribute error if the error were being
+ raised after the Connection object had already
+ been closed.
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 2703
+
+ Reworked internal exception raises that emit
+ a rollback() before re-raising, so that the stack
+ trace is preserved from sys.exc_info() before entering
+ the rollback. This so that the traceback is preserved
+ when using coroutine frameworks which may have switched
+ contexts before the rollback function returns.
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 2697
+
+ Fixed bug whereby ORM would run the wrong kind of
+ query when refreshing an inheritance-mapped class
+ where the superclass was mapped to a non-Table
+ object, like a custom join() or a select(),
+ running a query that assumed a hierarchy that's
+ mapped to individual Table-per-class.
+
+ .. change::
+ :tags: bug, orm
+
+ Fixed `__repr__()` on mapper property constructs
+ to work before the object is initialized, so
+ that Sphinx builds with recent Sphinx versions
+ can read them.
+
+ .. change::
+ :tags: bug, sql, postgresql
+
+ The _Binary base type now converts values through
+ the bytes() callable when run on Python 3; in particular
+ psycopg2 2.5 with Python 3.3 seems to now be returning
+ the "memoryview" type, so this is converted to bytes
+ before return.
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 2695
+
+ Improvements to Connection auto-invalidation
+ handling. If a non-disconnect error occurs,
+ but leads to a delayed disconnect error within error
+ handling (happens with MySQL), the disconnect condition
+ is detected. The Connection can now also be closed
+ when in an invalid state, meaning it will raise "closed"
+ on next usage, and additionally the "close with result"
+ feature will work even if the autorollback in an error
+ handling routine fails and regardless of whether the
+ condition is a disconnect or not.
+
+
+ .. change::
+ :tags: bug, orm, declarative
+ :tickets: 2656
+
+ Fixed indirect regression regarding :func:`.has_inherited_table`,
+ where since it considers the current class' ``__table__``, was
+ sensitive to when it was called. This is 0.7's behavior also,
+ but in 0.7 things tended to "work out" within events like
+ ``__mapper_args__()``. :func:`.has_inherited_table` now only
+ considers superclasses, so should return the same answer
+ regarding the current class no matter when it's called
+ (obviously assuming the state of the superclass).
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 2699
+
+ Fixed bug when a query of the form:
+ ``query(SubClass).options(subqueryload(Baseclass.attrname))``,
+ where ``SubClass`` is a joined inh of ``BaseClass``,
+ would fail to apply the ``JOIN`` inside the subquery
+ on the attribute load, producing a cartesian product.
+ The populated results still tended to be correct as additional
+ rows are just ignored, so this issue may be present as a
+ performance degradation in applications that are
+ otherwise working correctly. Also in 0.7.11.
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 2689
+
+ Fixed bug in unit of work whereby a joined-inheritance
+ subclass could insert the row for the "sub" table
+ before the parent table, if the two tables had no
+ ForeignKey constraints set up between them.
+ Also in 0.7.11.
+
+ .. change::
+ :tags: bug, mssql
+ :pullreq: 47
+
+ Added support for additional "disconnect" messages
+ to the pymssql dialect. Courtesy John Anderson.
+
+ .. change::
+ :tags: feature, sql
+
+ Loosened the check on dialect-specific argument names
+ passed to Table(); since we want to support external dialects
+ and also want to support args without a certain dialect
+ being installed, it only checks the format of the arg now,
+ rather than looking for that dialect in sqlalchemy.dialects.
+
+ .. change::
+ :tags: bug, sql
+
+ Fixed bug whereby a DBAPI that can return "0"
+ for cursor.lastrowid would not function correctly
+ in conjunction with :attr:`.ResultProxy.inserted_primary_key`.
+
+ .. change::
+ :tags: bug, mssql
+ :tickets: 2683
+ :pullreq: 46
+
+ Fixed Py3K bug regarding "binary" types and
+ pymssql. Courtesy Marc Abramowitz.
+
+ .. change::
+ :tags: bug, postgresql
+ :tickets: 2680
+
+ Added missing HSTORE type to postgresql type names
+ so that the type can be reflected.
+
+.. changelog::
:version: 0.8.0
+ :released: March 9, 2013
+
+ .. note::
+
+ There are some new behavioral changes as of 0.8.0
+ not present in 0.8.0b2. They are present in the
+ migration document as follows:
+
+ * :ref:`legacy_is_orphan_addition`
+
+ * :ref:`metadata_create_drop_tables`
+
+ * :ref:`correlation_context_specific`
+
+ .. change::
+ :tags: feature, postgresql
+ :tickets: 2676
+
+ Added support for Postgresql's traditional SUBSTRING
+ function syntax, renders as "SUBSTRING(x FROM y FOR z)"
+ when regular ``func.substring()`` is used.
+ Also in 0.7.11. Courtesy Gunnlaugur Þór Briem.
+
+ .. change::
+ :tags: feature, orm
+ :tickets: 2675
+
+ A meaningful :attr:`.QueryableAttribute.info` attribute is
+ added, which proxies down to the ``.info`` attribute on either
+ the :class:`.schema.Column` object if directly present, or
+ the :class:`.MapperProperty` otherwise. The full behavior
+ is documented and ensured by tests to remain stable.
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 2668
+
+ The behavior of SELECT correlation has been improved such that
+ the :meth:`.Select.correlate` and :meth:`.Select.correlate_except`
+ methods, as well as their ORM analogues, will still retain
+ "auto-correlation" behavior in that the FROM clause is modified
+ only if the output would be legal SQL; that is, the FROM clause
+ is left intact if the correlated SELECT is not used in the context
+ of an enclosing SELECT inside of the WHERE, columns, or HAVING clause.
+ The two methods now only specify conditions to the default
+ "auto correlation", rather than absolute FROM lists.
+
+ .. change::
+ :tags: feature, mysql
+ :pullreq: 42
+
+ New dialect for CyMySQL added, courtesy Hajime Nakagami.
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 2674
+
+ Improved checking for an existing backref name conflict during
+ mapper configuration; will now test for name conflicts on
+ superclasses and subclasses, in addition to the current mapper,
+ as these conflicts break things just as much. This is new for
+ 0.8, but see below for a warning that will also be triggered
+ in 0.7.11.
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 2674
+
+ Improved the error message emitted when a "backref loop" is detected,
+ that is when an attribute event triggers a bidirectional
+ assignment between two other attributes with no end.
+ This condition can occur not just when an object of the wrong
+ type is assigned, but also when an attribute is mis-configured
+ to backref into an existing backref pair. Also in 0.7.11.
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 2674
+
+ A warning is emitted when a MapperProperty is assigned to a mapper
+ that replaces an existing property, if the properties in question
+ aren't plain column-based properties. Replacement of relationship
+ properties is rarely (ever?) what is intended and usually refers to a
+ mapper mis-configuration. Also in 0.7.11.
+
+ .. change::
+ :tags: feature, orm
+
+ Can set/change the "cascade" attribute on a :func:`.relationship`
+ construct after it's been constructed already. This is not
+ a pattern for normal use but we like to change the setting
+ for demonstration purposes in tutorials.
+
+ .. change::
+ :tags: bug, schema
+ :tickets: 2664
+
+ :meth:`.MetaData.create_all` and :meth:`.MetaData.drop_all` will
+ now accommodate an empty list as an instruction to not create/drop
+ any items, rather than ignoring the collection.
+
+
+ .. change::
+ :tags: bug, tests
+ :tickets: 2669
+ :pullreq: 41
+
+ Fixed an import of "logging" in test_execute which was not
+ working on some linux platforms. Also in 0.7.11.
.. change::
:tags: bug, orm
diff --git a/doc/build/changelog/migration_08.rst b/doc/build/changelog/migration_08.rst
index 06a9402f6..971dd2f51 100644
--- a/doc/build/changelog/migration_08.rst
+++ b/doc/build/changelog/migration_08.rst
@@ -7,9 +7,10 @@ What's New in SQLAlchemy 0.8?
This document describes changes between SQLAlchemy version 0.7,
undergoing maintenance releases as of October, 2012,
and SQLAlchemy version 0.8, which is expected for release
- in late 2012.
+ in early 2013.
Document date: October 25, 2012
+ Updated: March 9, 2013
Introduction
============
@@ -1145,6 +1146,76 @@ entity, ``query.correlate(someentity)``.
:ticket:`2179`
+.. _correlation_context_specific:
+
+Correlation is now always context-specific
+------------------------------------------
+
+To allow a wider variety of correlation scenarios, the behavior of
+:meth:`.Select.correlate` and :meth:`.Query.correlate` has changed slightly
+such that the SELECT statement will omit the "correlated" target from the
+FROM clause only if the statement is actually used in that context. Additionally,
+it's no longer possible for a SELECT statement that's placed as a FROM
+in an enclosing SELECT statement to "correlate" (i.e. omit) a FROM clause.
+
+This change only makes things better as far as rendering SQL, in that it's no
+longer possible to render illegal SQL where there are insufficient FROM
+objects relative to what's being selected::
+
+ from sqlalchemy.sql import table, column, select
+
+ t1 = table('t1', column('x'))
+ t2 = table('t2', column('y'))
+ s = select([t1, t2]).correlate(t1)
+
+ print(s)
+
+Prior to this change, the above would return::
+
+ SELECT t1.x, t2.y FROM t2
+
+which is invalid SQL as "t1" is not referred to in any FROM clause.
+
+Now, in the absence of an enclosing SELECT, it returns::
+
+ SELECT t1.x, t2.y FROM t1, t2
+
+Within a SELECT, the correlation takes effect as expected::
+
+ s2 = select([t1, t2]).where(t1.c.x == t2.c.y).where(t1.c.x == s)
+
+ print (s2)
+
+ SELECT t1.x, t2.y FROM t1, t2
+ WHERE t1.x = t2.y AND t1.x =
+ (SELECT t1.x, t2.y FROM t2)
+
+This change is not expected to impact any existing applications, as
+the correlation behavior remains identical for properly constructed
+expressions. Only an application that relies, most likely within a
+testing scenario, on the invalid string output of a correlated
+SELECT used in a non-correlating context would see any change.
+
+:ticket:`2668`
+
+
+.. _metadata_create_drop_tables:
+
+create_all() and drop_all() will now honor an empty list as such
+----------------------------------------------------------------
+
+The methods :meth:`.MetaData.create_all` and :meth:`.MetaData.drop_all`
+will now accept a list of :class:`.Table` objects that is empty,
+and will not emit any CREATE or DROP statements. Previously,
+an empty list was interpreted the same as passing ``None``
+for a collection, and CREATE/DROP would be emitted for all
+items unconditionally.
+
+This is a bug fix but some applications may have been relying upon
+the previous behavior.
+
+:ticket:`2664`
+
Repaired the Event Targeting of :class:`.InstrumentationEvents`
----------------------------------------------------------------
diff --git a/doc/build/conf.py b/doc/build/conf.py
index 8bfe2d2bf..34caedc22 100644
--- a/doc/build/conf.py
+++ b/doc/build/conf.py
@@ -83,9 +83,9 @@ copyright = u'2007-2013, the SQLAlchemy authors and contributors'
# The short X.Y version.
version = "0.8"
# The full version, including alpha/beta/rc tags.
-release = "0.8.0b2"
+release = "0.8.0"
-release_date = "December 14, 2012"
+release_date = "March 9, 2013"
site_base = "http://www.sqlalchemy.org"
diff --git a/doc/build/core/tutorial.rst b/doc/build/core/tutorial.rst
index a05e6ccdf..fd6c69bff 100644
--- a/doc/build/core/tutorial.rst
+++ b/doc/build/core/tutorial.rst
@@ -236,9 +236,9 @@ we use the ``connect()`` method::
>>> conn = engine.connect()
>>> conn #doctest: +ELLIPSIS
- <sqlalchemy.engine.Connection object at 0x...>
+ <sqlalchemy.engine.base.Connection object at 0x...>
-The :class:`~sqlalchemy.engine.Connection` object represents an actively
+The :class:`~sqlalchemy.engine.base.Connection` object represents an actively
checked out DBAPI connection resource. Lets feed it our
:class:`~sqlalchemy.sql.expression.Insert` object and see what happens:
@@ -252,7 +252,7 @@ checked out DBAPI connection resource. Lets feed it our
So the INSERT statement was now issued to the database. Although we got
positional "qmark" bind parameters instead of "named" bind parameters in the
output. How come ? Because when executed, the
-:class:`~sqlalchemy.engine.Connection` used the SQLite **dialect** to
+:class:`~sqlalchemy.engine.base.Connection` used the SQLite **dialect** to
help generate the statement; when we use the ``str()`` function, the statement
isn't aware of this dialect, and falls back onto a default which uses named
parameters. We can view this manually as follows:
@@ -264,9 +264,9 @@ parameters. We can view this manually as follows:
'INSERT INTO users (name, fullname) VALUES (?, ?)'
What about the ``result`` variable we got when we called ``execute()`` ? As
-the SQLAlchemy :class:`~sqlalchemy.engine.Connection` object references a
+the SQLAlchemy :class:`~sqlalchemy.engine.base.Connection` object references a
DBAPI connection, the result, known as a
-:class:`~sqlalchemy.engine.ResultProxy` object, is analogous to the DBAPI
+:class:`~sqlalchemy.engine.result.ResultProxy` object, is analogous to the DBAPI
cursor object. In the case of an INSERT, we can get important information from
it, such as the primary key values which were generated from our statement:
@@ -292,7 +292,7 @@ Our insert example above was intentionally a little drawn out to show some
various behaviors of expression language constructs. In the usual case, an
:class:`~sqlalchemy.sql.expression.Insert` statement is usually compiled
against the parameters sent to the ``execute()`` method on
-:class:`~sqlalchemy.engine.Connection`, so that there's no need to use
+:class:`~sqlalchemy.engine.base.Connection`, so that there's no need to use
the ``values`` keyword with :class:`~sqlalchemy.sql.expression.Insert`. Lets
create a generic :class:`~sqlalchemy.sql.expression.Insert` statement again
and use it in the "normal" way:
@@ -304,13 +304,13 @@ and use it in the "normal" way:
{opensql}INSERT INTO users (id, name, fullname) VALUES (?, ?, ?)
(2, 'wendy', 'Wendy Williams')
COMMIT
- {stop}<sqlalchemy.engine.ResultProxy object at 0x...>
+ {stop}<sqlalchemy.engine.result.ResultProxy object at 0x...>
-Above, because we specified all three columns in the the ``execute()`` method,
-the compiled :class:`~sqlalchemy.sql.expression.Insert` included all three
-columns. The :class:`~sqlalchemy.sql.expression.Insert` statement is compiled
+Above, because we specified all three columns in the ``execute()`` method,
+the compiled :class:`~.expression.Insert` included all three
+columns. The :class:`~.expression.Insert` statement is compiled
at execution time based on the parameters we specified; if we specified fewer
-parameters, the :class:`~sqlalchemy.sql.expression.Insert` would have fewer
+parameters, the :class:`~.expression.Insert` would have fewer
entries in its VALUES clause.
To issue many inserts using DBAPI's ``executemany()`` method, we can send in a
@@ -328,7 +328,7 @@ inserted, as we do here to add some email addresses:
{opensql}INSERT INTO addresses (user_id, email_address) VALUES (?, ?)
((1, 'jack@yahoo.com'), (1, 'jack@msn.com'), (2, 'www@www.org'), (2, 'wendy@aol.com'))
COMMIT
- {stop}<sqlalchemy.engine.ResultProxy object at 0x...>
+ {stop}<sqlalchemy.engine.result.ResultProxy object at 0x...>
Above, we again relied upon SQLite's automatic generation of primary key
identifiers for each ``addresses`` row.
@@ -363,10 +363,10 @@ Above, we issued a basic :func:`.select` call, placing the ``users`` table
within the COLUMNS clause of the select, and then executing. SQLAlchemy
expanded the ``users`` table into the set of each of its columns, and also
generated a FROM clause for us. The result returned is again a
-:class:`~sqlalchemy.engine.ResultProxy` object, which acts much like a
+:class:`~sqlalchemy.engine.result.ResultProxy` object, which acts much like a
DBAPI cursor, including methods such as
-:func:`~sqlalchemy.engine.ResultProxy.fetchone` and
-:func:`~sqlalchemy.engine.ResultProxy.fetchall`. The easiest way to get
+:func:`~sqlalchemy.engine.result.ResultProxy.fetchone` and
+:func:`~sqlalchemy.engine.result.ResultProxy.fetchall`. The easiest way to get
rows from it is to just iterate:
.. sourcecode:: pycon+sql
@@ -375,8 +375,6 @@ rows from it is to just iterate:
... print row
(1, u'jack', u'Jack Jones')
(2, u'wendy', u'Wendy Williams')
- (3, u'fred', u'Fred Flintstone')
- (4, u'mary', u'Mary Contrary')
Above, we see that printing each row produces a simple tuple-like result. We
have more options at accessing the data in each row. One very common way is
@@ -413,12 +411,10 @@ But another way, whose usefulness will become apparent later on, is to use the
()
{stop}name: jack ; fullname: Jack Jones
name: wendy ; fullname: Wendy Williams
- name: fred ; fullname: Fred Flintstone
- name: mary ; fullname: Mary Contrary
Result sets which have pending rows remaining should be explicitly closed
before discarding. While the cursor and connection resources referenced by the
-:class:`~sqlalchemy.engine.ResultProxy` will be respectively closed and
+:class:`~sqlalchemy.engine.result.ResultProxy` will be respectively closed and
returned to the connection pool when the object is garbage collected, it's
better to make it explicit as some database APIs are very picky about such
things:
@@ -444,8 +440,6 @@ the ``c`` attribute of the :class:`~sqlalchemy.schema.Table` object:
... print row
(u'jack', u'Jack Jones')
(u'wendy', u'Wendy Williams')
- (u'fred', u'Fred Flintstone')
- (u'mary', u'Mary Contrary')
Lets observe something interesting about the FROM clause. Whereas the
generated statement contains two distinct sections, a "SELECT columns" part
@@ -468,27 +462,20 @@ our :func:`.select` statement:
(2, u'wendy', u'Wendy Williams', 2, 1, u'jack@msn.com')
(2, u'wendy', u'Wendy Williams', 3, 2, u'www@www.org')
(2, u'wendy', u'Wendy Williams', 4, 2, u'wendy@aol.com')
- (3, u'fred', u'Fred Flintstone', 1, 1, u'jack@yahoo.com')
- (3, u'fred', u'Fred Flintstone', 2, 1, u'jack@msn.com')
- (3, u'fred', u'Fred Flintstone', 3, 2, u'www@www.org')
- (3, u'fred', u'Fred Flintstone', 4, 2, u'wendy@aol.com')
- (4, u'mary', u'Mary Contrary', 1, 1, u'jack@yahoo.com')
- (4, u'mary', u'Mary Contrary', 2, 1, u'jack@msn.com')
- (4, u'mary', u'Mary Contrary', 3, 2, u'www@www.org')
- (4, u'mary', u'Mary Contrary', 4, 2, u'wendy@aol.com')
It placed **both** tables into the FROM clause. But also, it made a real mess.
Those who are familiar with SQL joins know that this is a **Cartesian
product**; each row from the ``users`` table is produced against each row from
the ``addresses`` table. So to put some sanity into this statement, we need a
-WHERE clause. Which brings us to the second argument of :func:`.select`:
+WHERE clause. We do that using :meth:`.Select.where`:
.. sourcecode:: pycon+sql
- >>> s = select([users, addresses], users.c.id==addresses.c.user_id)
+ >>> s = select([users, addresses]).where(users.c.id == addresses.c.user_id)
{sql}>>> for row in conn.execute(s):
... print row # doctest: +NORMALIZE_WHITESPACE
- SELECT users.id, users.name, users.fullname, addresses.id, addresses.user_id, addresses.email_address
+ SELECT users.id, users.name, users.fullname, addresses.id,
+ addresses.user_id, addresses.email_address
FROM users, addresses
WHERE users.id = addresses.user_id
()
@@ -503,27 +490,27 @@ statement, and our results were managed down so that the join of ``users`` and
``addresses`` rows made sense. But let's look at that expression? It's using
just a Python equality operator between two different
:class:`~sqlalchemy.schema.Column` objects. It should be clear that something
-is up. Saying ``1==1`` produces ``True``, and ``1==2`` produces ``False``, not
+is up. Saying ``1 == 1`` produces ``True``, and ``1 == 2`` produces ``False``, not
a WHERE clause. So let's see exactly what that expression is doing:
.. sourcecode:: pycon+sql
- >>> users.c.id==addresses.c.user_id #doctest: +ELLIPSIS
+ >>> users.c.id == addresses.c.user_id #doctest: +ELLIPSIS
<sqlalchemy.sql.expression.BinaryExpression object at 0x...>
Wow, surprise! This is neither a ``True`` nor a ``False``. Well what is it ?
.. sourcecode:: pycon+sql
- >>> str(users.c.id==addresses.c.user_id)
+ >>> str(users.c.id == addresses.c.user_id)
'users.id = addresses.user_id'
As you can see, the ``==`` operator is producing an object that is very much
-like the :class:`~sqlalchemy.sql.expression.Insert` and :func:`.select`
+like the :class:`~.expression.Insert` and :func:`.select`
objects we've made so far, thanks to Python's ``__eq__()`` builtin; you call
``str()`` on it and it produces SQL. By now, one can see that everything we
are working with is ultimately the same type of object. SQLAlchemy terms the
-base class of all of these expressions as ``sqlalchemy.sql.ClauseElement``.
+base class of all of these expressions as :class:`~.expression.ColumnElement`.
Operators
==========
@@ -533,7 +520,7 @@ some of its capabilities. We've seen how to equate two columns to each other:
.. sourcecode:: pycon+sql
- >>> print users.c.id==addresses.c.user_id
+ >>> print users.c.id == addresses.c.user_id
users.id = addresses.user_id
If we use a literal value (a literal meaning, not a SQLAlchemy clause object),
@@ -541,16 +528,16 @@ we get a bind parameter:
.. sourcecode:: pycon+sql
- >>> print users.c.id==7
+ >>> print users.c.id == 7
users.id = :id_1
-The ``7`` literal is embedded in
-:class:`~sqlalchemy.sql.expression.ClauseElement`; we can use the same trick
+The ``7`` literal is embedded in the resulting
+:class:`~.expression.ColumnElement`; we can use the same trick
we did with the :class:`~sqlalchemy.sql.expression.Insert` object to see it:
.. sourcecode:: pycon+sql
- >>> (users.c.id==7).compile().params
+ >>> (users.c.id == 7).compile().params
{u'id_1': 7}
Most Python operators, as it turns out, produce a SQL expression here, like
@@ -576,8 +563,8 @@ If we add two integer columns together, we get an addition expression:
>>> print users.c.id + addresses.c.id
users.id + addresses.id
-Interestingly, the type of the :class:`~sqlalchemy.schema.Column` is important
-! If we use ``+`` with two string based columns (recall we put types like
+Interestingly, the type of the :class:`~sqlalchemy.schema.Column` is important!
+If we use ``+`` with two string based columns (recall we put types like
:class:`~sqlalchemy.types.Integer` and :class:`~sqlalchemy.types.String` on
our :class:`~sqlalchemy.schema.Column` objects at the beginning), we get
something different:
@@ -592,7 +579,8 @@ not all of them. MySQL users, fear not:
.. sourcecode:: pycon+sql
- >>> print (users.c.name + users.c.fullname).compile(bind=create_engine('mysql://'))
+ >>> print (users.c.name + users.c.fullname).\
+ ... compile(bind=create_engine('mysql://'))
concat(users.name, users.fullname)
The above illustrates the SQL that's generated for an
@@ -632,18 +620,26 @@ Conjunctions
We'd like to show off some of our operators inside of :func:`.select`
constructs. But we need to lump them together a little more, so let's first
introduce some conjunctions. Conjunctions are those little words like AND and
-OR that put things together. We'll also hit upon NOT. AND, OR and NOT can work
+OR that put things together. We'll also hit upon NOT. :func:`.and_`, :func:`.or_`,
+and :func:`.not_` can work
from the corresponding functions SQLAlchemy provides (notice we also throw in
-a LIKE):
+a :meth:`~.ColumnOperators.like`):
.. sourcecode:: pycon+sql
>>> from sqlalchemy.sql import and_, or_, not_
- >>> print and_(users.c.name.like('j%'), users.c.id==addresses.c.user_id, #doctest: +NORMALIZE_WHITESPACE
- ... or_(addresses.c.email_address=='wendy@aol.com', addresses.c.email_address=='jack@yahoo.com'),
- ... not_(users.c.id>5))
+ >>> print and_(
+ ... users.c.name.like('j%'),
+ ... users.c.id == addresses.c.user_id, #doctest: +NORMALIZE_WHITESPACE
+ ... or_(
+ ... addresses.c.email_address == 'wendy@aol.com',
+ ... addresses.c.email_address == 'jack@yahoo.com'
+ ... ),
+ ... not_(users.c.id > 5)
+ ... )
users.name LIKE :name_1 AND users.id = addresses.user_id AND
- (addresses.email_address = :email_address_1 OR addresses.email_address = :email_address_2)
+ (addresses.email_address = :email_address_1
+ OR addresses.email_address = :email_address_2)
AND users.id <= :id_1
And you can also use the re-jiggered bitwise AND, OR and NOT operators,
@@ -652,35 +648,43 @@ parenthesis:
.. sourcecode:: pycon+sql
- >>> print users.c.name.like('j%') & (users.c.id==addresses.c.user_id) & \
- ... ((addresses.c.email_address=='wendy@aol.com') | (addresses.c.email_address=='jack@yahoo.com')) \
+ >>> print users.c.name.like('j%') & (users.c.id == addresses.c.user_id) & \
+ ... (
+ ... (addresses.c.email_address == 'wendy@aol.com') | \
+ ... (addresses.c.email_address == 'jack@yahoo.com')
+ ... ) \
... & ~(users.c.id>5) # doctest: +NORMALIZE_WHITESPACE
users.name LIKE :name_1 AND users.id = addresses.user_id AND
- (addresses.email_address = :email_address_1 OR addresses.email_address = :email_address_2)
+ (addresses.email_address = :email_address_1
+ OR addresses.email_address = :email_address_2)
AND users.id <= :id_1
So with all of this vocabulary, let's select all users who have an email
address at AOL or MSN, whose name starts with a letter between "m" and "z",
and we'll also generate a column containing their full name combined with
their email address. We will add two new constructs to this statement,
-``between()`` and ``label()``. ``between()`` produces a BETWEEN clause, and
-``label()`` is used in a column expression to produce labels using the ``AS``
+:meth:`~.ColumnOperators.between` and :meth:`~.ColumnElement.label`.
+:meth:`~.ColumnOperators.between` produces a BETWEEN clause, and
+:meth:`~.ColumnElement.label` is used in a column expression to produce labels using the ``AS``
keyword; it's recommended when selecting from expressions that otherwise would
not have a name:
.. sourcecode:: pycon+sql
- >>> s = select([(users.c.fullname + ", " + addresses.c.email_address).label('title')],
- ... and_(
- ... users.c.id==addresses.c.user_id,
- ... users.c.name.between('m', 'z'),
- ... or_(
- ... addresses.c.email_address.like('%@aol.com'),
- ... addresses.c.email_address.like('%@msn.com')
+ >>> s = select([(users.c.fullname +
+ ... ", " + addresses.c.email_address).
+ ... label('title')]).\
+ ... where(
+ ... and_(
+ ... users.c.id == addresses.c.user_id,
+ ... users.c.name.between('m', 'z'),
+ ... or_(
+ ... addresses.c.email_address.like('%@aol.com'),
+ ... addresses.c.email_address.like('%@msn.com')
+ ... )
... )
... )
- ... )
- >>> print conn.execute(s).fetchall() #doctest: +NORMALIZE_WHITESPACE
+ >>> conn.execute(s).fetchall() #doctest: +NORMALIZE_WHITESPACE
SELECT users.fullname || ? || addresses.email_address AS title
FROM users, addresses
WHERE users.id = addresses.user_id AND users.name BETWEEN ? AND ? AND
@@ -693,6 +697,33 @@ it will determine the FROM clause based on all of its other bits; the columns
clause, the where clause, and also some other elements which we haven't
covered yet, which include ORDER BY, GROUP BY, and HAVING.
+A shortcut to using :func:`.and_` is to chain together multiple
+:meth:`~.Select.where` clauses. The above can also be written as:
+
+.. sourcecode:: pycon+sql
+
+ >>> s = select([(users.c.fullname +
+ ... ", " + addresses.c.email_address).
+ ... label('title')]).\
+ ... where(users.c.id == addresses.c.user_id).\
+ ... where(users.c.name.between('m', 'z')).\
+ ... where(
+ ... or_(
+ ... addresses.c.email_address.like('%@aol.com'),
+ ... addresses.c.email_address.like('%@msn.com')
+ ... )
+ ... )
+ >>> conn.execute(s).fetchall() #doctest: +NORMALIZE_WHITESPACE
+ SELECT users.fullname || ? || addresses.email_address AS title
+ FROM users, addresses
+ WHERE users.id = addresses.user_id AND users.name BETWEEN ? AND ? AND
+ (addresses.email_address LIKE ? OR addresses.email_address LIKE ?)
+ (', ', 'm', 'z', '%@aol.com', '%@msn.com')
+ [(u'Wendy Williams, wendy@aol.com',)]
+
+The way that we can build up a :func:`.select` construct through successive
+method calls is called :term:`method chaining`.
+
.. _sqlexpression_text:
Using Text
@@ -701,20 +732,23 @@ Using Text
Our last example really became a handful to type. Going from what one
understands to be a textual SQL expression into a Python construct which
groups components together in a programmatic style can be hard. That's why
-SQLAlchemy lets you just use strings too. The ``text()`` construct represents
-any textual statement. To use bind parameters with ``text()``, always use the
-named colon format. Such as below, we create a ``text()`` and execute it,
-feeding in the bind parameters to the ``execute()`` method:
+SQLAlchemy lets you just use strings too. The :func:`~.expression.text` construct represents
+any textual statement, in a backend-agnostic way.
+To use bind parameters with :func:`~.expression.text`, always use the
+named colon format. Such as below, we create a :func:`~.expression.text` and execute it,
+feeding in the bind parameters to the :meth:`~.Connection.execute` method:
.. sourcecode:: pycon+sql
>>> from sqlalchemy.sql import text
- >>> s = text("""SELECT users.fullname || ', ' || addresses.email_address AS title
- ... FROM users, addresses
- ... WHERE users.id = addresses.user_id AND users.name BETWEEN :x AND :y AND
- ... (addresses.email_address LIKE :e1 OR addresses.email_address LIKE :e2)
- ... """)
- {sql}>>> print conn.execute(s, x='m', y='z', e1='%@aol.com', e2='%@msn.com').fetchall() # doctest:+NORMALIZE_WHITESPACE
+ >>> s = text(
+ ... "SELECT users.fullname || ', ' || addresses.email_address AS title "
+ ... "FROM users, addresses "
+ ... "WHERE users.id = addresses.user_id "
+ ... "AND users.name BETWEEN :x AND :y "
+ ... "AND (addresses.email_address LIKE :e1 "
+ ... "OR addresses.email_address LIKE :e2)")
+ {sql}>>> conn.execute(s, x='m', y='z', e1='%@aol.com', e2='%@msn.com').fetchall() # doctest:+NORMALIZE_WHITESPACE
SELECT users.fullname || ', ' || addresses.email_address AS title
FROM users, addresses
WHERE users.id = addresses.user_id AND users.name BETWEEN ? AND ? AND
@@ -722,30 +756,33 @@ feeding in the bind parameters to the ``execute()`` method:
('m', 'z', '%@aol.com', '%@msn.com')
{stop}[(u'Wendy Williams, wendy@aol.com',)]
-To gain a "hybrid" approach, the `select()` construct accepts strings for most
+To gain a "hybrid" approach, the :func:`.select` construct accepts strings for most
of its arguments. Below we combine the usage of strings with our constructed
:func:`.select` object, by using the :func:`.select` object to structure the
statement, and strings to provide all the content within the structure. For
this example, SQLAlchemy is not given any :class:`~sqlalchemy.schema.Column`
or :class:`~sqlalchemy.schema.Table` objects in any of its expressions, so it
-cannot generate a FROM clause. So we also give it the ``from_obj`` keyword
-argument, which is a list of ``ClauseElements`` (or strings) to be placed
-within the FROM clause:
-
-.. sourcecode:: pycon+sql
-
- >>> s = select(["users.fullname || ', ' || addresses.email_address AS title"],
- ... and_(
- ... "users.id = addresses.user_id",
- ... "users.name BETWEEN 'm' AND 'z'",
- ... "(addresses.email_address LIKE :x OR addresses.email_address LIKE :y)"
- ... ),
- ... from_obj=['users', 'addresses']
- ... )
- {sql}>>> print conn.execute(s, x='%@aol.com', y='%@msn.com').fetchall() #doctest: +NORMALIZE_WHITESPACE
+cannot generate a FROM clause. So we also use the :meth:`~.Select.select_from`
+method, which accepts a :class:`.FromClause` or string expression
+to be placed within the FROM clause:
+
+.. sourcecode:: pycon+sql
+
+ >>> s = select([
+ ... "users.fullname || ', ' || addresses.email_address AS title"
+ ... ]).\
+ ... where(
+ ... and_(
+ ... "users.id = addresses.user_id",
+ ... "users.name BETWEEN 'm' AND 'z'",
+ ... "(addresses.email_address LIKE :x OR addresses.email_address LIKE :y)"
+ ... )
+ ... ).select_from('users, addresses')
+ {sql}>>> conn.execute(s, x='%@aol.com', y='%@msn.com').fetchall() #doctest: +NORMALIZE_WHITESPACE
SELECT users.fullname || ', ' || addresses.email_address AS title
FROM users, addresses
- WHERE users.id = addresses.user_id AND users.name BETWEEN 'm' AND 'z' AND (addresses.email_address LIKE ? OR addresses.email_address LIKE ?)
+ WHERE users.id = addresses.user_id AND users.name BETWEEN 'm' AND 'z'
+ AND (addresses.email_address LIKE ? OR addresses.email_address LIKE ?)
('%@aol.com', '%@msn.com')
{stop}[(u'Wendy Williams, wendy@aol.com',)]
@@ -756,9 +793,9 @@ construct. It also becomes more tedious for SQLAlchemy to be made aware of the
datatypes in use; for example, if our bind parameters required UTF-8 encoding
before going in, or conversion from a Python ``datetime`` into a string (as is
required with SQLite), we would have to add extra information to our
-``text()`` construct. Similar issues arise on the result set side, where
+:func:`~.expression.text` construct. Similar issues arise on the result set side, where
SQLAlchemy also performs type-specific data conversion in some cases; still
-more information can be added to ``text()`` to work around this. But what we
+more information can be added to :func:`~.expression.text` to work around this. But what we
really lose from our statement is the ability to manipulate it, transform it,
and analyze it. These features are critical when using the ORM, which makes
heavy usage of relational transformations. To show off what we mean, we'll
@@ -789,16 +826,20 @@ once for each address. We create two :class:`.Alias` constructs against
>>> a1 = addresses.alias()
>>> a2 = addresses.alias()
- >>> s = select([users], and_(
- ... users.c.id==a1.c.user_id,
- ... users.c.id==a2.c.user_id,
- ... a1.c.email_address=='jack@msn.com',
- ... a2.c.email_address=='jack@yahoo.com'
- ... ))
- {sql}>>> print conn.execute(s).fetchall() # doctest: +NORMALIZE_WHITESPACE
+ >>> s = select([users]).\
+ ... where(and_(
+ ... users.c.id == a1.c.user_id,
+ ... users.c.id == a2.c.user_id,
+ ... a1.c.email_address == 'jack@msn.com',
+ ... a2.c.email_address == 'jack@yahoo.com'
+ ... ))
+ {sql}>>> conn.execute(s).fetchall() # doctest: +NORMALIZE_WHITESPACE
SELECT users.id, users.name, users.fullname
FROM users, addresses AS addresses_1, addresses AS addresses_2
- WHERE users.id = addresses_1.user_id AND users.id = addresses_2.user_id AND addresses_1.email_address = ? AND addresses_2.email_address = ?
+ WHERE users.id = addresses_1.user_id
+ AND users.id = addresses_2.user_id
+ AND addresses_1.email_address = ?
+ AND addresses_2.email_address = ?
('jack@msn.com', 'jack@yahoo.com')
{stop}[(1, u'jack', u'Jack Jones')]
@@ -826,12 +867,15 @@ to "correlate" the inner ``users`` table with the outer one:
.. sourcecode:: pycon+sql
>>> a1 = s.correlate(None).alias()
- >>> s = select([users.c.name], users.c.id==a1.c.id)
- {sql}>>> print conn.execute(s).fetchall() # doctest: +NORMALIZE_WHITESPACE
+ >>> s = select([users.c.name]).where(users.c.id == a1.c.id)
+ {sql}>>> conn.execute(s).fetchall() # doctest: +NORMALIZE_WHITESPACE
SELECT users.name
- FROM users, (SELECT users.id AS id, users.name AS name, users.fullname AS fullname
- FROM users, addresses AS addresses_1, addresses AS addresses_2
- WHERE users.id = addresses_1.user_id AND users.id = addresses_2.user_id AND addresses_1.email_address = ? AND addresses_2.email_address = ?) AS anon_1
+ FROM users,
+ (SELECT users.id AS id, users.name AS name, users.fullname AS fullname
+ FROM users, addresses AS addresses_1, addresses AS addresses_2
+ WHERE users.id = addresses_1.user_id AND users.id = addresses_2.user_id
+ AND addresses_1.email_address = ?
+ AND addresses_2.email_address = ?) AS anon_1
WHERE users.id = anon_1.id
('jack@msn.com', 'jack@yahoo.com')
{stop}[(u'jack',)]
@@ -844,8 +888,8 @@ We're halfway along to being able to construct any SELECT expression. The next
cornerstone of the SELECT is the JOIN expression. We've already been doing
joins in our examples, by just placing two tables in either the columns clause
or the where clause of the :func:`.select` construct. But if we want to make a
-real "JOIN" or "OUTERJOIN" construct, we use the ``join()`` and
-``outerjoin()`` methods, most commonly accessed from the left table in the
+real "JOIN" or "OUTERJOIN" construct, we use the :meth:`~.FromClause.join` and
+:meth:`~.FromClause.outerjoin` methods, most commonly accessed from the left table in the
join:
.. sourcecode:: pycon+sql
@@ -866,34 +910,38 @@ username:
.. sourcecode:: pycon+sql
- >>> print users.join(addresses, addresses.c.email_address.like(users.c.name + '%'))
- users JOIN addresses ON addresses.email_address LIKE users.name || :name_1
+ >>> print users.join(addresses,
+ ... addresses.c.email_address.like(users.c.name + '%')
+ ... )
+ users JOIN addresses ON addresses.email_address LIKE (users.name || :name_1)
When we create a :func:`.select` construct, SQLAlchemy looks around at the
tables we've mentioned and then places them in the FROM clause of the
statement. When we use JOINs however, we know what FROM clause we want, so
-here we make usage of the ``from_obj`` keyword argument:
+here we make use of the :meth:`~.Select.select_from` method:
.. sourcecode:: pycon+sql
- >>> s = select([users.c.fullname], from_obj=[
- ... users.join(addresses, addresses.c.email_address.like(users.c.name + '%'))
- ... ])
- {sql}>>> print conn.execute(s).fetchall() # doctest: +NORMALIZE_WHITESPACE
+ >>> s = select([users.c.fullname]).select_from(
+ ... users.join(addresses,
+ ... addresses.c.email_address.like(users.c.name + '%'))
+ ... )
+ {sql}>>> conn.execute(s).fetchall() # doctest: +NORMALIZE_WHITESPACE
SELECT users.fullname
- FROM users JOIN addresses ON addresses.email_address LIKE users.name || ?
+ FROM users JOIN addresses ON addresses.email_address LIKE (users.name || ?)
('%',)
{stop}[(u'Jack Jones',), (u'Jack Jones',), (u'Wendy Williams',)]
-The ``outerjoin()`` function just creates ``LEFT OUTER JOIN`` constructs. It's
-used just like ``join()``:
+The :meth:`~.FromClause.outerjoin` method creates ``LEFT OUTER JOIN`` constructs,
+and is used in the same way as :meth:`~.FromClause.join`:
.. sourcecode:: pycon+sql
- >>> s = select([users.c.fullname], from_obj=[users.outerjoin(addresses)])
+ >>> s = select([users.c.fullname]).select_from(users.outerjoin(addresses))
>>> print s # doctest: +NORMALIZE_WHITESPACE
SELECT users.fullname
- FROM users LEFT OUTER JOIN addresses ON users.id = addresses.user_id
+ FROM users
+ LEFT OUTER JOIN addresses ON users.id = addresses.user_id
That's the output ``outerjoin()`` produces, unless, of course, you're stuck in
a gig using Oracle prior to version 9, and you've set up your engine (which
@@ -910,132 +958,6 @@ would be using ``OracleDialect``) to use Oracle-specific SQL:
If you don't know what that SQL means, don't worry ! The secret tribe of
Oracle DBAs don't want their black magic being found out ;).
-Intro to Generative Selects
-================================================
-
-We've now gained the ability to construct very sophisticated statements. We
-can use all kinds of operators, table constructs, text, joins, and aliases.
-The point of all of this, as mentioned earlier, is not that it's an "easier"
-or "better" way to write SQL than just writing a SQL statement yourself; the
-point is that it's better for writing *programmatically generated* SQL which
-can be morphed and adapted as needed in automated scenarios.
-
-To support this, the :func:`.select` construct we've been working with
-supports piecemeal construction, in addition to the "all at once" method we've
-been doing. Suppose you're writing a search function, which receives criterion
-and then must construct a select from it. To accomplish this, upon each
-criterion encountered, you apply "generative" criterion to an existing
-:func:`.select` construct with new elements, one at a time. We start with a
-basic :func:`.select` constructed with the shortcut method available on the
-``users`` table:
-
-.. sourcecode:: pycon+sql
-
- >>> query = users.select()
- >>> print query # doctest: +NORMALIZE_WHITESPACE
- SELECT users.id, users.name, users.fullname
- FROM users
-
-We encounter search criterion of "name='jack'". So we apply WHERE criterion
-stating such:
-
-.. sourcecode:: pycon+sql
-
- >>> query = query.where(users.c.name=='jack')
-
-Next, we encounter that they'd like the results in descending order by full
-name. We apply ORDER BY, using an extra modifier ``desc``:
-
-.. sourcecode:: pycon+sql
-
- >>> query = query.order_by(users.c.fullname.desc())
-
-We also come across that they'd like only users who have an address at MSN. A
-quick way to tack this on is by using an EXISTS clause, which we correlate to
-the ``users`` table in the enclosing SELECT:
-
-.. sourcecode:: pycon+sql
-
- >>> from sqlalchemy.sql import exists
- >>> query = query.where(
- ... exists([addresses.c.id],
- ... and_(addresses.c.user_id==users.c.id, addresses.c.email_address.like('%@msn.com'))
- ... ).correlate(users))
-
-And finally, the application also wants to see the listing of email addresses
-at once; so to save queries, we outerjoin the ``addresses`` table (using an
-outer join so that users with no addresses come back as well; since we're
-programmatic, we might not have kept track that we used an EXISTS clause
-against the ``addresses`` table too...). Additionally, since the ``users`` and
-``addresses`` table both have a column named ``id``, let's isolate their names
-from each other in the COLUMNS clause by using labels:
-
-.. sourcecode:: pycon+sql
-
- >>> query = query.column(addresses).select_from(users.outerjoin(addresses)).apply_labels()
-
-Let's bake for .0001 seconds and see what rises:
-
-.. sourcecode:: pycon+sql
-
- >>> conn.execute(query).fetchall() # doctest: +NORMALIZE_WHITESPACE
- {opensql}SELECT users.id AS users_id, users.name AS users_name, users.fullname AS users_fullname, addresses.id AS addresses_id, addresses.user_id AS addresses_user_id, addresses.email_address AS addresses_email_address
- FROM users LEFT OUTER JOIN addresses ON users.id = addresses.user_id
- WHERE users.name = ? AND (EXISTS (SELECT addresses.id
- FROM addresses
- WHERE addresses.user_id = users.id AND addresses.email_address LIKE ?)) ORDER BY users.fullname DESC
- ('jack', '%@msn.com')
- {stop}[(1, u'jack', u'Jack Jones', 1, 1, u'jack@yahoo.com'), (1, u'jack', u'Jack Jones', 2, 1, u'jack@msn.com')]
-
-The generative approach is about starting small, adding one thing at a time,
-to arrive with a full statement.
-
-Transforming a Statement
-------------------------
-
-We've seen how methods like :meth:`.Select.where` and :meth:`.SelectBase.order_by` are
-part of the so-called *Generative* family of methods on the :func:`.select` construct,
-where one :func:`.select` copies itself to return a new one with modifications.
-SQL constructs also support another form of generative behavior which is
-the *transformation*. This is an advanced technique that most core applications
-won't use directly; however, it is a system which the ORM relies on heavily,
-and can be useful for any system that deals with generalized behavior of Core SQL
-constructs.
-
-Using a transformation we can take our ``users``/``addresses`` query and replace
-all occurrences of ``addresses`` with an alias of itself. That is, anywhere
-that ``addresses`` is referred to in the original query, the new query will
-refer to ``addresses_1``, which is selected as ``addresses AS addresses_1``.
-The :meth:`.FromClause.replace_selectable` method can achieve this:
-
-.. sourcecode:: pycon+sql
-
- >>> a1 = addresses.alias()
- >>> query = query.replace_selectable(addresses, a1)
- >>> print query # doctest: +NORMALIZE_WHITESPACE
- {opensql}SELECT users.id AS users_id, users.name AS users_name, users.fullname AS users_fullname, addresses_1.id AS addresses_1_id, addresses_1.user_id AS addresses_1_user_id, addresses_1.email_address AS addresses_1_email_address
- FROM users LEFT OUTER JOIN addresses AS addresses_1 ON users.id = addresses_1.user_id
- WHERE users.name = :name_1 AND (EXISTS (SELECT addresses_1.id
- FROM addresses AS addresses_1
- WHERE addresses_1.user_id = users.id AND addresses_1.email_address LIKE :email_address_1)) ORDER BY users.fullname DESC
-
-For a query such as the above, we can access the columns referred
-to by the ``a1`` alias in a result set using the :class:`.Column` objects
-present directly on ``a1``:
-
-.. sourcecode:: pycon+sql
-
- {sql}>>> for row in conn.execute(query):
- ... print "Name:", row[users.c.name], "; Email Address", row[a1.c.email_address] # doctest: +NORMALIZE_WHITESPACE
- SELECT users.id AS users_id, users.name AS users_name, users.fullname AS users_fullname, addresses_1.id AS addresses_1_id, addresses_1.user_id AS addresses_1_user_id, addresses_1.email_address AS addresses_1_email_address
- FROM users LEFT OUTER JOIN addresses AS addresses_1 ON users.id = addresses_1.user_id
- WHERE users.name = ? AND (EXISTS (SELECT addresses_1.id
- FROM addresses AS addresses_1
- WHERE addresses_1.user_id = users.id AND addresses_1.email_address LIKE ?)) ORDER BY users.fullname DESC
- ('jack', '%@msn.com')
- {stop}Name: jack ; Email Address jack@yahoo.com
- Name: jack ; Email Address jack@msn.com
-
Everything Else
================
@@ -1055,7 +977,7 @@ here where it converts to positional for SQLite:
.. sourcecode:: pycon+sql
>>> from sqlalchemy.sql import bindparam
- >>> s = users.select(users.c.name==bindparam('username'))
+ >>> s = users.select(users.c.name == bindparam('username'))
{sql}>>> conn.execute(s, username='wendy').fetchall() # doctest: +NORMALIZE_WHITESPACE
SELECT users.id, users.name, users.fullname
FROM users
@@ -1074,7 +996,7 @@ off to the database:
{sql}>>> conn.execute(s, username='wendy').fetchall() # doctest: +NORMALIZE_WHITESPACE
SELECT users.id, users.name, users.fullname
FROM users
- WHERE users.name LIKE ? || '%'
+ WHERE users.name LIKE (? || '%')
('wendy',)
{stop}[(2, u'wendy', u'Wendy Williams')]
@@ -1084,14 +1006,23 @@ single named value is needed in the execute parameters:
.. sourcecode:: pycon+sql
- >>> s = select([users, addresses],
- ... users.c.name.like(bindparam('name', type_=String) + text("'%'")) |
- ... addresses.c.email_address.like(bindparam('name', type_=String) + text("'@%'")),
- ... from_obj=[users.outerjoin(addresses)])
+ >>> s = select([users, addresses]).\
+ ... where(
+ ... or_(
+ ... users.c.name.like(
+ ... bindparam('name', type_=String) + text("'%'")),
+ ... addresses.c.email_address.like(
+ ... bindparam('name', type_=String) + text("'@%'"))
+ ... )
+ ... ).\
+ ... select_from(users.outerjoin(addresses)).\
+ ... order_by(addresses.c.id)
{sql}>>> conn.execute(s, name='jack').fetchall() # doctest: +NORMALIZE_WHITESPACE
- SELECT users.id, users.name, users.fullname, addresses.id, addresses.user_id, addresses.email_address
+ SELECT users.id, users.name, users.fullname, addresses.id,
+ addresses.user_id, addresses.email_address
FROM users LEFT OUTER JOIN addresses ON users.id = addresses.user_id
- WHERE users.name LIKE ? || '%' OR addresses.email_address LIKE ? || '@%'
+ WHERE users.name LIKE (? || '%') OR addresses.email_address LIKE (? || '@%')
+ ORDER BY addresses.id
('jack', 'jack')
{stop}[(1, u'jack', u'Jack Jones', 1, 1, u'jack@yahoo.com'), (1, u'jack', u'Jack Jones', 2, 1, u'jack@msn.com')]
@@ -1136,13 +1067,16 @@ not important in this case:
.. sourcecode:: pycon+sql
- >>> print conn.execute(
- ... select([func.max(addresses.c.email_address, type_=String).label('maxemail')])
- ... ).scalar() # doctest: +NORMALIZE_WHITESPACE
+ >>> conn.execute(
+ ... select([
+ ... func.max(addresses.c.email_address, type_=String).
+ ... label('maxemail')
+ ... ])
+ ... ).scalar() # doctest: +NORMALIZE_WHITESPACE
{opensql}SELECT max(addresses.email_address) AS maxemail
FROM addresses
()
- {stop}www@www.org
+ {stop}u'www@www.org'
Databases such as PostgreSQL and Oracle which support functions that return
whole result sets can be assembled into selectable units, which can be used in
@@ -1154,14 +1088,19 @@ well as bind parameters:
.. sourcecode:: pycon+sql
>>> from sqlalchemy.sql import column
- >>> calculate = select([column('q'), column('z'), column('r')],
- ... from_obj=[func.calculate(bindparam('x'), bindparam('y'))])
-
- >>> print select([users], users.c.id > calculate.c.z) # doctest: +NORMALIZE_WHITESPACE
+ >>> calculate = select([column('q'), column('z'), column('r')]).\
+ ... select_from(
+ ... func.calculate(
+ ... bindparam('x'),
+ ... bindparam('y')
+ ... )
+ ... )
+ >>> calc = calculate.alias()
+ >>> print select([users]).where(users.c.id > calc.c.z) # doctest: +NORMALIZE_WHITESPACE
SELECT users.id, users.name, users.fullname
FROM users, (SELECT q, z, r
- FROM calculate(:x, :y))
- WHERE users.id > z
+ FROM calculate(:x, :y)) AS anon_1
+ WHERE users.id > anon_1.z
If we wanted to use our ``calculate`` statement twice with different bind
parameters, the :func:`~sqlalchemy.sql.expression.ClauseElement.unique_params`
@@ -1171,21 +1110,20 @@ of our selectable:
.. sourcecode:: pycon+sql
- >>> s = select([users], users.c.id.between(
- ... calculate.alias('c1').unique_params(x=17, y=45).c.z,
- ... calculate.alias('c2').unique_params(x=5, y=12).c.z))
-
+ >>> calc1 = calculate.alias('c1').unique_params(x=17, y=45)
+ >>> calc2 = calculate.alias('c2').unique_params(x=5, y=12)
+ >>> s = select([users]).\
+ ... where(users.c.id.between(calc1.c.z, calc2.c.z))
>>> print s # doctest: +NORMALIZE_WHITESPACE
SELECT users.id, users.name, users.fullname
- FROM users, (SELECT q, z, r
- FROM calculate(:x_1, :y_1)) AS c1, (SELECT q, z, r
- FROM calculate(:x_2, :y_2)) AS c2
+ FROM users,
+ (SELECT q, z, r FROM calculate(:x_1, :y_1)) AS c1,
+ (SELECT q, z, r FROM calculate(:x_2, :y_2)) AS c2
WHERE users.id BETWEEN c1.z AND c2.z
>>> s.compile().params
{u'x_2': 5, u'y_2': 12, u'y_1': 45, u'x_1': 17}
-See also :data:`~.expression.func`.
Window Functions
-----------------
@@ -1196,7 +1134,10 @@ OVER clause, using the :meth:`~.FunctionElement.over` method:
.. sourcecode:: pycon+sql
- >>> s = select([users.c.id, func.row_number().over(order_by=users.c.name)])
+ >>> s = select([
+ ... users.c.id,
+ ... func.row_number().over(order_by=users.c.name)
+ ... ])
>>> print s # doctest: +NORMALIZE_WHITESPACE
SELECT users.id, row_number() OVER (ORDER BY users.name) AS anon_1
FROM users
@@ -1205,40 +1146,51 @@ Unions and Other Set Operations
-------------------------------
Unions come in two flavors, UNION and UNION ALL, which are available via
-module level functions:
+module level functions :func:`~.expression.union` and
+:func:`~.expression.union_all`:
.. sourcecode:: pycon+sql
>>> from sqlalchemy.sql import union
>>> u = union(
- ... addresses.select(addresses.c.email_address=='foo@bar.com'),
- ... addresses.select(addresses.c.email_address.like('%@yahoo.com')),
+ ... addresses.select().
+ ... where(addresses.c.email_address == 'foo@bar.com'),
+ ... addresses.select().
+ ... where(addresses.c.email_address.like('%@yahoo.com')),
... ).order_by(addresses.c.email_address)
- {sql}>>> print conn.execute(u).fetchall() # doctest: +NORMALIZE_WHITESPACE
+ {sql}>>> conn.execute(u).fetchall() # doctest: +NORMALIZE_WHITESPACE
SELECT addresses.id, addresses.user_id, addresses.email_address
FROM addresses
- WHERE addresses.email_address = ? UNION SELECT addresses.id, addresses.user_id, addresses.email_address
+ WHERE addresses.email_address = ?
+ UNION
+ SELECT addresses.id, addresses.user_id, addresses.email_address
FROM addresses
WHERE addresses.email_address LIKE ? ORDER BY addresses.email_address
('foo@bar.com', '%@yahoo.com')
{stop}[(1, 1, u'jack@yahoo.com')]
-Also available, though not supported on all databases, are ``intersect()``,
-``intersect_all()``, ``except_()``, and ``except_all()``:
+Also available, though not supported on all databases, are
+:func:`~.expression.intersect`,
+:func:`~.expression.intersect_all`,
+:func:`~.expression.except_`, and :func:`~.expression.except_all`:
.. sourcecode:: pycon+sql
>>> from sqlalchemy.sql import except_
>>> u = except_(
- ... addresses.select(addresses.c.email_address.like('%@%.com')),
- ... addresses.select(addresses.c.email_address.like('%@msn.com'))
+ ... addresses.select().
+ ... where(addresses.c.email_address.like('%@%.com')),
+ ... addresses.select().
+ ... where(addresses.c.email_address.like('%@msn.com'))
... )
- {sql}>>> print conn.execute(u).fetchall() # doctest: +NORMALIZE_WHITESPACE
+ {sql}>>> conn.execute(u).fetchall() # doctest: +NORMALIZE_WHITESPACE
SELECT addresses.id, addresses.user_id, addresses.email_address
FROM addresses
- WHERE addresses.email_address LIKE ? EXCEPT SELECT addresses.id, addresses.user_id, addresses.email_address
+ WHERE addresses.email_address LIKE ?
+ EXCEPT
+ SELECT addresses.id, addresses.user_id, addresses.email_address
FROM addresses
WHERE addresses.email_address LIKE ?
('%@%.com', '%@msn.com')
@@ -1256,209 +1208,347 @@ want the "union" to be stated as a subquery:
>>> u = except_(
... union(
- ... addresses.select(addresses.c.email_address.like('%@yahoo.com')),
- ... addresses.select(addresses.c.email_address.like('%@msn.com'))
+ ... addresses.select().
+ ... where(addresses.c.email_address.like('%@yahoo.com')),
+ ... addresses.select().
+ ... where(addresses.c.email_address.like('%@msn.com'))
... ).alias().select(), # apply subquery here
... addresses.select(addresses.c.email_address.like('%@msn.com'))
... )
- {sql}>>> print conn.execute(u).fetchall() # doctest: +NORMALIZE_WHITESPACE
+ {sql}>>> conn.execute(u).fetchall() # doctest: +NORMALIZE_WHITESPACE
SELECT anon_1.id, anon_1.user_id, anon_1.email_address
FROM (SELECT addresses.id AS id, addresses.user_id AS user_id,
- addresses.email_address AS email_address FROM addresses
- WHERE addresses.email_address LIKE ? UNION SELECT addresses.id AS id,
- addresses.user_id AS user_id, addresses.email_address AS email_address
- FROM addresses WHERE addresses.email_address LIKE ?) AS anon_1 EXCEPT
+ addresses.email_address AS email_address
+ FROM addresses
+ WHERE addresses.email_address LIKE ?
+ UNION
+ SELECT addresses.id AS id,
+ addresses.user_id AS user_id,
+ addresses.email_address AS email_address
+ FROM addresses
+ WHERE addresses.email_address LIKE ?) AS anon_1
+ EXCEPT
SELECT addresses.id, addresses.user_id, addresses.email_address
FROM addresses
WHERE addresses.email_address LIKE ?
('%@yahoo.com', '%@msn.com', '%@msn.com')
{stop}[(1, 1, u'jack@yahoo.com')]
+.. _scalar_selects:
Scalar Selects
--------------
-To embed a SELECT in a column expression, use
-:func:`~sqlalchemy.sql.expression.SelectBase.as_scalar`:
+A scalar select is a SELECT that returns exactly one row and one
+column. It can then be used as a column expression. A scalar select
+is often a :term:`correlated subquery`, which relies upon the enclosing
+SELECT statement in order to acquire at least one of its FROM clauses.
+
+The :func:`.select` construct can be modified to act as a
+column expression by calling either the :meth:`~.SelectBase.as_scalar`
+or :meth:`~.SelectBase.label` method:
+
+.. sourcecode:: pycon+sql
+
+ >>> stmt = select([func.count(addresses.c.id)]).\
+ ... where(users.c.id == addresses.c.user_id).\
+ ... as_scalar()
+
+The above construct is now a :class:`~.expression.ScalarSelect` object,
+and is no longer part of the :class:`~.expression.FromClause` hierarchy;
+it instead is within the :class:`~.expression.ColumnElement` family of
+expression constructs. We can place this construct the same as any
+other column within another :func:`.select`:
.. sourcecode:: pycon+sql
- {sql}>>> print conn.execute(select([ # doctest: +NORMALIZE_WHITESPACE
- ... users.c.name,
- ... select([func.count(addresses.c.id)], users.c.id==addresses.c.user_id).as_scalar()
- ... ])).fetchall()
- SELECT users.name, (SELECT count(addresses.id) AS count_1
+ >>> conn.execute(select([users.c.name, stmt])).fetchall() # doctest: +NORMALIZE_WHITESPACE
+ {opensql}SELECT users.name, (SELECT count(addresses.id) AS count_1
FROM addresses
WHERE users.id = addresses.user_id) AS anon_1
FROM users
()
- {stop}[(u'jack', 2), (u'wendy', 2), (u'fred', 0), (u'mary', 0)]
+ {stop}[(u'jack', 2), (u'wendy', 2)]
-Alternatively, applying a ``label()`` to a select evaluates it as a scalar as
-well:
+To apply a non-anonymous column name to our scalar select, we create
+it using :meth:`.SelectBase.label` instead:
.. sourcecode:: pycon+sql
- {sql}>>> print conn.execute(select([ # doctest: +NORMALIZE_WHITESPACE
- ... users.c.name,
- ... select([func.count(addresses.c.id)], users.c.id==addresses.c.user_id).label('address_count')
- ... ])).fetchall()
- SELECT users.name, (SELECT count(addresses.id) AS count_1
+ >>> stmt = select([func.count(addresses.c.id)]).\
+ ... where(users.c.id == addresses.c.user_id).\
+ ... label("address_count")
+ >>> conn.execute(select([users.c.name, stmt])).fetchall() # doctest: +NORMALIZE_WHITESPACE
+ {opensql}SELECT users.name, (SELECT count(addresses.id) AS count_1
FROM addresses
WHERE users.id = addresses.user_id) AS address_count
FROM users
()
- {stop}[(u'jack', 2), (u'wendy', 2), (u'fred', 0), (u'mary', 0)]
+ {stop}[(u'jack', 2), (u'wendy', 2)]
.. _correlated_subqueries:
Correlated Subqueries
---------------------
-Notice in the examples on "scalar selects", the FROM clause of each embedded
+Notice in the examples on :ref:`scalar_selects`, the FROM clause of each embedded
select did not contain the ``users`` table in its FROM clause. This is because
-SQLAlchemy automatically attempts to correlate embedded FROM objects to that
-of an enclosing query. To disable this, or to specify explicit FROM clauses to
-be correlated, use ``correlate()``::
+SQLAlchemy automatically :term:`correlates` embedded FROM objects to that
+of an enclosing query, if present, and if the inner SELECT statement would
+still have at least one FROM clause of its own. For example:
- >>> s = select([users.c.name], users.c.id==select([users.c.id]).correlate(None))
- >>> print s # doctest: +NORMALIZE_WHITESPACE
- SELECT users.name
- FROM users
- WHERE users.id = (SELECT users.id
- FROM users)
+.. sourcecode:: pycon+sql
- >>> s = select([users.c.name, addresses.c.email_address], users.c.id==
- ... select([users.c.id], users.c.id==addresses.c.user_id).correlate(addresses)
- ... )
- >>> print s # doctest: +NORMALIZE_WHITESPACE
- SELECT users.name, addresses.email_address
- FROM users, addresses
- WHERE users.id = (SELECT users.id
+ >>> stmt = select([addresses.c.user_id]).\
+ ... where(addresses.c.user_id == users.c.id).\
+ ... where(addresses.c.email_address == 'jack@yahoo.com')
+ >>> enclosing_stmt = select([users.c.name]).where(users.c.id == stmt)
+ >>> conn.execute(enclosing_stmt).fetchall() # doctest: +NORMALIZE_WHITESPACE
+ {opensql}SELECT users.name
FROM users
- WHERE users.id = addresses.user_id)
+ WHERE users.id = (SELECT addresses.user_id
+ FROM addresses
+ WHERE addresses.user_id = users.id
+ AND addresses.email_address = ?)
+ ('jack@yahoo.com',)
+ {stop}[(u'jack',)]
+
+Auto-correlation will usually do what's expected, however it can also be controlled.
+For example, if we wanted a statement to correlate only to the ``addresses`` table
+but not the ``users`` table, even if both were present in the enclosing SELECT,
+we use the :meth:`~.Select.correlate` method to specify those FROM clauses that
+may be correlated:
+
+.. sourcecode:: pycon+sql
+
+ >>> stmt = select([users.c.id]).\
+ ... where(users.c.id == addresses.c.user_id).\
+ ... where(users.c.name == 'jack').\
+ ... correlate(addresses)
+ >>> enclosing_stmt = select(
+ ... [users.c.name, addresses.c.email_address]).\
+ ... select_from(users.join(addresses)).\
+ ... where(users.c.id == stmt)
+ >>> conn.execute(enclosing_stmt).fetchall() # doctest: +NORMALIZE_WHITESPACE
+ {opensql}SELECT users.name, addresses.email_address
+ FROM users JOIN addresses ON users.id = addresses.user_id
+ WHERE users.id = (SELECT users.id
+ FROM users
+ WHERE users.id = addresses.user_id AND users.name = ?)
+ ('jack',)
+ {stop}[(u'jack', u'jack@yahoo.com'), (u'jack', u'jack@msn.com')]
+
+To entirely disable a statement from correlating, we can pass ``None``
+as the argument:
+
+.. sourcecode:: pycon+sql
+
+ >>> stmt = select([users.c.id]).\
+ ... where(users.c.name == 'wendy').\
+ ... correlate(None)
+ >>> enclosing_stmt = select([users.c.name]).\
+ ... where(users.c.id == stmt)
+ >>> conn.execute(enclosing_stmt).fetchall() # doctest: +NORMALIZE_WHITESPACE
+ {opensql}SELECT users.name
+ FROM users
+ WHERE users.id = (SELECT users.id
+ FROM users
+ WHERE users.name = ?)
+ ('wendy',)
+ {stop}[(u'wendy',)]
Ordering, Grouping, Limiting, Offset...ing...
---------------------------------------------
+Ordering is done by passing column expressions to the
+:meth:`~.SelectBase.order_by` method:
+
+.. sourcecode:: pycon+sql
+
+ >>> stmt = select([users.c.name]).order_by(users.c.name)
+ >>> conn.execute(stmt).fetchall() # doctest: +NORMALIZE_WHITESPACE
+ {opensql}SELECT users.name
+ FROM users ORDER BY users.name
+ ()
+ {stop}[(u'jack',), (u'wendy',)]
-The :func:`.select` function can take keyword arguments ``order_by``,
-``group_by`` (as well as ``having``), ``limit``, and ``offset``. There's also
-``distinct=True``. These are all also available as generative functions.
-``order_by()`` expressions can use the modifiers ``asc()`` or ``desc()`` to
-indicate ascending or descending.
+Ascending or descending can be controlled using the :meth:`~.ColumnElement.asc`
+and :meth:`~.ColumnElement.desc` modifiers:
.. sourcecode:: pycon+sql
- >>> s = select([addresses.c.user_id, func.count(addresses.c.id)]).\
- ... group_by(addresses.c.user_id).having(func.count(addresses.c.id)>1)
- {sql}>>> print conn.execute(s).fetchall() # doctest: +NORMALIZE_WHITESPACE
- SELECT addresses.user_id, count(addresses.id) AS count_1
- FROM addresses GROUP BY addresses.user_id
- HAVING count(addresses.id) > ?
- (1,)
- {stop}[(1, 2), (2, 2)]
+ >>> stmt = select([users.c.name]).order_by(users.c.name.desc())
+ >>> conn.execute(stmt).fetchall() # doctest: +NORMALIZE_WHITESPACE
+ {opensql}SELECT users.name
+ FROM users ORDER BY users.name DESC
+ ()
+ {stop}[(u'wendy',), (u'jack',)]
+
+Grouping refers to the GROUP BY clause, and is usually used in conjunction
+with aggregate functions to establish groups of rows to be aggregated.
+This is provided via the :meth:`~.SelectBase.group_by` method:
- >>> s = select([addresses.c.email_address, addresses.c.id]).distinct().\
- ... order_by(addresses.c.email_address.desc(), addresses.c.id)
- {sql}>>> conn.execute(s).fetchall() # doctest: +NORMALIZE_WHITESPACE
- SELECT DISTINCT addresses.email_address, addresses.id
- FROM addresses ORDER BY addresses.email_address DESC, addresses.id
+.. sourcecode:: pycon+sql
+
+ >>> stmt = select([users.c.name, func.count(addresses.c.id)]).\
+ ... select_from(users.join(addresses)).\
+ ... group_by(users.c.name)
+ >>> conn.execute(stmt).fetchall() # doctest: +NORMALIZE_WHITESPACE
+ {opensql}SELECT users.name, count(addresses.id) AS count_1
+ FROM users JOIN addresses
+ ON users.id = addresses.user_id
+ GROUP BY users.name
()
- {stop}[(u'www@www.org', 3), (u'wendy@aol.com', 4), (u'jack@yahoo.com', 1), (u'jack@msn.com', 2)]
+ {stop}[(u'jack', 2), (u'wendy', 2)]
- >>> s = select([addresses]).offset(1).limit(1)
- {sql}>>> print conn.execute(s).fetchall() # doctest: +NORMALIZE_WHITESPACE
- SELECT addresses.id, addresses.user_id, addresses.email_address
- FROM addresses
- LIMIT 1 OFFSET 1
+HAVING can be used to filter results on an aggregate value, after GROUP BY has
+been applied. It's available here via the :meth:`~.Select.having`
+method:
+
+.. sourcecode:: pycon+sql
+
+ >>> stmt = select([users.c.name, func.count(addresses.c.id)]).\
+ ... select_from(users.join(addresses)).\
+ ... group_by(users.c.name).\
+ ... having(func.length(users.c.name) > 4)
+ >>> conn.execute(stmt).fetchall() # doctest: +NORMALIZE_WHITESPACE
+ {opensql}SELECT users.name, count(addresses.id) AS count_1
+ FROM users JOIN addresses
+ ON users.id = addresses.user_id
+ GROUP BY users.name
+ HAVING length(users.name) > ?
+ (4,)
+ {stop}[(u'wendy', 2)]
+
+A common system of dealing with duplicates in composed SELECT statements
+is the DISTINCT modifier. A simple DISTINCT clause can be added using the
+:meth:`.Select.distinct` method:
+
+.. sourcecode:: pycon+sql
+
+ >>> stmt = select([users.c.name]).\
+ ... where(addresses.c.email_address.
+ ... contains(users.c.name)).\
+ ... distinct()
+ >>> conn.execute(stmt).fetchall() # doctest: +NORMALIZE_WHITESPACE
+ {opensql}SELECT DISTINCT users.name
+ FROM users, addresses
+ WHERE addresses.email_address LIKE '%%' || users.name || '%%'
()
- {stop}[(2, 1, u'jack@msn.com')]
+ {stop}[(u'jack',), (u'wendy',)]
-.. _inserts_and_updates:
+Most database backends support a system of limiting how many rows
+are returned, and the majority also feature a means of starting to return
+rows after a given "offset". While common backends like Postgresql,
+MySQL and SQLite support LIMIT and OFFSET keywords, other backends
+need to refer to more esoteric features such as "window functions"
+and row ids to achieve the same effect. The :meth:`~.Select.limit`
+and :meth:`~.Select.offset` methods provide an easy abstraction
+into the current backend's methodology:
-Inserts and Updates
-===================
+.. sourcecode:: pycon+sql
-Finally, we're back to INSERT for some more detail. The
-:func:`~sqlalchemy.sql.expression.insert` construct provides a :meth:`~.ValuesBase.values`
-method which can be used to send any value or clause expression to the VALUES
-portion of the INSERT::
+ >>> stmt = select([users.c.name, addresses.c.email_address]).\
+ ... select_from(users.join(addresses)).\
+ ... limit(1).offset(1)
+ >>> conn.execute(stmt).fetchall() # doctest: +NORMALIZE_WHITESPACE
+ {opensql}SELECT users.name, addresses.email_address
+ FROM users JOIN addresses ON users.id = addresses.user_id
+ LIMIT ? OFFSET ?
+ (1, 1)
+ {stop}[(u'jack', u'jack@msn.com')]
- # insert from a function
- users.insert().values(id=12, name=func.upper('jack'))
- # insert from a concatenation expression
- addresses.insert().values(email_address = name + '@' + host)
+.. _inserts_and_updates:
-``values()`` can be mixed with per-execution values::
+Inserts, Updates and Deletes
+============================
- conn.execute(
- users.insert().values(name=func.upper('jack')),
- fullname='Jack Jones'
- )
+We've seen :meth:`~.TableClause.insert` demonstrated
+earlier in this tutorial. Where :meth:`~.TableClause.insert`
+produces INSERT, the :meth:`~.TableClause.update`
+method produces UPDATE. Both of these constructs feature
+a method called :meth:`~.ValuesBase.values` which specifies
+the VALUES or SET clause of the statement.
-:func:`~sqlalchemy.sql.expression.bindparam` constructs can be passed, however
-the names of the table's columns are reserved for the "automatic" generation
-of bind names::
+The :meth:`~.ValuesBase.values` method accommodates any column expression
+as a value:
- users.insert().values(id=bindparam('_id'), name=bindparam('_name'))
+.. sourcecode:: pycon+sql
- # insert many rows at once:
- conn.execute(
- users.insert().values(id=bindparam('_id'), name=bindparam('_name')),
- [
- {'_id':1, '_name':'name1'},
- {'_id':2, '_name':'name2'},
- {'_id':3, '_name':'name3'},
- ]
- )
+ >>> stmt = users.update().\
+ ... values(fullname="Fullname: " + users.c.name)
+ >>> conn.execute(stmt) #doctest: +ELLIPSIS
+ {opensql}UPDATE users SET fullname=(? || users.name)
+ ('Fullname: ',)
+ COMMIT
+ {stop}<sqlalchemy.engine.result.ResultProxy object at 0x...>
+
+When using :meth:`~.TableClause.insert` or :meth:`~.TableClause.update`
+in an "execute many" context, we may also want to specify named
+bound parameters which we can refer to in the argument list.
+The two constructs will automatically generate bound placeholders
+for any column names passed in the dictionaries sent to
+:meth:`~.Connection.execute` at execution time. However, if we
+wish to use explicitly targeted named parameters with composed expressions,
+we need to use the :func:`~.expression.bindparam` construct.
+When using :func:`~.expression.bindparam` with
+:meth:`~.TableClause.insert` or :meth:`~.TableClause.update`,
+the names of the table's columns themselves are reserved for the
+"automatic" generation of bind names. We can combine the usage
+of implicitly available bind names and explicitly named parameters
+as in the example below:
+
+.. sourcecode:: pycon+sql
+
+ >>> stmt = users.insert().\
+ ... values(name=bindparam('_name') + " .. name")
+ >>> conn.execute(stmt, [ # doctest: +ELLIPSIS
+ ... {'id':4, '_name':'name1'},
+ ... {'id':5, '_name':'name2'},
+ ... {'id':6, '_name':'name3'},
+ ... ])
+ {opensql}INSERT INTO users (id, name) VALUES (?, (? || ?))
+ ((4, 'name1', ' .. name'), (5, 'name2', ' .. name'), (6, 'name3', ' .. name'))
+ COMMIT
+ <sqlalchemy.engine.result.ResultProxy object at 0x...>
-An UPDATE statement is emitted using the :func:`.update` construct. These
-work much like an INSERT, except there is an additional WHERE clause
+An UPDATE statement is emitted using the :meth:`~.TableClause.update` construct. This
+works much like an INSERT, except there is an additional WHERE clause
that can be specified:
.. sourcecode:: pycon+sql
- >>> # change 'jack' to 'ed'
- {sql}>>> conn.execute(users.update().
- ... where(users.c.name=='jack').
- ... values(name='ed')
- ... ) #doctest: +ELLIPSIS
- UPDATE users SET name=? WHERE users.name = ?
- ('ed', 'jack')
- COMMIT
- {stop}<sqlalchemy.engine.ResultProxy object at 0x...>
+ >>> stmt = users.update().\
+ ... where(users.c.name == 'jack').\
+ ... values(name='ed')
- >>> # use bind parameters
- >>> u = users.update().\
- ... where(users.c.name==bindparam('oldname')).\
- ... values(name=bindparam('newname'))
- {sql}>>> conn.execute(u, oldname='jack', newname='ed') #doctest: +ELLIPSIS
- UPDATE users SET name=? WHERE users.name = ?
+ >>> conn.execute(stmt) #doctest: +ELLIPSIS
+ {opensql}UPDATE users SET name=? WHERE users.name = ?
('ed', 'jack')
COMMIT
- {stop}<sqlalchemy.engine.ResultProxy object at 0x...>
+ {stop}<sqlalchemy.engine.result.ResultProxy object at 0x...>
- >>> # with binds, you can also update many rows at once
- {sql}>>> conn.execute(u,
+When using :meth:`~.TableClause.update` in an "execute many" context,
+we may wish to also use explicitly named bound parameters in the
+WHERE clause. Again, :func:`~.expression.bindparam` is the construct
+used to achieve this:
+
+.. sourcecode:: pycon+sql
+
+ >>> stmt = users.update().\
+ ... where(users.c.name == bindparam('oldname')).\
+ ... values(name=bindparam('newname'))
+ >>> conn.execute(stmt, [
... {'oldname':'jack', 'newname':'ed'},
... {'oldname':'wendy', 'newname':'mary'},
... {'oldname':'jim', 'newname':'jake'},
- ... ) #doctest: +ELLIPSIS
- UPDATE users SET name=? WHERE users.name = ?
- [('ed', 'jack'), ('mary', 'wendy'), ('jake', 'jim')]
+ ... ]) #doctest: +ELLIPSIS
+ {opensql}UPDATE users SET name=? WHERE users.name = ?
+ (('ed', 'jack'), ('mary', 'wendy'), ('jake', 'jim'))
COMMIT
- {stop}<sqlalchemy.engine.ResultProxy object at 0x...>
+ {stop}<sqlalchemy.engine.result.ResultProxy object at 0x...>
- >>> # update a column to an expression.:
- {sql}>>> conn.execute(users.update().
- ... values(fullname="Fullname: " + users.c.name)
- ... ) #doctest: +ELLIPSIS
- UPDATE users SET fullname=(? || users.name)
- ('Fullname: ',)
- COMMIT
- {stop}<sqlalchemy.engine.ResultProxy object at 0x...>
Correlated Updates
------------------
@@ -1468,15 +1558,17 @@ table, or the same table:
.. sourcecode:: pycon+sql
- >>> s = select([addresses.c.email_address], addresses.c.user_id==users.c.id).limit(1)
- {sql}>>> conn.execute(users.update().values(fullname=s)) #doctest: +ELLIPSIS,+NORMALIZE_WHITESPACE
- UPDATE users SET fullname=(SELECT addresses.email_address
- FROM addresses
- WHERE addresses.user_id = users.id
- LIMIT 1 OFFSET 0)
- ()
+ >>> stmt = select([addresses.c.email_address]).\
+ ... where(addresses.c.user_id == users.c.id).\
+ ... limit(1)
+ >>> conn.execute(users.update().values(fullname=stmt)) #doctest: +ELLIPSIS,+NORMALIZE_WHITESPACE
+ {opensql}UPDATE users SET fullname=(SELECT addresses.email_address
+ FROM addresses
+ WHERE addresses.user_id = users.id
+ LIMIT ? OFFSET ?)
+ (1, 0)
COMMIT
- {stop}<sqlalchemy.engine.ResultProxy object at 0x...>
+ {stop}<sqlalchemy.engine.result.ResultProxy object at 0x...>
Multiple Table Updates
----------------------
@@ -1493,7 +1585,7 @@ implicitly, by specifying multiple tables in the WHERE clause::
stmt = users.update().\
values(name='ed wood').\
- where(users.c.id==addresses.c.id).\
+ where(users.c.id == addresses.c.id).\
where(addresses.c.email_address.startswith('ed%'))
conn.execute(stmt)
@@ -1511,7 +1603,7 @@ SET clause directly, using the dictionary form passed to :meth:`.Update.values`:
users.c.name:'ed wood',
addresses.c.email_address:'ed.wood@foo.com'
}).\
- where(users.c.id==addresses.c.id).\
+ where(users.c.id == addresses.c.id).\
where(addresses.c.email_address.startswith('ed%'))
The tables are referenced explicitly in the SET clause::
@@ -1528,24 +1620,42 @@ by the database if this syntax is not supported.
.. _deletes:
Deletes
-========
+-------
Finally, a delete. This is accomplished easily enough using the
-:func:`~.expression.delete` construct:
+:meth:`~.TableClause.delete` construct:
.. sourcecode:: pycon+sql
- {sql}>>> conn.execute(addresses.delete()) #doctest: +ELLIPSIS
- DELETE FROM addresses
+ >>> conn.execute(addresses.delete()) #doctest: +ELLIPSIS
+ {opensql}DELETE FROM addresses
()
COMMIT
- {stop}<sqlalchemy.engine.ResultProxy object at 0x...>
+ {stop}<sqlalchemy.engine.result.ResultProxy object at 0x...>
- {sql}>>> conn.execute(users.delete().where(users.c.name > 'm')) #doctest: +ELLIPSIS
- DELETE FROM users WHERE users.name > ?
+ >>> conn.execute(users.delete().where(users.c.name > 'm')) #doctest: +ELLIPSIS
+ {opensql}DELETE FROM users WHERE users.name > ?
('m',)
COMMIT
- {stop}<sqlalchemy.engine.ResultProxy object at 0x...>
+ {stop}<sqlalchemy.engine.result.ResultProxy object at 0x...>
+
+Matched Row Counts
+------------------
+
+Both of :meth:`~.TableClause.update` and
+:meth:`~.TableClause.delete` are associated with *matched row counts*. This is a
+number indicating the number of rows that were matched by the WHERE clause.
+Note that by "matched", this includes rows where no UPDATE actually took place.
+The value is available as :attr:`~.ResultProxy.rowcount`:
+
+.. sourcecode:: pycon+sql
+
+ >>> result = conn.execute(users.delete()) #doctest: +ELLIPSIS
+ {opensql}DELETE FROM users
+ ()
+ COMMIT
+ {stop}>>> result.rowcount
+ 1
Further Reference
==================
diff --git a/doc/build/core/types.rst b/doc/build/core/types.rst
index e59c81fc0..131e8e64d 100644
--- a/doc/build/core/types.rst
+++ b/doc/build/core/types.rst
@@ -672,7 +672,7 @@ Usage::
The implementation for :meth:`.ColumnOperators.__add__` is consulted
by an owning SQL expression, by instantiating the :class:`.TypeEngine.Comparator` with
-itself as as the ``expr`` attribute. The mechanics of the expression
+itself as the ``expr`` attribute. The mechanics of the expression
system are such that operations continue recursively until an
expression object produces a new SQL expression construct. Above, we
could just as well have said ``self.expr.op("goofy")(other)`` instead
diff --git a/doc/build/dialects/index.rst b/doc/build/dialects/index.rst
index 46628eed7..abf6e44f6 100644
--- a/doc/build/dialects/index.rst
+++ b/doc/build/dialects/index.rst
@@ -38,13 +38,15 @@ External Dialects
SQLAlchemy install and test suite from growing inordinately large.
The "classic" dialects such as SQLite, MySQL, Postgresql, Oracle,
- SQL Server, Firebird will remain in the Core for the time being.
+ SQL Server, and Firebird will remain in the Core for the time being.
Current external dialect projects for SQLAlchemy include:
+* `ibm_db_sa <http://code.google.com/p/ibm-db/wiki/README>`_ - driver for IBM DB2, developed jointly by IBM and SQLAlchemy developers.
* `sqlalchemy-access <https://bitbucket.org/zzzeek/sqlalchemy-access>`_ - driver for Microsoft Access.
* `sqlalchemy-akiban <https://github.com/zzzeek/sqlalchemy_akiban>`_ - driver and ORM extensions for the `Akiban <http://www.akiban.com>`_ database.
* `sqlalchemy-cubrid <https://bitbucket.org/zzzeek/sqlalchemy-cubrid>`_ - driver for the CUBRID database.
* `sqlalchemy-maxdb <https://bitbucket.org/zzzeek/sqlalchemy-maxdb>`_ - driver for the MaxDB database.
+* `CALCHIPAN <https://bitbucket.org/zzzeek/calchipan/>`_ - Adapts `Pandas <http://pandas.pydata.org/>`_ dataframes to SQLAlchemy.
diff --git a/doc/build/dialects/mysql.rst b/doc/build/dialects/mysql.rst
index b5119f23f..1e2784554 100644
--- a/doc/build/dialects/mysql.rst
+++ b/doc/build/dialects/mysql.rst
@@ -175,6 +175,11 @@ MySQL-Connector
.. automodule:: sqlalchemy.dialects.mysql.mysqlconnector
+cymysql
+------------
+
+.. automodule:: sqlalchemy.dialects.mysql.cymysql
+
Google App Engine
-----------------------
diff --git a/doc/build/glossary.rst b/doc/build/glossary.rst
index 8a473fda4..afdc35eda 100644
--- a/doc/build/glossary.rst
+++ b/doc/build/glossary.rst
@@ -158,6 +158,37 @@ Glossary
of classes; "joined", "single", and "concrete". The section
:ref:`inheritance_toplevel` describes inheritance mapping fully.
+ generative
+ A term that SQLAlchemy uses to refer what's normally known
+ as :term:`method chaining`; see that term for details.
+
+ method chaining
+ An object-oriented technique whereby the state of an object
+ is constructed by calling methods on the object. The
+ object features any number of methods, each of which return
+ a new object (or in some cases the same object) with
+ additional state added to the object.
+
+ The two SQLAlchemy objects that make the most use of
+ method chaining are the :class:`~.expression.Select`
+ object and the :class:`~.orm.query.Query` object.
+ For example, a :class:`~.expression.Select` object can
+ be assigned two expressions to its WHERE clause as well
+ as an ORDER BY clause by calling upon the :meth:`~.Select.where`
+ and :meth:`~.Select.order_by` methods::
+
+ stmt = select([user.c.name]).\
+ where(user.c.id > 5).\
+ where(user.c.name.like('e%').\
+ order_by(user.c.name)
+
+ Each method call above returns a copy of the original
+ :class:`~.expression.Select` object with additional qualifiers
+ added.
+
+ .. seealso::
+
+ :term:`generative`
release
releases
@@ -231,3 +262,36 @@ Glossary
`Unit of Work by Martin Fowler <http://martinfowler.com/eaaCatalog/unitOfWork.html>`_
:doc:`orm/session`
+
+ correlates
+ correlated subquery
+ correlated subqueries
+ A :term:`subquery` is correlated if it depends on data in the
+ enclosing ``SELECT``.
+
+ Below, a subquery selects the aggregate value ``MIN(a.id)``
+ from the ``email_address`` table, such that
+ it will be invoked for each value of ``user_account.id``, correlating
+ the value of this column against the ``email_address.user_account_id``
+ column:
+
+ .. sourcecode:: sql
+
+ SELECT user_account.name, email_address.email
+ FROM user_account
+ JOIN email_address ON user_account.id=email_address.user_account_id
+ WHERE email_address.id = (
+ SELECT MIN(a.id) FROM email_address AS a
+ WHERE a.user_account_id=user_account.id
+ )
+
+ The above subquery refers to the ``user_account`` table, which is not itself
+ in the ``FROM`` clause of this nested query. Instead, the ``user_account``
+    table is received from the enclosing query, where each row selected from
+ ``user_account`` results in a distinct execution of the subquery.
+
+ A correlated subquery is nearly always present in the :term:`WHERE clause`
+ or :term:`columns clause` of the enclosing ``SELECT`` statement, and never
+ in the :term:`FROM clause`; this is because
+ the correlation can only proceed once the original source rows from the enclosing
+ statement's FROM clause are available.
diff --git a/doc/build/intro.rst b/doc/build/intro.rst
index fc7e1142e..c5e7f7425 100644
--- a/doc/build/intro.rst
+++ b/doc/build/intro.rst
@@ -94,7 +94,7 @@ SQLAlchemy supports installation using standard Python "distutils" or
* **Standard Setuptools** - When using `setuptools <http://pypi.python.org/pypi/setuptools/>`_,
SQLAlchemy can be installed via ``setup.py`` or ``easy_install``, and the C
extensions are supported. setuptools is not supported on Python 3 at the time
- of of this writing.
+ of this writing.
* **Distribute** - With `distribute <http://pypi.python.org/pypi/distribute/>`_,
SQLAlchemy can be installed via ``setup.py`` or ``easy_install``, and the C
extensions as well as Python 3 builds are supported.
diff --git a/doc/build/orm/session.rst b/doc/build/orm/session.rst
index 97d6f15a0..6774af2d9 100644
--- a/doc/build/orm/session.rst
+++ b/doc/build/orm/session.rst
@@ -1372,7 +1372,7 @@ Using Subtransactions with Autocommit
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A subtransaction indicates usage of the :meth:`.Session.begin` method in conjunction with
-the ``subtransactions=True`` flag. This produces a a non-transactional, delimiting construct that
+the ``subtransactions=True`` flag. This produces a non-transactional, delimiting construct that
allows nesting of calls to :meth:`~.Session.begin` and :meth:`~.Session.commit`.
Its purpose is to allow the construction of code that can function within a transaction
both independently of any external code that starts a transaction,
diff --git a/doc/build/testdocs.py b/doc/build/testdocs.py
index 815aa8669..9d84808e5 100644
--- a/doc/build/testdocs.py
+++ b/doc/build/testdocs.py
@@ -60,7 +60,8 @@ def replace_file(s, newfile):
raise ValueError("Couldn't find suitable create_engine call to replace '%s' in it" % oldfile)
return s
-for filename in 'orm/tutorial','core/tutorial',:
+#for filename in 'orm/tutorial','core/tutorial',:
+for filename in 'core/tutorial',:
filename = '%s.rst' % filename
s = open(filename).read()
#s = replace_file(s, ':memory:')
diff --git a/examples/dogpile_caching/caching_query.py b/examples/dogpile_caching/caching_query.py
index 9a705cf31..f4724fb0b 100644
--- a/examples/dogpile_caching/caching_query.py
+++ b/examples/dogpile_caching/caching_query.py
@@ -136,24 +136,15 @@ def _key_from_query(query, qualifier=None):
"""
- v = []
- def visit_bindparam(bind):
-
- if bind.key in query._params:
- value = query._params[bind.key]
- elif bind.callable:
- value = bind.callable()
- else:
- value = bind.value
-
- v.append(unicode(value))
-
stmt = query.statement
- visitors.traverse(stmt, {}, {'bindparam': visit_bindparam})
+ compiled = stmt.compile()
+ params = compiled.params
# here we return the key as a long string. our "key mangler"
# set up with the region will boil it down to an md5.
- return " ".join([unicode(stmt)] + v)
+ return " ".join(
+ [unicode(compiled)] +
+ [unicode(params[k]) for k in sorted(params)])
class FromCache(MapperOption):
"""Specifies that a Query should load results from a cache."""
@@ -187,24 +178,6 @@ class RelationshipCache(MapperOption):
propagate_to_loaders = True
- def __init__(self, attribute, region="default"):
- self.region = region
- self.cls_ = attribute.property.parent.class_
- self.key = attribute.property.key
-
- def process_query_conditionally(self, query):
- if query._current_path:
- mapper, key = query._current_path[-2:]
- if issubclass(mapper.class_, self.cls_) and \
- key == self.key:
- query._cache_region = self
-
-class RelationshipCache(MapperOption):
- """Specifies that a Query as called within a "lazy load"
- should load results from a cache."""
-
- propagate_to_loaders = True
-
def __init__(self, attribute, region="default", cache_key=None):
"""Construct a new RelationshipCache.
diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py
index 2352f1308..6e924ea9d 100644
--- a/lib/sqlalchemy/__init__.py
+++ b/lib/sqlalchemy/__init__.py
@@ -120,7 +120,7 @@ from .engine import create_engine, engine_from_config
__all__ = sorted(name for name, obj in locals().items()
if not (name.startswith('_') or _inspect.ismodule(obj)))
-__version__ = '0.8.0'
+__version__ = '0.8.1'
del _inspect, sys
diff --git a/lib/sqlalchemy/dialects/__init__.py b/lib/sqlalchemy/dialects/__init__.py
index fbbff153c..7f5d34707 100644
--- a/lib/sqlalchemy/dialects/__init__.py
+++ b/lib/sqlalchemy/dialects/__init__.py
@@ -18,7 +18,6 @@ __all__ = (
from .. import util
-
def _auto_fn(name):
"""default dialect importer.
diff --git a/lib/sqlalchemy/dialects/firebird/kinterbasdb.py b/lib/sqlalchemy/dialects/firebird/kinterbasdb.py
index 206dbf38b..d581f799a 100644
--- a/lib/sqlalchemy/dialects/firebird/kinterbasdb.py
+++ b/lib/sqlalchemy/dialects/firebird/kinterbasdb.py
@@ -50,8 +50,8 @@ __ http://kinterbasdb.sourceforge.net/dist_docs/usage.html#special_issue_concurr
from .base import FBDialect, FBExecutionContext
from ... import util, types as sqltypes
-from ...util.compat import decimal
from re import match
+import decimal
class _FBNumeric_kinterbasdb(sqltypes.Numeric):
diff --git a/lib/sqlalchemy/dialects/mssql/information_schema.py b/lib/sqlalchemy/dialects/mssql/information_schema.py
index 35ce2450e..80e59d323 100644
--- a/lib/sqlalchemy/dialects/mssql/information_schema.py
+++ b/lib/sqlalchemy/dialects/mssql/information_schema.py
@@ -8,6 +8,7 @@
from ... import Table, MetaData, Column
from ...types import String, Unicode, Integer, TypeDecorator
+from ... import cast
ischema = MetaData()
@@ -22,6 +23,9 @@ class CoerceUnicode(TypeDecorator):
# end Py2K
return value
+ def bind_expression(self, bindvalue):
+ return cast(bindvalue, Unicode)
+
schemata = Table("SCHEMATA", ischema,
Column("CATALOG_NAME", CoerceUnicode, key="catalog_name"),
Column("SCHEMA_NAME", CoerceUnicode, key="schema_name"),
diff --git a/lib/sqlalchemy/dialects/mssql/pymssql.py b/lib/sqlalchemy/dialects/mssql/pymssql.py
index 6f6d3b01f..b916612fb 100644
--- a/lib/sqlalchemy/dialects/mssql/pymssql.py
+++ b/lib/sqlalchemy/dialects/mssql/pymssql.py
@@ -54,7 +54,7 @@ class MSDialect_pymssql(MSDialect):
module = __import__('pymssql')
# pymssql doesn't have a Binary method. we use string
# TODO: monkeypatching here is less than ideal
- module.Binary = str
+ module.Binary = lambda x: x if hasattr(x, 'decode') else str(x)
client_ver = tuple(int(x) for x in module.__version__.split("."))
if client_ver < (1, ):
@@ -85,6 +85,8 @@ class MSDialect_pymssql(MSDialect):
def is_disconnect(self, e, connection, cursor):
for msg in (
+ "Adaptive Server connection timed out",
+ "message 20003", # connection timeout
"Error 10054",
"Not connected to any MS SQL server",
"Connection is closed"
diff --git a/lib/sqlalchemy/dialects/mssql/pyodbc.py b/lib/sqlalchemy/dialects/mssql/pyodbc.py
index 8854d1caa..beb6066f5 100644
--- a/lib/sqlalchemy/dialects/mssql/pyodbc.py
+++ b/lib/sqlalchemy/dialects/mssql/pyodbc.py
@@ -114,7 +114,7 @@ for unix + PyODBC.
from .base import MSExecutionContext, MSDialect
from ...connectors.pyodbc import PyODBCConnector
from ... import types as sqltypes, util
-from ...util.compat import decimal
+import decimal
class _MSNumeric_pyodbc(sqltypes.Numeric):
diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py
index 62598ad00..6e78b52ea 100644
--- a/lib/sqlalchemy/dialects/mysql/base.py
+++ b/lib/sqlalchemy/dialects/mysql/base.py
@@ -2040,7 +2040,7 @@ class MySQLDialect(default.DefaultDialect):
try:
try:
rs = connection.execute(st)
- have = rs.rowcount > 0
+ have = len(rs.fetchall()) > 0
rs.close()
return have
except exc.DBAPIError, e:
@@ -2409,7 +2409,6 @@ class MySQLTableDefinitionParser(object):
state.constraints.append(spec)
else:
pass
-
return state
def _parse_constraints(self, line):
diff --git a/lib/sqlalchemy/dialects/mysql/cymysql.py b/lib/sqlalchemy/dialects/mysql/cymysql.py
index d9b43869c..0806f63b4 100644
--- a/lib/sqlalchemy/dialects/mysql/cymysql.py
+++ b/lib/sqlalchemy/dialects/mysql/cymysql.py
@@ -43,6 +43,7 @@ class MySQLDialect_cymysql(MySQLDialect_mysqldb):
driver = 'cymysql'
description_encoding = None
+ supports_sane_rowcount = False
colspecs = util.update_copy(
MySQLDialect.colspecs,
diff --git a/lib/sqlalchemy/dialects/mysql/gaerdbms.py b/lib/sqlalchemy/dialects/mysql/gaerdbms.py
index a93a78b73..ad0ce7638 100644
--- a/lib/sqlalchemy/dialects/mysql/gaerdbms.py
+++ b/lib/sqlalchemy/dialects/mysql/gaerdbms.py
@@ -65,10 +65,10 @@ class MySQLDialect_gaerdbms(MySQLDialect_mysqldb):
return [], opts
def _extract_error_code(self, exception):
- match = re.compile(r"^(\d+):").match(str(exception))
+ match = re.compile(r"^(\d+):|^\((\d+),").match(str(exception))
# The rdbms api will wrap then re-raise some types of errors
# making this regex return no matches.
- code = match.group(1) if match else None
+ code = match.group(1) or match.group(2) if match else None
if code:
return int(code)
diff --git a/lib/sqlalchemy/dialects/oracle/cx_oracle.py b/lib/sqlalchemy/dialects/oracle/cx_oracle.py
index fd9fea878..b8f7439f5 100644
--- a/lib/sqlalchemy/dialects/oracle/cx_oracle.py
+++ b/lib/sqlalchemy/dialects/oracle/cx_oracle.py
@@ -190,7 +190,7 @@ from ...engine import result as _result
from sqlalchemy import types as sqltypes, util, exc, processors
import random
import collections
-from sqlalchemy.util.compat import decimal
+import decimal
import re
diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py
index a7a9e65ce..c59caff8d 100644
--- a/lib/sqlalchemy/dialects/postgresql/base.py
+++ b/lib/sqlalchemy/dialects/postgresql/base.py
@@ -1030,6 +1030,15 @@ class PGCompiler(compiler.SQLCompiler):
field, self.process(expr))
+ def visit_substring_func(self, func, **kw):
+ s = self.process(func.clauses.clauses[0], **kw)
+ start = self.process(func.clauses.clauses[1], **kw)
+ if len(func.clauses.clauses) > 2:
+ length = self.process(func.clauses.clauses[2], **kw)
+ return "SUBSTRING(%s FROM %s FOR %s)" % (s, start, length)
+ else:
+ return "SUBSTRING(%s FROM %s)" % (s, start)
+
class PGDDLCompiler(compiler.DDLCompiler):
def get_column_specification(self, column, **kwargs):
colspec = self.preparer.format_column(column)
@@ -1042,8 +1051,7 @@ class PGDDLCompiler(compiler.DDLCompiler):
(
isinstance(column.default, schema.Sequence) and
column.default.optional
- )
- ):
+ )):
if isinstance(impl_type, sqltypes.BigInteger):
colspec += " BIGSERIAL"
else:
diff --git a/lib/sqlalchemy/dialects/postgresql/hstore.py b/lib/sqlalchemy/dialects/postgresql/hstore.py
index 157e03fd5..e555a1afd 100644
--- a/lib/sqlalchemy/dialects/postgresql/hstore.py
+++ b/lib/sqlalchemy/dialects/postgresql/hstore.py
@@ -6,7 +6,7 @@
import re
-from .base import ARRAY
+from .base import ARRAY, ischema_names
from ... import types as sqltypes
from ...sql import functions as sqlfunc
from ...sql.operators import custom_op
@@ -276,6 +276,9 @@ class HSTORE(sqltypes.Concatenable, sqltypes.TypeEngine):
return process
+ischema_names['hstore'] = HSTORE
+
+
class hstore(sqlfunc.GenericFunction):
"""Construct an hstore value within a SQL expression using the
Postgresql ``hstore()`` function.
diff --git a/lib/sqlalchemy/dialects/postgresql/pg8000.py b/lib/sqlalchemy/dialects/postgresql/pg8000.py
index 214db348c..0e503746c 100644
--- a/lib/sqlalchemy/dialects/postgresql/pg8000.py
+++ b/lib/sqlalchemy/dialects/postgresql/pg8000.py
@@ -27,7 +27,7 @@ yet.
"""
from ... import util, exc
-from ...util.compat import decimal
+import decimal
from ... import processors
from ... import types as sqltypes
from .base import PGDialect, \
diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
index 649a95ee6..1f118067f 100644
--- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py
+++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
@@ -147,7 +147,7 @@ import re
import logging
from ... import util, exc
-from ...util.compat import decimal
+import decimal
from ... import processors
from ...engine import result as _result
from ...sql import expression
diff --git a/lib/sqlalchemy/dialects/sybase/pyodbc.py b/lib/sqlalchemy/dialects/sybase/pyodbc.py
index 644f4edb1..283c60da3 100644
--- a/lib/sqlalchemy/dialects/sybase/pyodbc.py
+++ b/lib/sqlalchemy/dialects/sybase/pyodbc.py
@@ -36,7 +36,7 @@ from sqlalchemy.dialects.sybase.base import SybaseDialect,\
SybaseExecutionContext
from sqlalchemy.connectors.pyodbc import PyODBCConnector
from sqlalchemy import types as sqltypes, processors
-from sqlalchemy.util.compat import decimal
+import decimal
class _SybNumeric_pyodbc(sqltypes.Numeric):
diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py
index 6242f0816..b4c9b1e1c 100644
--- a/lib/sqlalchemy/engine/base.py
+++ b/lib/sqlalchemy/engine/base.py
@@ -62,6 +62,7 @@ class Connection(Connectable):
self.__savepoint_seq = 0
self.__branch = _branch
self.__invalid = False
+ self.__can_reconnect = True
if _dispatch:
self.dispatch = _dispatch
elif engine._has_events:
@@ -213,8 +214,8 @@ class Connection(Connectable):
def closed(self):
"""Return True if this connection is closed."""
- return not self.__invalid and '_Connection__connection' \
- not in self.__dict__
+ return '_Connection__connection' not in self.__dict__ \
+ and not self.__can_reconnect
@property
def invalidated(self):
@@ -232,7 +233,7 @@ class Connection(Connectable):
return self._revalidate_connection()
def _revalidate_connection(self):
- if self.__invalid:
+ if self.__can_reconnect and self.__invalid:
if self.__transaction is not None:
raise exc.InvalidRequestError(
"Can't reconnect until invalid "
@@ -461,7 +462,6 @@ class Connection(Connectable):
self.engine.dialect.do_begin(self.connection)
except Exception, e:
self._handle_dbapi_exception(e, None, None, None, None)
- raise
def _rollback_impl(self):
if self._has_events:
@@ -475,7 +475,6 @@ class Connection(Connectable):
self.__transaction = None
except Exception, e:
self._handle_dbapi_exception(e, None, None, None, None)
- raise
else:
self.__transaction = None
@@ -490,7 +489,6 @@ class Connection(Connectable):
self.__transaction = None
except Exception, e:
self._handle_dbapi_exception(e, None, None, None, None)
- raise
def _savepoint_impl(self, name=None):
if self._has_events:
@@ -577,15 +575,15 @@ class Connection(Connectable):
and will allow no further operations.
"""
-
try:
conn = self.__connection
except AttributeError:
- return
- if not self.__branch:
- conn.close()
- self.__invalid = False
- del self.__connection
+ pass
+ else:
+ if not self.__branch:
+ conn.close()
+ del self.__connection
+ self.__can_reconnect = False
self.__transaction = None
def scalar(self, object, *multiparams, **params):
@@ -692,7 +690,6 @@ class Connection(Connectable):
dialect, self, conn)
except Exception, e:
self._handle_dbapi_exception(e, None, None, None, None)
- raise
ret = ctx._exec_default(default, None)
if self.should_close_with_result:
@@ -829,7 +826,6 @@ class Connection(Connectable):
self._handle_dbapi_exception(e,
str(statement), parameters,
None, None)
- raise
if context.compiled:
context.pre_exec()
@@ -876,7 +872,6 @@ class Connection(Connectable):
parameters,
cursor,
context)
- raise
if self._has_events:
self.dispatch.after_cursor_execute(self, cursor,
@@ -951,7 +946,6 @@ class Connection(Connectable):
parameters,
cursor,
None)
- raise
def _safe_close_cursor(self, cursor):
"""Close the given cursor, catching exceptions
@@ -972,23 +966,31 @@ class Connection(Connectable):
if isinstance(e, (SystemExit, KeyboardInterrupt)):
raise
+ _reentrant_error = False
+ _is_disconnect = False
+
def _handle_dbapi_exception(self,
e,
statement,
parameters,
cursor,
context):
- if getattr(self, '_reentrant_error', False):
- # Py3K
- #raise exc.DBAPIError.instance(statement, parameters, e,
- # self.dialect.dbapi.Error) from e
- # Py2K
- raise exc.DBAPIError.instance(statement,
+
+ exc_info = sys.exc_info()
+
+ if not self._is_disconnect:
+ self._is_disconnect = isinstance(e, self.dialect.dbapi.Error) and \
+ not self.closed and \
+ self.dialect.is_disconnect(e, self.__connection, cursor)
+
+ if self._reentrant_error:
+ util.raise_from_cause(
+ exc.DBAPIError.instance(statement,
parameters,
e,
- self.dialect.dbapi.Error), \
- None, sys.exc_info()[2]
- # end Py2K
+ self.dialect.dbapi.Error),
+ exc_info
+ )
self._reentrant_error = True
try:
# non-DBAPI error - if we already got a context,
@@ -1006,45 +1008,35 @@ class Connection(Connectable):
e)
context.handle_dbapi_exception(e)
- is_disconnect = isinstance(e, self.dialect.dbapi.Error) and \
- self.dialect.is_disconnect(e, self.__connection, cursor)
-
- if is_disconnect:
- dbapi_conn_wrapper = self.connection
- self.invalidate(e)
- if not hasattr(dbapi_conn_wrapper, '_pool') or \
- dbapi_conn_wrapper._pool is self.engine.pool:
- self.engine.dispose()
- else:
+ if not self._is_disconnect:
if cursor:
self._safe_close_cursor(cursor)
self._autorollback()
- if self.should_close_with_result:
- self.close()
-
- if not should_wrap:
- return
-
- # Py3K
- #raise exc.DBAPIError.instance(
- # statement,
- # parameters,
- # e,
- # self.dialect.dbapi.Error,
- # connection_invalidated=is_disconnect) \
- # from e
- # Py2K
- raise exc.DBAPIError.instance(
- statement,
- parameters,
- e,
- self.dialect.dbapi.Error,
- connection_invalidated=is_disconnect), \
- None, sys.exc_info()[2]
- # end Py2K
+
+ if should_wrap:
+ util.raise_from_cause(
+ exc.DBAPIError.instance(
+ statement,
+ parameters,
+ e,
+ self.dialect.dbapi.Error,
+ connection_invalidated=self._is_disconnect),
+ exc_info
+ )
+
+ util.reraise(*exc_info)
finally:
del self._reentrant_error
+ if self._is_disconnect:
+ del self._is_disconnect
+ dbapi_conn_wrapper = self.connection
+ self.invalidate(e)
+ if not hasattr(dbapi_conn_wrapper, '_pool') or \
+ dbapi_conn_wrapper._pool is self.engine.pool:
+ self.engine.dispose()
+ if self.should_close_with_result:
+ self.close()
# poor man's multimethod/generic function thingy
executors = {
@@ -1107,8 +1099,8 @@ class Connection(Connectable):
trans.commit()
return ret
except:
- trans.rollback()
- raise
+ with util.safe_reraise():
+ trans.rollback()
def run_callable(self, callable_, *args, **kwargs):
"""Given a callable object or function, execute it, passing
@@ -1214,8 +1206,8 @@ class Transaction(object):
try:
self.commit()
except:
- self.rollback()
- raise
+ with util.safe_reraise():
+ self.rollback()
else:
self.rollback()
@@ -1540,8 +1532,8 @@ class Engine(Connectable, log.Identified):
try:
trans = conn.begin()
except:
- conn.close()
- raise
+ with util.safe_reraise():
+ conn.close()
return Engine._trans_ctx(conn, trans, close_with_result)
def transaction(self, callable_, *args, **kwargs):
diff --git a/lib/sqlalchemy/engine/ddl.py b/lib/sqlalchemy/engine/ddl.py
index 53d7f3340..c61a9d59c 100644
--- a/lib/sqlalchemy/engine/ddl.py
+++ b/lib/sqlalchemy/engine/ddl.py
@@ -21,7 +21,7 @@ class SchemaGenerator(DDLBase):
tables=None, **kwargs):
super(SchemaGenerator, self).__init__(connection, **kwargs)
self.checkfirst = checkfirst
- self.tables = tables and set(tables) or None
+ self.tables = tables
self.preparer = dialect.identifier_preparer
self.dialect = dialect
self.memo = {}
@@ -39,17 +39,17 @@ class SchemaGenerator(DDLBase):
(
(not self.dialect.sequences_optional or
not sequence.optional) and
- (
- not self.checkfirst or
- not self.dialect.has_sequence(
- self.connection,
- sequence.name,
- schema=sequence.schema)
- )
+ (
+ not self.checkfirst or
+ not self.dialect.has_sequence(
+ self.connection,
+ sequence.name,
+ schema=sequence.schema)
+ )
)
def visit_metadata(self, metadata):
- if self.tables:
+ if self.tables is not None:
tables = self.tables
else:
tables = metadata.tables.values()
@@ -117,7 +117,7 @@ class SchemaDropper(DDLBase):
self.memo = {}
def visit_metadata(self, metadata):
- if self.tables:
+ if self.tables is not None:
tables = self.tables
else:
tables = metadata.tables.values()
@@ -160,7 +160,7 @@ class SchemaDropper(DDLBase):
((not self.dialect.sequences_optional or
not sequence.optional) and
(not self.checkfirst or
- self.dialect.has_sequence(
+ self.dialect.has_sequence(
self.connection,
sequence.name,
schema=sequence.schema))
diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py
index 1db0f2ce4..daa9fe085 100644
--- a/lib/sqlalchemy/engine/default.py
+++ b/lib/sqlalchemy/engine/default.py
@@ -34,6 +34,10 @@ class DefaultDialect(interfaces.Dialect):
preparer = compiler.IdentifierPreparer
supports_alter = True
+ # the first value we'd get for an autoincrement
+ # column.
+ default_sequence_base = 1
+
# most DBAPIs happy with this for execute().
# not cx_oracle.
execute_sequence_format = tuple
@@ -679,7 +683,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
lastrowid = proc(lastrowid)
self.inserted_primary_key = [
- c is autoinc_col and lastrowid or v
+ lastrowid if c is autoinc_col else v
for c, v in zip(
table.primary_key,
self.inserted_primary_key)
@@ -733,7 +737,6 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
except Exception, e:
self.root_connection._handle_dbapi_exception(
e, None, None, None, self)
- raise
else:
inputsizes = {}
for key in self.compiled.bind_names.values():
@@ -752,7 +755,6 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
except Exception, e:
self.root_connection._handle_dbapi_exception(
e, None, None, None, self)
- raise
def _exec_default(self, default, type_):
if default.is_sequence:
diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py
index 7572564bb..88930081e 100644
--- a/lib/sqlalchemy/engine/result.py
+++ b/lib/sqlalchemy/engine/result.py
@@ -301,7 +301,7 @@ class ResultMetaData(object):
# this check isn't currently available if the row
# was unpickled.
if result is not None and \
- result[1] is not None:
+ result[1] is not None:
for obj in result[1]:
if key._compare_name_for_result(obj):
break
@@ -443,7 +443,6 @@ class ResultProxy(object):
except Exception, e:
self.connection._handle_dbapi_exception(
e, None, None, self.cursor, self.context)
- raise
@property
def lastrowid(self):
@@ -467,7 +466,6 @@ class ResultProxy(object):
self.connection._handle_dbapi_exception(
e, None, None,
self._saved_cursor, self.context)
- raise
@property
def returns_rows(self):
@@ -752,7 +750,6 @@ class ResultProxy(object):
self.connection._handle_dbapi_exception(
e, None, None,
self.cursor, self.context)
- raise
def fetchmany(self, size=None):
"""Fetch many rows, just like DB-API
@@ -772,7 +769,6 @@ class ResultProxy(object):
self.connection._handle_dbapi_exception(
e, None, None,
self.cursor, self.context)
- raise
def fetchone(self):
"""Fetch one row, just like DB-API ``cursor.fetchone()``.
@@ -792,7 +788,6 @@ class ResultProxy(object):
self.connection._handle_dbapi_exception(
e, None, None,
self.cursor, self.context)
- raise
def first(self):
"""Fetch the first row and then close the result set unconditionally.
@@ -809,7 +804,6 @@ class ResultProxy(object):
self.connection._handle_dbapi_exception(
e, None, None,
self.cursor, self.context)
- raise
try:
if row is not None:
diff --git a/lib/sqlalchemy/ext/declarative/api.py b/lib/sqlalchemy/ext/declarative/api.py
index af77919a7..6f3ffddc7 100644
--- a/lib/sqlalchemy/ext/declarative/api.py
+++ b/lib/sqlalchemy/ext/declarative/api.py
@@ -38,7 +38,7 @@ def has_inherited_table(cls):
"""Given a class, return True if any of the classes it inherits from has a
mapped table, otherwise return False.
"""
- for class_ in cls.__mro__:
+ for class_ in cls.__mro__[1:]:
if getattr(class_, '__table__', None) is not None:
return True
return False
diff --git a/lib/sqlalchemy/orm/attributes.py b/lib/sqlalchemy/orm/attributes.py
index c9385daaa..3eda127fd 100644
--- a/lib/sqlalchemy/orm/attributes.py
+++ b/lib/sqlalchemy/orm/attributes.py
@@ -174,6 +174,49 @@ class QueryableAttribute(interfaces._MappedAttribute,
# TODO: conditionally attach this method based on clause_element ?
return self
+
+ @util.memoized_property
+ def info(self):
+ """Return the 'info' dictionary for the underlying SQL element.
+
+ The behavior here is as follows:
+
+ * If the attribute is a column-mapped property, i.e.
+ :class:`.ColumnProperty`, which is mapped directly
+ to a schema-level :class:`.Column` object, this attribute
+ will return the :attr:`.SchemaItem.info` dictionary associated
+ with the core-level :class:`.Column` object.
+
+ * If the attribute is a :class:`.ColumnProperty` but is mapped to
+ any other kind of SQL expression other than a :class:`.Column`,
+ the attribute will refer to the :attr:`.MapperProperty.info` dictionary
+ associated directly with the :class:`.ColumnProperty`, assuming the SQL
+ expression itself does not have its own ``.info`` attribute
+ (which should be the case, unless a user-defined SQL construct
+ has defined one).
+
+ * If the attribute refers to any other kind of :class:`.MapperProperty`,
+ including :class:`.RelationshipProperty`, the attribute will refer
+ to the :attr:`.MapperProperty.info` dictionary associated with
+ that :class:`.MapperProperty`.
+
+ * To access the :attr:`.MapperProperty.info` dictionary of the :class:`.MapperProperty`
+ unconditionally, including for a :class:`.ColumnProperty` that's
+ associated directly with a :class:`.schema.Column`, the attribute
+ can be referred to using :attr:`.QueryableAttribute.property`
+ attribute, as ``MyClass.someattribute.property.info``.
+
+ .. versionadded:: 0.8.0
+
+ .. seealso::
+
+ :attr:`.SchemaItem.info`
+
+ :attr:`.MapperProperty.info`
+
+ """
+ return self.comparator.info
+
@util.memoized_property
def parent(self):
"""Return an inspection instance representing the parent.
@@ -433,6 +476,9 @@ class AttributeImpl(object):
self.expire_missing = expire_missing
+ def __str__(self):
+ return "%s.%s" % (self.class_.__name__, self.key)
+
def _get_active_history(self):
"""Backwards compat for impl.active_history"""
@@ -1043,11 +1089,18 @@ def backref_listeners(attribute, key, uselist):
parent_token = attribute.impl.parent_token
- def _acceptable_key_err(child_state, initiator):
+ def _acceptable_key_err(child_state, initiator, child_impl):
raise ValueError(
- "Object %s not associated with attribute of "
- "type %s" % (orm_util.state_str(child_state),
- manager_of_class(initiator.class_)[initiator.key]))
+ "Bidirectional attribute conflict detected: "
+ 'Passing object %s to attribute "%s" '
+ 'triggers a modify event on attribute "%s" '
+ 'via the backref "%s".' % (
+ orm_util.state_str(child_state),
+ initiator.parent_token,
+ child_impl.parent_token,
+ attribute.impl.parent_token
+ )
+ )
def emit_backref_from_scalar_set_event(state, child, oldchild, initiator):
if oldchild is child:
@@ -1068,8 +1121,8 @@ def backref_listeners(attribute, key, uselist):
instance_dict(child)
child_impl = child_state.manager[key].impl
if initiator.parent_token is not parent_token and \
- initiator.parent_token is not child_impl.parent_token:
- _acceptable_key_err(state, initiator)
+ initiator.parent_token is not child_impl.parent_token:
+ _acceptable_key_err(state, initiator, child_impl)
child_impl.append(
child_state,
child_dict,
@@ -1085,9 +1138,10 @@ def backref_listeners(attribute, key, uselist):
child_state, child_dict = instance_state(child), \
instance_dict(child)
child_impl = child_state.manager[key].impl
+
if initiator.parent_token is not parent_token and \
- initiator.parent_token is not child_impl.parent_token:
- _acceptable_key_err(state, initiator)
+ initiator.parent_token is not child_impl.parent_token:
+ _acceptable_key_err(state, initiator, child_impl)
child_impl.append(
child_state,
child_dict,
diff --git a/lib/sqlalchemy/orm/instrumentation.py b/lib/sqlalchemy/orm/instrumentation.py
index 51cf9edeb..0e71494c4 100644
--- a/lib/sqlalchemy/orm/instrumentation.py
+++ b/lib/sqlalchemy/orm/instrumentation.py
@@ -72,6 +72,13 @@ class ClassManager(dict):
self.manage()
self._instrument_init()
+ if '__del__' in class_.__dict__:
+ util.warn("__del__() method on class %s will "
+ "cause unreachable cycles and memory leaks, "
+ "as SQLAlchemy instrumentation often creates "
+ "reference cycles. Please remove this method." %
+ class_)
+
dispatch = event.dispatcher(events.InstanceEvents)
@property
diff --git a/lib/sqlalchemy/orm/interfaces.py b/lib/sqlalchemy/orm/interfaces.py
index b6391eba3..70743624c 100644
--- a/lib/sqlalchemy/orm/interfaces.py
+++ b/lib/sqlalchemy/orm/interfaces.py
@@ -149,7 +149,7 @@ class MapperProperty(_MappedAttribute, _InspectionAttr):
"""
- cascade = ()
+ cascade = frozenset()
"""The set of 'cascade' attribute names.
This collection is checked before the 'cascade_iterator' method is called.
@@ -209,6 +209,12 @@ class MapperProperty(_MappedAttribute, _InspectionAttr):
.. versionadded:: 0.8 Added support for .info to all
:class:`.MapperProperty` subclasses.
+ .. seealso::
+
+ :attr:`.QueryableAttribute.info`
+
+ :attr:`.SchemaItem.info`
+
"""
return {}
@@ -283,7 +289,7 @@ class MapperProperty(_MappedAttribute, _InspectionAttr):
def __repr__(self):
return '<%s at 0x%x; %s>' % (
self.__class__.__name__,
- id(self), self.key)
+ id(self), getattr(self, 'key', 'no key'))
class PropComparator(operators.ColumnOperators):
"""Defines boolean, comparison, and other operators for
@@ -390,6 +396,10 @@ class PropComparator(operators.ColumnOperators):
return self.__class__(self.prop, self._parentmapper, adapter)
+ @util.memoized_property
+ def info(self):
+ return self.property.info
+
@staticmethod
def any_op(a, b, **kwargs):
return a.any(b, **kwargs)
diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py
index 447a5fce1..c08d91b57 100644
--- a/lib/sqlalchemy/orm/mapper.py
+++ b/lib/sqlalchemy/orm/mapper.py
@@ -761,8 +761,9 @@ class Mapper(_InspectionAttr):
del self._configure_failed
if not self.non_primary and \
+ self.class_manager is not None and \
self.class_manager.is_mapped and \
- self.class_manager.mapper is self:
+ self.class_manager.mapper is self:
instrumentation.unregister_class(self.class_)
def _configure_pks(self):
@@ -1094,7 +1095,7 @@ class Mapper(_InspectionAttr):
# initialized; check for 'readonly'
if hasattr(self, '_readonly_props') and \
(not hasattr(col, 'table') or
- col.table not in self._cols_by_table):
+ col.table not in self._cols_by_table):
self._readonly_props.add(prop)
else:
@@ -1132,6 +1133,16 @@ class Mapper(_InspectionAttr):
"%r for column %r" % (syn, key, key, syn)
)
+ if key in self._props and \
+ not isinstance(prop, properties.ColumnProperty) and \
+ not isinstance(self._props[key], properties.ColumnProperty):
+ util.warn("Property %s on %s being replaced with new "
+ "property %s; the old property will be discarded" % (
+ self._props[key],
+ self,
+ prop,
+ ))
+
self._props[key] = prop
if not self.non_primary:
@@ -1923,6 +1934,8 @@ class Mapper(_InspectionAttr):
for mapper in reversed(list(self.iterate_to_root())):
if mapper.local_table in tables:
start = True
+ elif not isinstance(mapper.local_table, expression.TableClause):
+ return None
if start and not mapper.single:
allconds.append(visitors.cloned_traverse(
mapper.inherit_condition,
@@ -1992,10 +2005,20 @@ class Mapper(_InspectionAttr):
@_memoized_configured_property
def _sorted_tables(self):
table_to_mapper = {}
+
for mapper in self.base_mapper.self_and_descendants:
for t in mapper.tables:
table_to_mapper.setdefault(t, mapper)
+ extra_dependencies = []
+ for table, mapper in table_to_mapper.items():
+ super_ = mapper.inherits
+ if super_:
+ extra_dependencies.extend([
+ (super_table, table)
+ for super_table in super_.tables
+ ])
+
def skip(fk):
# attempt to skip dependencies that are not
# significant to the inheritance chain
@@ -2007,7 +2030,7 @@ class Mapper(_InspectionAttr):
if parent is not None and \
dep is not None and \
dep is not parent and \
- dep.inherit_condition is not None:
+ dep.inherit_condition is not None:
cols = set(sql_util.find_columns(dep.inherit_condition))
if parent.inherit_condition is not None:
cols = cols.union(sql_util.find_columns(
@@ -2018,7 +2041,9 @@ class Mapper(_InspectionAttr):
return False
sorted_ = sql_util.sort_tables(table_to_mapper.iterkeys(),
- skip_fn=skip)
+ skip_fn=skip,
+ extra_dependencies=extra_dependencies)
+
ret = util.OrderedDict()
for t in sorted_:
ret[t] = table_to_mapper[t]
@@ -2214,7 +2239,7 @@ def _event_on_resurrect(state):
state, state.dict, col, val)
-class _ColumnMapping(util.py25_dict):
+class _ColumnMapping(dict):
"""Error reporting helper for mapper._columntoproperty."""
def __init__(self, mapper):
diff --git a/lib/sqlalchemy/orm/persistence.py b/lib/sqlalchemy/orm/persistence.py
index d9dfff77d..e225a7c83 100644
--- a/lib/sqlalchemy/orm/persistence.py
+++ b/lib/sqlalchemy/orm/persistence.py
@@ -154,7 +154,7 @@ def _organize_states_for_save(base_mapper, states, uowtransaction):
# with the same identity key already exists as persistent.
# convert to an UPDATE if so.
if not has_identity and \
- instance_key in uowtransaction.session.identity_map:
+ instance_key in uowtransaction.session.identity_map:
instance = \
uowtransaction.session.identity_map[instance_key]
existing = attributes.instance_state(instance)
diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py
index c618e89b2..9f8721de9 100644
--- a/lib/sqlalchemy/orm/properties.py
+++ b/lib/sqlalchemy/orm/properties.py
@@ -193,6 +193,14 @@ class ColumnProperty(StrategizedProperty):
"parententity": self._parentmapper,
"parentmapper": self._parentmapper})
+ @util.memoized_property
+ def info(self):
+ ce = self.__clause_element__()
+ try:
+ return ce.info
+ except AttributeError:
+ return self.prop.info
+
def __getattr__(self, key):
"""proxy attribute access down to the mapped column.
@@ -230,6 +238,8 @@ class RelationshipProperty(StrategizedProperty):
strategy_wildcard_key = 'relationship:*'
+ _dependency_processor = None
+
def __init__(self, argument,
secondary=None, primaryjoin=None,
secondaryjoin=None,
@@ -252,7 +262,7 @@ class RelationshipProperty(StrategizedProperty):
load_on_pending=False,
strategy_class=None, _local_remote_pairs=None,
query_class=None,
- info=None):
+ info=None):
self.uselist = uselist
self.argument = argument
@@ -297,17 +307,8 @@ class RelationshipProperty(StrategizedProperty):
self._reverse_property = set()
- if cascade is not False:
- self.cascade = CascadeOptions(cascade)
- else:
- self.cascade = CascadeOptions("save-update, merge")
-
- if self.passive_deletes == 'all' and \
- ("delete" in self.cascade or
- "delete-orphan" in self.cascade):
- raise sa_exc.ArgumentError(
- "Can't set passive_deletes='all' in conjunction "
- "with 'delete' or 'delete-orphan' cascade")
+ self.cascade = cascade if cascade is not False \
+ else "save-update, merge"
self.order_by = order_by
@@ -723,8 +724,8 @@ class RelationshipProperty(StrategizedProperty):
if self.property._use_get:
return sql.and_(*[
sql.or_(
- adapt(x) != state_bindparam(adapt(x), state, y),
- adapt(x) == None)
+ adapt(x) != state_bindparam(adapt(x), state, y),
+ adapt(x) == None)
for (x, y) in self.property.local_remote_pairs])
criterion = sql.and_(*[x == y for (x, y) in
@@ -838,7 +839,7 @@ class RelationshipProperty(StrategizedProperty):
if (source_state, r) in _recursive:
return
- if not "merge" in self.cascade:
+ if not "merge" in self._cascade:
return
if self.key not in source_dict:
@@ -916,7 +917,7 @@ class RelationshipProperty(StrategizedProperty):
def cascade_iterator(self, type_, state, dict_,
visited_states, halt_on=None):
- #assert type_ in self.cascade
+ #assert type_ in self._cascade
# only actively lazy load on the 'delete' cascade
if type_ != 'delete' or self.passive_deletes:
@@ -933,7 +934,7 @@ class RelationshipProperty(StrategizedProperty):
passive=passive)
skip_pending = type_ == 'refresh-expire' and 'delete-orphan' \
- not in self.cascade
+ not in self._cascade
for instance_state, c in tuples:
if instance_state in visited_states:
@@ -980,7 +981,7 @@ class RelationshipProperty(StrategizedProperty):
'does not reference mapper %s' % (key, self, other,
self.parent))
if self.direction in (ONETOMANY, MANYTOONE) and self.direction \
- == other.direction:
+ == other.direction:
raise sa_exc.ArgumentError('%s and back-reference %s are '
'both of the same direction %r. Did you mean to '
'set remote_side on the many-to-one side ?'
@@ -1025,7 +1026,7 @@ class RelationshipProperty(StrategizedProperty):
self._check_conflicts()
self._process_dependent_arguments()
self._setup_join_conditions()
- self._check_cascade_settings()
+ self._check_cascade_settings(self._cascade)
self._post_init()
self._generate_backref()
super(RelationshipProperty, self).do_init()
@@ -1043,7 +1044,7 @@ class RelationshipProperty(StrategizedProperty):
for attr in (
'order_by', 'primaryjoin', 'secondaryjoin',
'secondary', '_user_defined_foreign_keys', 'remote_side',
- ):
+ ):
attr_value = getattr(self, attr)
if util.callable(attr_value):
setattr(self, attr, attr_value())
@@ -1080,10 +1081,6 @@ class RelationshipProperty(StrategizedProperty):
self.target = self.mapper.mapped_table
- if self.cascade.delete_orphan:
- self.mapper.primary_mapper()._delete_orphans.append(
- (self.key, self.parent.class_)
- )
def _setup_join_conditions(self):
self._join_condition = jc = relationships.JoinCondition(
@@ -1134,29 +1131,58 @@ class RelationshipProperty(StrategizedProperty):
if not self.parent.concrete:
for inheriting in self.parent.iterate_to_root():
if inheriting is not self.parent \
- and inheriting.has_property(self.key):
+ and inheriting.has_property(self.key):
util.warn("Warning: relationship '%s' on mapper "
"'%s' supersedes the same relationship "
"on inherited mapper '%s'; this can "
"cause dependency issues during flush"
% (self.key, self.parent, inheriting))
- def _check_cascade_settings(self):
- if self.cascade.delete_orphan and not self.single_parent \
+ def _get_cascade(self):
+ """Return the current cascade setting for this
+ :class:`.RelationshipProperty`.
+ """
+ return self._cascade
+
+ def _set_cascade(self, cascade):
+ cascade = CascadeOptions(cascade)
+ if 'mapper' in self.__dict__:
+ self._check_cascade_settings(cascade)
+ self._cascade = cascade
+
+ if self._dependency_processor:
+ self._dependency_processor.cascade = cascade
+
+ cascade = property(_get_cascade, _set_cascade)
+
+ def _check_cascade_settings(self, cascade):
+ if cascade.delete_orphan and not self.single_parent \
and (self.direction is MANYTOMANY or self.direction
is MANYTOONE):
raise sa_exc.ArgumentError(
'On %s, delete-orphan cascade is not supported '
- 'on a many-to-many or many-to-one relationship '
- 'when single_parent is not set. Set '
- 'single_parent=True on the relationship().'
- % self)
+ 'on a many-to-many or many-to-one relationship '
+ 'when single_parent is not set. Set '
+ 'single_parent=True on the relationship().'
+ % self)
if self.direction is MANYTOONE and self.passive_deletes:
util.warn("On %s, 'passive_deletes' is normally configured "
"on one-to-many, one-to-one, many-to-many "
"relationships only."
% self)
+ if self.passive_deletes == 'all' and \
+ ("delete" in cascade or
+ "delete-orphan" in cascade):
+ raise sa_exc.ArgumentError(
+ "On %s, can't set passive_deletes='all' in conjunction "
+ "with 'delete' or 'delete-orphan' cascade" % self)
+
+ if cascade.delete_orphan:
+ self.mapper.primary_mapper()._delete_orphans.append(
+ (self.key, self.parent.class_)
+ )
+
def _columns_are_mapped(self, *cols):
"""Return True if all columns in the given collection are
mapped by the tables referenced by this :class:`.Relationship`.
@@ -1164,10 +1190,10 @@ class RelationshipProperty(StrategizedProperty):
"""
for c in cols:
if self.secondary is not None \
- and self.secondary.c.contains_column(c):
+ and self.secondary.c.contains_column(c):
continue
if not self.parent.mapped_table.c.contains_column(c) and \
- not self.target.c.contains_column(c):
+ not self.target.c.contains_column(c):
return False
return True
@@ -1183,11 +1209,15 @@ class RelationshipProperty(StrategizedProperty):
else:
backref_key, kwargs = self.backref
mapper = self.mapper.primary_mapper()
- if mapper.has_property(backref_key):
- raise sa_exc.ArgumentError("Error creating backref "
- "'%s' on relationship '%s': property of that "
- "name exists on mapper '%s'" % (backref_key,
- self, mapper))
+
+ check = set(mapper.iterate_to_root()).\
+ union(mapper.self_and_descendants)
+ for m in check:
+ if m.has_property(backref_key):
+ raise sa_exc.ArgumentError("Error creating backref "
+ "'%s' on relationship '%s': property of that "
+ "name exists on mapper '%s'" % (backref_key,
+ self, m))
# determine primaryjoin/secondaryjoin for the
# backref. Use the one we had, so that
diff --git a/lib/sqlalchemy/orm/relationships.py b/lib/sqlalchemy/orm/relationships.py
index fd0c54d88..9e44e01f7 100644
--- a/lib/sqlalchemy/orm/relationships.py
+++ b/lib/sqlalchemy/orm/relationships.py
@@ -352,7 +352,7 @@ class JoinCondition(object):
return
if "foreign" not in binary.left._annotations and \
- "foreign" not in binary.right._annotations:
+ "foreign" not in binary.right._annotations:
col = is_foreign(binary.left, binary.right)
if col is not None:
if col.compare(binary.left):
@@ -451,12 +451,11 @@ class JoinCondition(object):
def visit_binary(binary):
equated = binary.left.compare(binary.right)
if isinstance(binary.left, expression.ColumnClause) and \
- isinstance(binary.right, expression.ColumnClause):
+ isinstance(binary.right, expression.ColumnClause):
# assume one to many - FKs are "remote"
if fn(binary.left):
binary.left = binary.left._annotate({"remote": True})
- if fn(binary.right) and \
- not equated:
+ if fn(binary.right) and not equated:
binary.right = binary.right._annotate(
{"remote": True})
else:
@@ -507,9 +506,9 @@ class JoinCondition(object):
def proc_left_right(left, right):
if isinstance(left, expression.ColumnClause) and \
- isinstance(right, expression.ColumnClause):
+ isinstance(right, expression.ColumnClause):
if self.child_selectable.c.contains_column(right) and \
- self.parent_selectable.c.contains_column(left):
+ self.parent_selectable.c.contains_column(left):
right = right._annotate({"remote": True})
else:
self._warn_non_column_elements()
@@ -532,8 +531,7 @@ class JoinCondition(object):
not self.parent_local_selectable.c.\
contains_column(element)
or self.child_local_selectable.c.\
- contains_column(element)
- ):
+ contains_column(element)):
return element._annotate({"remote": True})
self.primaryjoin = visitors.replacement_traverse(
self.primaryjoin, {}, repl)
@@ -568,7 +566,7 @@ class JoinCondition(object):
def locals_(elem):
if "remote" not in elem._annotations and \
- elem in local_side:
+ elem in local_side:
return elem._annotate({"local": True})
self.primaryjoin = visitors.replacement_traverse(
self.primaryjoin, {}, locals_
@@ -603,7 +601,7 @@ class JoinCondition(object):
can_sync = bool(self.secondary_synchronize_pairs)
if self.support_sync and can_sync or \
- (not self.support_sync and has_foreign):
+ (not self.support_sync and has_foreign):
return
# from here below is just determining the best error message
@@ -685,8 +683,7 @@ class JoinCondition(object):
"Ensure that only those columns referring "
"to a parent column are marked as foreign, "
"either via the foreign() annotation or "
- "via the foreign_keys argument."
- % self.prop)
+ "via the foreign_keys argument." % self.prop)
elif onetomany_fk:
self.direction = ONETOMANY
elif manytoone_fk:
@@ -716,14 +713,14 @@ class JoinCondition(object):
def visit_binary(binary, left, right):
if "remote" in right._annotations and \
"remote" not in left._annotations and \
- self.can_be_synced_fn(left):
+ self.can_be_synced_fn(left):
lrp.add((left, right))
elif "remote" in left._annotations and \
"remote" not in right._annotations and \
- self.can_be_synced_fn(right):
+ self.can_be_synced_fn(right):
lrp.add((right, left))
if binary.operator is operators.eq and \
- self.can_be_synced_fn(left, right):
+ self.can_be_synced_fn(left, right):
if "foreign" in right._annotations:
collection.append((left, right))
elif "foreign" in left._annotations:
diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py
index 5fb9b514d..f7a5558f1 100644
--- a/lib/sqlalchemy/orm/session.py
+++ b/lib/sqlalchemy/orm/session.py
@@ -3,9 +3,10 @@
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
"""Provides the Session class and related utilities."""
+from __future__ import with_statement
+
import weakref
from .. import util, sql, engine, exc as sa_exc, event
from ..sql import util as sql_util, expression
@@ -59,7 +60,9 @@ class _SessionClassMethods(object):
ACTIVE = util.symbol('ACTIVE')
PREPARED = util.symbol('PREPARED')
+COMMITTED = util.symbol('COMMITTED')
DEACTIVE = util.symbol('DEACTIVE')
+CLOSED = util.symbol('CLOSED')
class SessionTransaction(object):
"""A :class:`.Session`-level transaction.
@@ -164,46 +167,51 @@ class SessionTransaction(object):
def is_active(self):
return self.session is not None and self._state is ACTIVE
- def _assert_is_active(self):
- self._assert_is_open()
- if self._state is PREPARED:
+ def _assert_active(self, prepared_ok=False,
+ rollback_ok=False,
+ closed_msg="This transaction is closed"):
+ if self._state is COMMITTED:
raise sa_exc.InvalidRequestError(
- "This session is in 'prepared' state, where no further "
- "SQL can be emitted until the transaction is fully "
- "committed."
+ "This session is in 'committed' state; no further "
+ "SQL can be emitted within this transaction."
)
- elif self._state is DEACTIVE:
- if self._rollback_exception:
+ elif self._state is PREPARED:
+ if not prepared_ok:
raise sa_exc.InvalidRequestError(
- "This Session's transaction has been rolled back "
- "due to a previous exception during flush."
- " To begin a new transaction with this Session, "
- "first issue Session.rollback()."
- " Original exception was: %s"
- % self._rollback_exception
- )
- else:
- raise sa_exc.InvalidRequestError(
- "This Session's transaction has been rolled back "
- "by a nested rollback() call. To begin a new "
- "transaction, issue Session.rollback() first."
+ "This session is in 'prepared' state; no further "
+ "SQL can be emitted within this transaction."
)
-
- def _assert_is_open(self, error_msg="The transaction is closed"):
- if self.session is None:
- raise sa_exc.ResourceClosedError(error_msg)
+ elif self._state is DEACTIVE:
+ if not rollback_ok:
+ if self._rollback_exception:
+ raise sa_exc.InvalidRequestError(
+ "This Session's transaction has been rolled back "
+ "due to a previous exception during flush."
+ " To begin a new transaction with this Session, "
+ "first issue Session.rollback()."
+ " Original exception was: %s"
+ % self._rollback_exception
+ )
+ else:
+ raise sa_exc.InvalidRequestError(
+ "This Session's transaction has been rolled back "
+ "by a nested rollback() call. To begin a new "
+ "transaction, issue Session.rollback() first."
+ )
+ elif self._state is CLOSED:
+ raise sa_exc.ResourceClosedError(closed_msg)
@property
def _is_transaction_boundary(self):
return self.nested or not self._parent
def connection(self, bindkey, **kwargs):
- self._assert_is_active()
+ self._assert_active()
bind = self.session.get_bind(bindkey, **kwargs)
return self._connection_for_bind(bind)
def _begin(self, nested=False):
- self._assert_is_active()
+ self._assert_active()
return SessionTransaction(
self.session, self, nested=nested)
@@ -270,7 +278,7 @@ class SessionTransaction(object):
def _connection_for_bind(self, bind):
- self._assert_is_active()
+ self._assert_active()
if bind in self._connections:
return self._connections[bind][0]
@@ -304,11 +312,12 @@ class SessionTransaction(object):
def prepare(self):
if self._parent is not None or not self.session.twophase:
raise sa_exc.InvalidRequestError(
- "Only root two phase transactions of can be prepared")
+ "'twophase' mode not enabled, or not root transaction; "
+ "can't prepare.")
self._prepare_impl()
def _prepare_impl(self):
- self._assert_is_active()
+ self._assert_active()
if self._parent is None or self.nested:
self.session.dispatch.before_commit(self.session)
@@ -333,13 +342,13 @@ class SessionTransaction(object):
for t in set(self._connections.values()):
t[1].prepare()
except:
- self.rollback()
- raise
+ with util.safe_reraise():
+ self.rollback()
self._state = PREPARED
def commit(self):
- self._assert_is_open()
+ self._assert_active(prepared_ok=True)
if self._state is not PREPARED:
self._prepare_impl()
@@ -347,6 +356,7 @@ class SessionTransaction(object):
for t in set(self._connections.values()):
t[1].commit()
+ self._state = COMMITTED
self.session.dispatch.after_commit(self.session)
if self.session._enable_transaction_accounting:
@@ -356,7 +366,7 @@ class SessionTransaction(object):
return self._parent
def rollback(self, _capture_exception=False):
- self._assert_is_open()
+ self._assert_active(prepared_ok=True, rollback_ok=True)
stx = self.session.transaction
if stx is not self:
@@ -375,7 +385,7 @@ class SessionTransaction(object):
sess = self.session
if self.session._enable_transaction_accounting and \
- not sess._is_clean():
+ not sess._is_clean():
# if items were added, deleted, or mutated
# here, we need to re-restore the snapshot
util.warn(
@@ -405,13 +415,13 @@ class SessionTransaction(object):
self.session.transaction = self._parent
if self._parent is None:
for connection, transaction, autoclose in \
- set(self._connections.values()):
+ set(self._connections.values()):
if autoclose:
connection.close()
else:
transaction.close()
- self._state = DEACTIVE
+ self._state = CLOSED
if self.session.dispatch.after_transaction_end:
self.session.dispatch.after_transaction_end(self.session, self)
@@ -425,16 +435,15 @@ class SessionTransaction(object):
return self
def __exit__(self, type, value, traceback):
- self._assert_is_open("Cannot end transaction context. The transaction "
- "was closed from within the context")
+ self._assert_active(prepared_ok=True)
if self.session.transaction is None:
return
if type is None:
try:
self.commit()
except:
- self.rollback()
- raise
+ with util.safe_reraise():
+ self.rollback()
else:
self.rollback()
@@ -1718,13 +1727,13 @@ class Session(_SessionClassMethods):
def _before_attach(self, state):
if state.session_id != self.hash_key and \
- self.dispatch.before_attach:
+ self.dispatch.before_attach:
self.dispatch.before_attach(self, state.obj())
def _attach(self, state, include_before=False):
if state.key and \
state.key in self.identity_map and \
- not self.identity_map.contains_state(state):
+ not self.identity_map.contains_state(state):
raise sa_exc.InvalidRequestError("Can't attach instance "
"%s; another instance with key %s is already "
"present in this session."
@@ -1740,9 +1749,11 @@ class Session(_SessionClassMethods):
if state.session_id != self.hash_key:
if include_before and \
- self.dispatch.before_attach:
+ self.dispatch.before_attach:
self.dispatch.before_attach(self, state.obj())
state.session_id = self.hash_key
+ if state.modified and state._strong_obj is None:
+ state._strong_obj = state.obj()
if self.dispatch.after_attach:
self.dispatch.after_attach(self, state.obj())
@@ -1920,8 +1931,8 @@ class Session(_SessionClassMethods):
transaction.commit()
except:
- transaction.rollback(_capture_exception=True)
- raise
+ with util.safe_reraise():
+ transaction.rollback(_capture_exception=True)
def is_modified(self, instance, include_collections=True,
passive=True):
diff --git a/lib/sqlalchemy/orm/state.py b/lib/sqlalchemy/orm/state.py
index 4bc689e94..193678c2f 100644
--- a/lib/sqlalchemy/orm/state.py
+++ b/lib/sqlalchemy/orm/state.py
@@ -164,7 +164,7 @@ class InstanceState(interfaces._InspectionAttr):
return bool(self.key)
def _detach(self):
- self.session_id = None
+ self.session_id = self._strong_obj = None
def _dispose(self):
self._detach()
@@ -176,7 +176,7 @@ class InstanceState(interfaces._InspectionAttr):
instance_dict.discard(self)
self.callables = {}
- self.session_id = None
+ self.session_id = self._strong_obj = None
del self.obj
def obj(self):
@@ -259,9 +259,6 @@ class InstanceState(interfaces._InspectionAttr):
self.expired = state.get('expired', False)
self.callables = state.get('callables', {})
- if self.modified:
- self._strong_obj = inst
-
self.__dict__.update([
(k, state[k]) for k in (
'key', 'load_options',
@@ -322,6 +319,7 @@ class InstanceState(interfaces._InspectionAttr):
modified_set.discard(self)
self.modified = False
+ self._strong_obj = None
self.committed_state.clear()
@@ -335,7 +333,7 @@ class InstanceState(interfaces._InspectionAttr):
for key in self.manager:
impl = self.manager[key].impl
if impl.accepts_scalar_loader and \
- (impl.expire_missing or key in dict_):
+ (impl.expire_missing or key in dict_):
self.callables[key] = self
old = dict_.pop(key, None)
if impl.collection and old is not None:
@@ -435,18 +433,22 @@ class InstanceState(interfaces._InspectionAttr):
self.committed_state[attr.key] = previous
- # the "or not self.modified" is defensive at
- # this point. The assertion below is expected
- # to be True:
# assert self._strong_obj is None or self.modified
- if self._strong_obj is None or not self.modified:
+ if (self.session_id and self._strong_obj is None) \
+ or not self.modified:
instance_dict = self._instance_dict()
if instance_dict:
instance_dict._modified.add(self)
- self._strong_obj = self.obj()
- if self._strong_obj is None:
+ # only create _strong_obj link if attached
+ # to a session
+
+ inst = self.obj()
+ if self.session_id:
+ self._strong_obj = inst
+
+ if inst is None:
raise orm_exc.ObjectDereferencedError(
"Can't emit change event for attribute '%s' - "
"parent object of type %s has been garbage "
@@ -467,7 +469,6 @@ class InstanceState(interfaces._InspectionAttr):
this step if a value was not populated in state.dict.
"""
- class_manager = self.manager
for key in keys:
self.committed_state.pop(key, None)
diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py
index 5c79de749..0eed50ea4 100644
--- a/lib/sqlalchemy/orm/strategies.py
+++ b/lib/sqlalchemy/orm/strategies.py
@@ -719,7 +719,7 @@ class SubqueryLoader(AbstractRelationshipLoader):
# produce a subquery from it.
left_alias = self._generate_from_original_query(
orig_query, leftmost_mapper,
- leftmost_attr
+ leftmost_attr, entity.mapper
)
# generate another Query that will join the
@@ -772,15 +772,18 @@ class SubqueryLoader(AbstractRelationshipLoader):
def _generate_from_original_query(self,
orig_query, leftmost_mapper,
- leftmost_attr
+ leftmost_attr, entity_mapper
):
# reformat the original query
# to look only for significant columns
q = orig_query._clone().correlate(None)
- # TODO: why does polymporphic etc. require hardcoding
- # into _adapt_col_list ? Does query.add_columns(...) work
- # with polymorphic loading ?
+ # set a real "from" if not present, as this is more
+ # accurate than just going off of the column expression
+ if not q._from_obj and entity_mapper.isa(leftmost_mapper):
+ q._set_select_from(entity_mapper)
+
+ # select from the identity columns of the outer
q._set_entities(q._adapt_col_list(leftmost_attr))
if q._order_by is False:
@@ -792,6 +795,7 @@ class SubqueryLoader(AbstractRelationshipLoader):
# the original query now becomes a subquery
# which we'll join onto.
+
embed_q = q.with_labels().subquery()
left_alias = orm_util.AliasedClass(leftmost_mapper, embed_q,
use_mapper_path=True)
diff --git a/lib/sqlalchemy/orm/unitofwork.py b/lib/sqlalchemy/orm/unitofwork.py
index 98636d935..1f5115c41 100644
--- a/lib/sqlalchemy/orm/unitofwork.py
+++ b/lib/sqlalchemy/orm/unitofwork.py
@@ -40,9 +40,9 @@ def track_cascade_events(descriptor, prop):
prop = state.manager.mapper._props[key]
item_state = attributes.instance_state(item)
- if prop.cascade.save_update and \
+ if prop._cascade.save_update and \
(prop.cascade_backrefs or key == initiator.key) and \
- not sess._contains_state(item_state):
+ not sess._contains_state(item_state):
sess._save_or_update_state(item_state)
return item
@@ -63,9 +63,9 @@ def track_cascade_events(descriptor, prop):
# expunge pending orphans
item_state = attributes.instance_state(item)
- if prop.cascade.delete_orphan and \
+ if prop._cascade.delete_orphan and \
item_state in sess._new and \
- prop.mapper._is_orphan(item_state):
+ prop.mapper._is_orphan(item_state):
sess.expunge(item)
def set_(state, newvalue, oldvalue, initiator):
@@ -83,14 +83,14 @@ def track_cascade_events(descriptor, prop):
prop = state.manager.mapper._props[key]
if newvalue is not None:
newvalue_state = attributes.instance_state(newvalue)
- if prop.cascade.save_update and \
+ if prop._cascade.save_update and \
(prop.cascade_backrefs or key == initiator.key) and \
not sess._contains_state(newvalue_state):
sess._save_or_update_state(newvalue_state)
if oldvalue is not None and \
oldvalue is not attributes.PASSIVE_NO_RESULT and \
- prop.cascade.delete_orphan:
+ prop._cascade.delete_orphan:
# possible to reach here with attributes.NEVER_SET ?
oldvalue_state = attributes.instance_state(oldvalue)
diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py
index cc9dd6ba5..f3b8e271d 100644
--- a/lib/sqlalchemy/orm/util.py
+++ b/lib/sqlalchemy/orm/util.py
@@ -1265,3 +1265,40 @@ def attribute_str(instance, attribute):
def state_attribute_str(state, attribute):
return state_str(state) + "." + attribute
+
+
+def randomize_unitofwork():
+ """Use random-ordering sets within the unit of work in order
+ to detect unit of work sorting issues.
+
+ This is a utility function that can be used to help reproduce
+ inconsistent unit of work sorting issues. For example,
+ if two kinds of objects A and B are being inserted, and
+ B has a foreign key reference to A - the A must be inserted first.
+ However, if there is no relationship between A and B, the unit of work
+ won't know to perform this sorting, and an operation may or may not
+ fail, depending on how the ordering works out. Since Python sets
+ and dictionaries have non-deterministic ordering, such an issue may
+ occur on some runs and not on others, and in practice it tends to
+ have a great dependence on the state of the interpreter. This leads
+ to so-called "heisenbugs" where changing entirely irrelevant aspects
+ of the test program still cause the failure behavior to change.
+
+ By calling ``randomize_unitofwork()`` when a script first runs, the
+ ordering of a key series of sets within the unit of work implementation
+ are randomized, so that the script can be minimized down to the fundamental
+ mapping and operation that's failing, while still reproducing the issue
+ on at least some runs.
+
+ This utility is also available when running the test suite via the
+ ``--reversetop`` flag.
+
+ .. versionadded:: 0.8.1 created a standalone version of the
+ ``--reversetop`` feature.
+
+ """
+ from sqlalchemy.orm import unitofwork, session, mapper, dependency
+ from sqlalchemy.util import topological
+ from sqlalchemy.testing.util import RandomSet
+ topological.set = unitofwork.set = session.set = mapper.set = \
+ dependency.set = RandomSet
diff --git a/lib/sqlalchemy/pool.py b/lib/sqlalchemy/pool.py
index f72082ceb..501b6d2a0 100644
--- a/lib/sqlalchemy/pool.py
+++ b/lib/sqlalchemy/pool.py
@@ -736,8 +736,8 @@ class QueuePool(Pool):
self._overflow = 0 - pool_size
self._max_overflow = max_overflow
self._timeout = timeout
- self._overflow_lock = self._max_overflow > -1 and \
- threading.Lock() or DummyLock()
+ self._overflow_lock = threading.Lock() if self._max_overflow > -1 \
+ else DummyLock()
def _do_return_conn(self, conn):
try:
diff --git a/lib/sqlalchemy/schema.py b/lib/sqlalchemy/schema.py
index 9d14bd3ca..9a07b9de4 100644
--- a/lib/sqlalchemy/schema.py
+++ b/lib/sqlalchemy/schema.py
@@ -93,14 +93,13 @@ def _get_table_key(name, schema):
def _validate_dialect_kwargs(kwargs, name):
# validate remaining kwargs that they all specify DB prefixes
- if len([k for k in kwargs
- if not re.match(
- r'^(?:%s)_' %
- '|'.join(dialects.__all__), k
- )
- ]):
- raise TypeError(
- "Invalid argument(s) for %s: %r" % (name, kwargs.keys()))
+
+ for k in kwargs:
+ m = re.match('^(.+?)_.*', k)
+ if m is None:
+ raise TypeError("Additional arguments should be "
+ "named <dialectname>_<argument>, got '%s'" % k)
+
inspection._self_inspects(SchemaItem)
@@ -2025,7 +2024,7 @@ class ColumnCollectionMixin(object):
for c in columns]
if self._pending_colargs and \
isinstance(self._pending_colargs[0], Column) and \
- self._pending_colargs[0].table is not None:
+ isinstance(self._pending_colargs[0].table, Table):
self._set_parent_with_dispatch(self._pending_colargs[0].table)
def _set_parent(self, table):
@@ -2121,7 +2120,7 @@ class CheckConstraint(Constraint):
elif _autoattach:
cols = sqlutil.find_columns(self.sqltext)
tables = set([c.table for c in cols
- if c.table is not None])
+ if isinstance(c.table, Table)])
if len(tables) == 1:
self._set_parent_with_dispatch(
tables.pop())
diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py
index 59e46de12..b902f9ffc 100644
--- a/lib/sqlalchemy/sql/compiler.py
+++ b/lib/sqlalchemy/sql/compiler.py
@@ -423,7 +423,7 @@ class SQLCompiler(engine.Compiled):
name = orig_name = column.name
if name is None:
raise exc.CompileError("Cannot compile Column object until "
- "it's 'name' is assigned.")
+ "its 'name' is assigned.")
is_literal = column.is_literal
if not is_literal and isinstance(name, sql._truncated_label):
@@ -787,14 +787,14 @@ class SQLCompiler(engine.Compiled):
existing = self.binds[name]
if existing is not bindparam:
if (existing.unique or bindparam.unique) and \
- not existing.proxy_set.intersection(bindparam.proxy_set):
+ not existing.proxy_set.intersection(
+ bindparam.proxy_set):
raise exc.CompileError(
"Bind parameter '%s' conflicts with "
"unique bind parameter of the same name" %
bindparam.key
)
- elif getattr(existing, '_is_crud', False) or \
- getattr(bindparam, '_is_crud', False):
+ elif existing._is_crud or bindparam._is_crud:
raise exc.CompileError(
"bindparam() name '%s' is reserved "
"for automatic usage in the VALUES or SET "
@@ -992,13 +992,15 @@ class SQLCompiler(engine.Compiled):
else:
self.result_map[keyname] = name, objects, type_
- def _label_select_column(self, select, column, populate_result_map,
+ def _label_select_column(self, select, column,
+ populate_result_map,
asfrom, column_clause_args,
+ name=None,
within_columns_clause=True):
"""produce labeled columns present in a select()."""
if column.type._has_column_expression and \
- populate_result_map:
+ populate_result_map:
col_expr = column.type.column_expression(column)
add_to_result_map = lambda keyname, name, objects, type_: \
self._add_to_result_map(
@@ -1023,13 +1025,11 @@ class SQLCompiler(engine.Compiled):
else:
result_expr = col_expr
- elif select is not None and \
- select.use_labels and \
- column._label:
+ elif select is not None and name:
result_expr = _CompileLabel(
col_expr,
- column._label,
- alt_names=(column._key_label, )
+ name,
+ alt_names=(column._key_label,)
)
elif \
@@ -1037,7 +1037,7 @@ class SQLCompiler(engine.Compiled):
isinstance(column, sql.ColumnClause) and \
not column.is_literal and \
column.table is not None and \
- not isinstance(column.table, sql.Select):
+ not isinstance(column.table, sql.Select):
result_expr = _CompileLabel(col_expr,
sql._as_truncated(column.name),
alt_names=(column.key,))
@@ -1086,14 +1086,9 @@ class SQLCompiler(engine.Compiled):
positional_names=None, **kwargs):
entry = self.stack and self.stack[-1] or {}
- if not asfrom:
- existingfroms = entry.get('from', None)
- else:
- # don't render correlations if we're rendering a FROM list
- # entry
- existingfroms = []
+ existingfroms = entry.get('from', None)
- froms = select._get_display_froms(existingfroms)
+ froms = select._get_display_froms(existingfroms, asfrom=asfrom)
correlate_froms = set(sql._from_objects(*froms))
@@ -1103,11 +1098,11 @@ class SQLCompiler(engine.Compiled):
# correlate_froms.union(existingfroms)
populate_result_map = force_result_map or (
- compound_index == 0 and (
- not entry or \
- entry.get('iswrapper', False)
- )
- )
+ compound_index == 0 and (
+ not entry or \
+ entry.get('iswrapper', False)
+ )
+ )
self.stack.append({'from': correlate_froms,
'iswrapper': iswrapper})
@@ -1122,10 +1117,12 @@ class SQLCompiler(engine.Compiled):
# the actual list of columns to print in the SELECT column list.
inner_columns = [
c for c in [
- self._label_select_column(select, column,
+ self._label_select_column(select,
+ column,
populate_result_map, asfrom,
- column_clause_args)
- for column in util.unique_list(select.inner_columns)
+ column_clause_args,
+ name=name)
+ for name, column in select._columns_plus_names
]
if c is not None
]
diff --git a/lib/sqlalchemy/sql/expression.py b/lib/sqlalchemy/sql/expression.py
index 490004e39..28b1c6ddd 100644
--- a/lib/sqlalchemy/sql/expression.py
+++ b/lib/sqlalchemy/sql/expression.py
@@ -181,10 +181,10 @@ def select(columns=None, whereclause=None, from_obj=[], **kwargs):
string arguments, which will be converted as appropriate into
either :func:`text()` or :func:`literal_column()` constructs.
- See also:
+ .. seealso::
- :ref:`coretutorial_selecting` - Core Tutorial description of
- :func:`.select`.
+ :ref:`coretutorial_selecting` - Core Tutorial description of
+ :func:`.select`.
:param columns:
A list of :class:`.ClauseElement` objects, typically
@@ -464,7 +464,7 @@ def update(table, whereclause=None, values=None, inline=False, **kwargs):
as_scalar()
)
- See also:
+ .. seealso::
:ref:`inserts_and_updates` - SQL Expression
Language Tutorial
@@ -493,7 +493,7 @@ def delete(table, whereclause=None, **kwargs):
condition of the ``UPDATE`` statement. Note that the
:meth:`~Delete.where()` generative method may be used instead.
- See also:
+ .. seealso::
:ref:`deletes` - SQL Expression Tutorial
@@ -2873,6 +2873,8 @@ class BindParameter(ColumnElement):
__visit_name__ = 'bindparam'
quote = None
+ _is_crud = False
+
def __init__(self, key, value, type_=None, unique=False,
callable_=None,
isoutparam=False, required=False,
@@ -3073,7 +3075,7 @@ class Executable(Generative):
See :meth:`.Connection.execution_options` for a full list of
possible options.
- See also:
+ .. seealso::
:meth:`.Connection.execution_options()`
@@ -3444,15 +3446,15 @@ class Case(ColumnElement):
class FunctionElement(Executable, ColumnElement, FromClause):
"""Base for SQL function-oriented constructs.
- See also:
+ .. seealso::
- :class:`.Function` - named SQL function.
+ :class:`.Function` - named SQL function.
- :data:`.func` - namespace which produces registered or ad-hoc
- :class:`.Function` instances.
+ :data:`.func` - namespace which produces registered or ad-hoc
+ :class:`.Function` instances.
- :class:`.GenericFunction` - allows creation of registered function
- types.
+ :class:`.GenericFunction` - allows creation of registered function
+ types.
"""
@@ -3571,15 +3573,13 @@ class Function(FunctionElement):
See the superclass :class:`.FunctionElement` for a description
of public methods.
- See also:
-
- See also:
+ .. seealso::
- :data:`.func` - namespace which produces registered or ad-hoc
- :class:`.Function` instances.
+ :data:`.func` - namespace which produces registered or ad-hoc
+ :class:`.Function` instances.
- :class:`.GenericFunction` - allows creation of registered function
- types.
+ :class:`.GenericFunction` - allows creation of registered function
+ types.
"""
@@ -4725,7 +4725,9 @@ class SelectBase(Executable, FromClause):
"""return a 'scalar' representation of this selectable, embedded as a
subquery with a label.
- See also :meth:`~.SelectBase.as_scalar`.
+ .. seealso::
+
+ :meth:`~.SelectBase.as_scalar`.
"""
return self.as_scalar().label(name)
@@ -4843,9 +4845,9 @@ class SelectBase(Executable, FromClause):
result = conn.execute(statement).fetchall()
- See also:
+ .. seealso::
- :meth:`.orm.query.Query.cte` - ORM version of :meth:`.SelectBase.cte`.
+ :meth:`.orm.query.Query.cte` - ORM version of :meth:`.SelectBase.cte`.
"""
return CTE(self, name=name, recursive=recursive)
@@ -4914,6 +4916,10 @@ class SelectBase(Executable, FromClause):
The criterion will be appended to any pre-existing ORDER BY criterion.
+ This is an **in-place** mutation method; the
+ :meth:`~.SelectBase.order_by` method is preferred, as it provides standard
+ :term:`method chaining`.
+
"""
if len(clauses) == 1 and clauses[0] is None:
self._order_by_clause = ClauseList()
@@ -4927,6 +4933,10 @@ class SelectBase(Executable, FromClause):
The criterion will be appended to any pre-existing GROUP BY criterion.
+ This is an **in-place** mutation method; the
+ :meth:`~.SelectBase.group_by` method is preferred, as it provides standard
+ :term:`method chaining`.
+
"""
if len(clauses) == 1 and clauses[0] is None:
self._group_by_clause = ClauseList()
@@ -4980,7 +4990,7 @@ class CompoundSelect(SelectBase):
INTERSECT_ALL = util.symbol('INTERSECT ALL')
def __init__(self, keyword, *selects, **kwargs):
- self._should_correlate = kwargs.pop('correlate', False)
+ self._auto_correlate = kwargs.pop('correlate', False)
self.keyword = keyword
self.selects = []
@@ -5120,13 +5130,13 @@ class HasPrefixes(object):
class Select(HasPrefixes, SelectBase):
"""Represents a ``SELECT`` statement.
- See also:
+ .. seealso::
- :func:`~.expression.select` - the function which creates
- a :class:`.Select` object.
+ :func:`~.expression.select` - the function which creates
+ a :class:`.Select` object.
- :ref:`coretutorial_selecting` - Core Tutorial description
- of :func:`.select`.
+ :ref:`coretutorial_selecting` - Core Tutorial description
+ of :func:`.select`.
"""
@@ -5159,7 +5169,7 @@ class Select(HasPrefixes, SelectBase):
:class:`SelectBase` superclass.
"""
- self._should_correlate = correlate
+ self._auto_correlate = correlate
if distinct is not False:
if distinct is True:
self._distinct = True
@@ -5232,7 +5242,7 @@ class Select(HasPrefixes, SelectBase):
return froms
- def _get_display_froms(self, existing_froms=None):
+ def _get_display_froms(self, existing_froms=None, asfrom=False):
"""Return the full list of 'from' clauses to be displayed.
Takes into account a set of existing froms which may be
@@ -5258,18 +5268,29 @@ class Select(HasPrefixes, SelectBase):
# using a list to maintain ordering
froms = [f for f in froms if f not in toremove]
- if len(froms) > 1 or self._correlate or self._correlate_except:
+ if not asfrom:
if self._correlate:
- froms = [f for f in froms if f not in
- _cloned_intersection(froms,
- self._correlate)]
+ froms = [
+ f for f in froms if f not in
+ _cloned_intersection(
+ _cloned_intersection(froms, existing_froms or ()),
+ self._correlate
+ )
+ ]
if self._correlate_except:
- froms = [f for f in froms if f in _cloned_intersection(froms,
- self._correlate_except)]
- if self._should_correlate and existing_froms:
- froms = [f for f in froms if f not in
- _cloned_intersection(froms,
- existing_froms)]
+ froms = [
+ f for f in froms if f in
+ _cloned_intersection(
+ froms,
+ self._correlate_except
+ )
+ ]
+
+ if self._auto_correlate and existing_froms and len(froms) > 1:
+ froms = [
+ f for f in froms if f not in
+ _cloned_intersection(froms, existing_froms)
+ ]
if not len(froms):
raise exc.InvalidRequestError("Select statement '%s"
@@ -5642,7 +5663,7 @@ class Select(HasPrefixes, SelectBase):
:ref:`correlated_subqueries`
"""
- self._should_correlate = False
+ self._auto_correlate = False
if fromclauses and fromclauses[0] is None:
self._correlate = ()
else:
@@ -5662,7 +5683,7 @@ class Select(HasPrefixes, SelectBase):
:ref:`correlated_subqueries`
"""
- self._should_correlate = False
+ self._auto_correlate = False
if fromclauses and fromclauses[0] is None:
self._correlate_except = ()
else:
@@ -5671,9 +5692,15 @@ class Select(HasPrefixes, SelectBase):
def append_correlation(self, fromclause):
"""append the given correlation expression to this select()
- construct."""
+ construct.
+
+ This is an **in-place** mutation method; the
+ :meth:`~.Select.correlate` method is preferred, as it provides standard
+ :term:`method chaining`.
- self._should_correlate = False
+ """
+
+ self._auto_correlate = False
self._correlate = set(self._correlate).union(
_interpret_as_from(f) for f in fromclause)
@@ -5681,6 +5708,10 @@ class Select(HasPrefixes, SelectBase):
"""append the given column expression to the columns clause of this
select() construct.
+ This is an **in-place** mutation method; the
+ :meth:`~.Select.column` method is preferred, as it provides standard
+ :term:`method chaining`.
+
"""
self._reset_exported()
column = _interpret_as_column_or_from(column)
@@ -5694,6 +5725,10 @@ class Select(HasPrefixes, SelectBase):
"""append the given columns clause prefix expression to this select()
construct.
+ This is an **in-place** mutation method; the
+ :meth:`~.Select.prefix_with` method is preferred, as it provides standard
+ :term:`method chaining`.
+
"""
clause = _literal_as_text(clause)
self._prefixes = self._prefixes + (clause,)
@@ -5704,6 +5739,10 @@ class Select(HasPrefixes, SelectBase):
The expression will be joined to existing WHERE criterion via AND.
+ This is an **in-place** mutation method; the
+ :meth:`~.Select.where` method is preferred, as it provides standard
+ :term:`method chaining`.
+
"""
self._reset_exported()
whereclause = _literal_as_text(whereclause)
@@ -5719,6 +5758,10 @@ class Select(HasPrefixes, SelectBase):
The expression will be joined to existing HAVING criterion via AND.
+ This is an **in-place** mutation method; the
+ :meth:`~.Select.having` method is preferred, as it provides standard
+ :term:`method chaining`.
+
"""
if self._having is not None:
self._having = and_(self._having, _literal_as_text(having))
@@ -5729,18 +5772,56 @@ class Select(HasPrefixes, SelectBase):
"""append the given FromClause expression to this select() construct's
FROM clause.
+ This is an **in-place** mutation method; the
+ :meth:`~.Select.select_from` method is preferred, as it provides standard
+ :term:`method chaining`.
+
"""
self._reset_exported()
fromclause = _interpret_as_from(fromclause)
self._from_obj = self._from_obj.union([fromclause])
+
+ @_memoized_property
+ def _columns_plus_names(self):
+ if self.use_labels:
+ names = set()
+ def name_for_col(c):
+ if c._label is None:
+ return (None, c)
+ name = c._label
+ if name in names:
+ name = c.anon_label
+ else:
+ names.add(name)
+ return name, c
+
+ return [
+ name_for_col(c)
+ for c in util.unique_list(_select_iterables(self._raw_columns))
+ ]
+ else:
+ return [
+ (None, c)
+ for c in util.unique_list(_select_iterables(self._raw_columns))
+ ]
+
def _populate_column_collection(self):
- for c in self.inner_columns:
- if hasattr(c, '_make_proxy'):
- c._make_proxy(self,
- name=c._label if self.use_labels else None,
- key=c._key_label if self.use_labels else None,
- name_is_truncatable=True)
+ for name, c in self._columns_plus_names:
+ if not hasattr(c, '_make_proxy'):
+ continue
+ if name is None:
+ key = None
+ elif self.use_labels:
+ key = c._key_label
+ if key is not None and key in self.c:
+ key = c.anon_label
+ else:
+ key = None
+
+ c._make_proxy(self, key=key,
+ name=name,
+ name_is_truncatable=True)
def _refresh_for_new_column(self, column):
for fromclause in self._froms:
@@ -6124,9 +6205,9 @@ class Insert(ValuesBase):
The :class:`.Insert` object is created using the
:func:`~.expression.insert()` function.
- See also:
+ .. seealso::
- :ref:`coretutorial_insert_expressions`
+ :ref:`coretutorial_insert_expressions`
"""
__visit_name__ = 'insert'
diff --git a/lib/sqlalchemy/sql/util.py b/lib/sqlalchemy/sql/util.py
index fd138cfec..520c90f99 100644
--- a/lib/sqlalchemy/sql/util.py
+++ b/lib/sqlalchemy/sql/util.py
@@ -13,12 +13,14 @@ from collections import deque
"""Utility functions that build upon SQL and Schema constructs."""
-def sort_tables(tables, skip_fn=None):
+def sort_tables(tables, skip_fn=None, extra_dependencies=None):
"""sort a collection of Table objects in order of
their foreign-key dependency."""
tables = list(tables)
tuples = []
+ if extra_dependencies is not None:
+ tuples.extend(extra_dependencies)
def visit_foreign_key(fkey):
if fkey.use_alter:
@@ -507,6 +509,9 @@ class AnnotatedColumnElement(Annotated):
"""pull 'key' from parent, if not present"""
return self._Annotated__element.key
+ @util.memoized_property
+ def info(self):
+ return self._Annotated__element.info
# hard-generate Annotated subclasses. this technique
# is used instead of on-the-fly types (i.e. type.__new__())
diff --git a/lib/sqlalchemy/testing/assertsql.py b/lib/sqlalchemy/testing/assertsql.py
index 864ce5b4d..0e250f356 100644
--- a/lib/sqlalchemy/testing/assertsql.py
+++ b/lib/sqlalchemy/testing/assertsql.py
@@ -174,6 +174,8 @@ class CompiledSQL(SQLMatchRule):
params = self.params
if not isinstance(params, list):
params = [params]
+ else:
+ params = list(params)
all_params = list(params)
all_received = list(_received_parameters)
while params:
diff --git a/lib/sqlalchemy/testing/plugin/noseplugin.py b/lib/sqlalchemy/testing/plugin/noseplugin.py
index 4ce76363e..5bd7ff3cd 100644
--- a/lib/sqlalchemy/testing/plugin/noseplugin.py
+++ b/lib/sqlalchemy/testing/plugin/noseplugin.py
@@ -215,11 +215,8 @@ def _set_table_options(options, file_config):
@post
def _reverse_topological(options, file_config):
if options.reversetop:
- from sqlalchemy.orm import unitofwork, session, mapper, dependency
- from sqlalchemy.util import topological
- from sqlalchemy.testing.util import RandomSet
- topological.set = unitofwork.set = session.set = mapper.set = \
- dependency.set = RandomSet
+ from sqlalchemy.orm.util import randomize_unitofwork
+ randomize_unitofwork()
def _requirements_opt(options, opt_str, value, parser):
@@ -361,7 +358,6 @@ class NoseSQLAlchemy(Plugin):
The class being examined by the selector
"""
-
if not issubclass(cls, fixtures.TestBase):
return False
elif cls.__name__.startswith('_'):
diff --git a/lib/sqlalchemy/testing/profiling.py b/lib/sqlalchemy/testing/profiling.py
index ae9d176b7..19a9731be 100644
--- a/lib/sqlalchemy/testing/profiling.py
+++ b/lib/sqlalchemy/testing/profiling.py
@@ -14,11 +14,12 @@ import pstats
import time
import collections
from .. import util
+
try:
import cProfile
except ImportError:
cProfile = None
-from ..util.compat import jython, pypy, win32
+from ..util import jython, pypy, win32, update_wrapper
_current_test = None
@@ -210,7 +211,6 @@ class ProfileStatsFile(object):
profile_f.write("%s %s %s\n" % (test_key, platform_key, c))
profile_f.close()
-from sqlalchemy.util.compat import update_wrapper
def function_call_count(variance=0.05):
diff --git a/lib/sqlalchemy/testing/runner.py b/lib/sqlalchemy/testing/runner.py
index 6ec73d7c8..2bdbaebd1 100644
--- a/lib/sqlalchemy/testing/runner.py
+++ b/lib/sqlalchemy/testing/runner.py
@@ -31,3 +31,13 @@ import nose
def main():
nose.main(addplugins=[NoseSQLAlchemy()])
+
+def setup_py_test():
+ """Runner to use for the 'test_suite' entry of your setup.py.
+
+ Prevents any name clash shenanigans from the command line
+ argument "test" that the "setup.py test" command sends
+ to nose.
+
+ """
+ nose.main(addplugins=[NoseSQLAlchemy()], argv=['runner'])
diff --git a/lib/sqlalchemy/testing/schema.py b/lib/sqlalchemy/testing/schema.py
index ad233ec22..325d74f1e 100644
--- a/lib/sqlalchemy/testing/schema.py
+++ b/lib/sqlalchemy/testing/schema.py
@@ -66,18 +66,27 @@ def Column(*args, **kw):
col = schema.Column(*args, **kw)
if 'test_needs_autoincrement' in test_opts and \
- kw.get('primary_key', False) and \
- exclusions.against('firebird', 'oracle'):
- def add_seq(c, tbl):
- c._init_items(
- schema.Sequence(_truncate_name(
- config.db.dialect, tbl.name + '_' + c.name + '_seq'),
- optional=True)
- )
- event.listen(col, 'after_parent_attach', add_seq, propagate=True)
+ kw.get('primary_key', False):
+
+ # allow any test suite to pick up on this
+ col.info['test_needs_autoincrement'] = True
+
+ # hardcoded rule for firebird, oracle; this should
+ # be moved out
+ if exclusions.against('firebird', 'oracle'):
+ def add_seq(c, tbl):
+ c._init_items(
+ schema.Sequence(_truncate_name(
+ config.db.dialect, tbl.name + '_' + c.name + '_seq'),
+ optional=True)
+ )
+ event.listen(col, 'after_parent_attach', add_seq, propagate=True)
return col
+
+
+
def _truncate_name(dialect, name):
if len(name) > dialect.max_identifier_length:
return name[0:max(dialect.max_identifier_length - 6, 0)] + \
diff --git a/lib/sqlalchemy/testing/suite/test_insert.py b/lib/sqlalchemy/testing/suite/test_insert.py
index 66aa1ecfa..a00fde312 100644
--- a/lib/sqlalchemy/testing/suite/test_insert.py
+++ b/lib/sqlalchemy/testing/suite/test_insert.py
@@ -33,7 +33,7 @@ class LastrowidTest(fixtures.TablesTest):
row = conn.execute(table.select()).first()
eq_(
row,
- (1, "some data")
+ (config.db.dialect.default_sequence_base, "some data")
)
def test_autoincrement_on_insert(self):
@@ -132,7 +132,7 @@ class ReturningTest(fixtures.TablesTest):
row = conn.execute(table.select()).first()
eq_(
row,
- (1, "some data")
+ (config.db.dialect.default_sequence_base, "some data")
)
@classmethod
diff --git a/lib/sqlalchemy/testing/suite/test_reflection.py b/lib/sqlalchemy/testing/suite/test_reflection.py
index 5beed6aad..7cae48572 100644
--- a/lib/sqlalchemy/testing/suite/test_reflection.py
+++ b/lib/sqlalchemy/testing/suite/test_reflection.py
@@ -87,8 +87,10 @@ class ComponentReflectionTest(fixtures.TablesTest):
test_needs_fk=True,
)
- cls.define_index(metadata, users)
- cls.define_views(metadata, schema)
+ if testing.requires.index_reflection.enabled:
+ cls.define_index(metadata, users)
+ if testing.requires.view_reflection.enabled:
+ cls.define_views(metadata, schema)
@classmethod
def define_index(cls, metadata, users):
@@ -121,12 +123,14 @@ class ComponentReflectionTest(fixtures.TablesTest):
self.assert_('test_schema' in insp.get_schema_names())
+ @testing.requires.schema_reflection
def test_dialect_initialize(self):
engine = engines.testing_engine()
assert not hasattr(engine.dialect, 'default_schema_name')
inspect(engine)
assert hasattr(engine.dialect, 'default_schema_name')
+ @testing.requires.schema_reflection
def test_get_default_schema_name(self):
insp = inspect(testing.db)
eq_(insp.default_schema_name, testing.db.dialect.default_schema_name)
@@ -157,6 +161,7 @@ class ComponentReflectionTest(fixtures.TablesTest):
self._test_get_table_names()
@testing.requires.table_reflection
+ @testing.requires.foreign_key_constraint_reflection
def test_get_table_names_fks(self):
self._test_get_table_names(order_by='foreign_key')
@@ -261,6 +266,7 @@ class ComponentReflectionTest(fixtures.TablesTest):
self._test_get_pk_constraint()
@testing.requires.table_reflection
+ @testing.requires.primary_key_constraint_reflection
@testing.requires.schemas
def test_get_pk_constraint_with_schema(self):
self._test_get_pk_constraint(schema='test_schema')
diff --git a/lib/sqlalchemy/testing/util.py b/lib/sqlalchemy/testing/util.py
index 2592c341e..d9ff14eaf 100644
--- a/lib/sqlalchemy/testing/util.py
+++ b/lib/sqlalchemy/testing/util.py
@@ -1,6 +1,5 @@
from ..util import jython, pypy, defaultdict, decorator
-from ..util.compat import decimal
-
+import decimal
import gc
import time
import random
diff --git a/lib/sqlalchemy/types.py b/lib/sqlalchemy/types.py
index b9f7b9444..1824a9b3f 100644
--- a/lib/sqlalchemy/types.py
+++ b/lib/sqlalchemy/types.py
@@ -25,10 +25,10 @@ import codecs
from . import exc, schema, util, processors, events, event
from .sql import operators
-from .sql.expression import _DefaultColumnComparator, column, bindparam
+from .sql.expression import _DefaultColumnComparator
from .util import pickle
-from .util.compat import decimal
from .sql.visitors import Visitable
+import decimal
default = util.importlater("sqlalchemy.engine", "default")
NoneType = type(None)
@@ -1372,8 +1372,7 @@ class Numeric(_DateAffinity, TypeEngine):
implementations however, most of which contain an import for plain
``decimal`` in their source code, even though some such as psycopg2
provide hooks for alternate adapters. SQLAlchemy imports ``decimal``
- globally as well. While the alternate ``Decimal`` class can be patched
- into SQLA's ``decimal`` module, overall the most straightforward and
+ globally as well. The most straightforward and
foolproof way to use "cdecimal" given current DBAPI and Python support
is to patch it directly into sys.modules before anything else is
imported::
@@ -1712,8 +1711,15 @@ class _Binary(TypeEngine):
return process
# Python 3 has native bytes() type
- # both sqlite3 and pg8000 seem to return it
- # (i.e. and not 'memoryview')
+ # both sqlite3 and pg8000 seem to return it,
+ # psycopg2 as of 2.5 returns 'memoryview'
+ # Py3K
+ #def result_processor(self, dialect, coltype):
+ # def process(value):
+ # if value is not None:
+ # value = bytes(value)
+ # return value
+ # return process
# Py2K
def result_processor(self, dialect, coltype):
if util.jython:
diff --git a/lib/sqlalchemy/util/__init__.py b/lib/sqlalchemy/util/__init__.py
index 249c46ead..3fa06c793 100644
--- a/lib/sqlalchemy/util/__init__.py
+++ b/lib/sqlalchemy/util/__init__.py
@@ -4,10 +4,10 @@
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
-from .compat import callable, cmp, reduce, defaultdict, py25_dict, \
+from .compat import callable, cmp, reduce, \
threading, py3k, py3k_warning, jython, pypy, cpython, win32, set_types, \
- buffer, pickle, update_wrapper, partial, md5_hex, decode_slice, \
- dottedgetter, parse_qsl, any, contextmanager, namedtuple, next, WeakSet
+ pickle, dottedgetter, parse_qsl, namedtuple, next, WeakSet, reraise, \
+ raise_from_cause
from ._collections import KeyedTuple, ImmutableContainer, immutabledict, \
Properties, OrderedProperties, ImmutableProperties, OrderedDict, \
@@ -21,13 +21,20 @@ from .langhelpers import iterate_attributes, class_hierarchy, \
portable_instancemethod, unbound_method_to_callable, \
getargspec_init, format_argspec_init, format_argspec_plus, \
get_func_kwargs, get_cls_kwargs, decorator, as_interface, \
- memoized_property, memoized_instancemethod, \
- group_expirable_memoized_property, importlater, \
+ memoized_property, memoized_instancemethod, md5_hex, \
+ group_expirable_memoized_property, importlater, decode_slice, \
monkeypatch_proxied_specials, asbool, bool_or_str, coerce_kw_type,\
duck_type_collection, assert_arg_type, symbol, dictlike_iteritems,\
classproperty, set_creation_order, warn_exception, warn, NoneType,\
constructor_copy, methods_equivalent, chop_traceback, asint,\
- generic_repr, counter, PluginLoader, hybridmethod
+ generic_repr, counter, PluginLoader, hybridmethod, safe_reraise
from .deprecations import warn_deprecated, warn_pending_deprecation, \
deprecated, pending_deprecation
+
+# things that used to be not always available,
+# but are now as of currently supported Python versions
+from collections import defaultdict
+from functools import partial
+from functools import update_wrapper
+from contextlib import contextmanager
diff --git a/lib/sqlalchemy/util/_collections.py b/lib/sqlalchemy/util/_collections.py
index ca77103b2..8e61275e7 100644
--- a/lib/sqlalchemy/util/_collections.py
+++ b/lib/sqlalchemy/util/_collections.py
@@ -6,7 +6,6 @@
"""Collection classes and helpers."""
-import sys
import itertools
import weakref
import operator
@@ -649,43 +648,30 @@ class OrderedIdentitySet(IdentitySet):
self.add(o)
-if sys.version_info >= (2, 5):
- class PopulateDict(dict):
- """A dict which populates missing values via a creation function.
+class PopulateDict(dict):
+ """A dict which populates missing values via a creation function.
- Note the creation function takes a key, unlike
- collections.defaultdict.
+ Note the creation function takes a key, unlike
+ collections.defaultdict.
- """
-
- def __init__(self, creator):
- self.creator = creator
-
- def __missing__(self, key):
- self[key] = val = self.creator(key)
- return val
-else:
- class PopulateDict(dict):
- """A dict which populates missing values via a creation function."""
+ """
- def __init__(self, creator):
- self.creator = creator
+ def __init__(self, creator):
+ self.creator = creator
- def __getitem__(self, key):
- try:
- return dict.__getitem__(self, key)
- except KeyError:
- self[key] = value = self.creator(key)
- return value
+ def __missing__(self, key):
+ self[key] = val = self.creator(key)
+ return val
-# define collections that are capable of storing
+# Define collections that are capable of storing
# ColumnElement objects as hashable keys/elements.
+# At this point, these are mostly historical; things
+# used to be more complicated.
column_set = set
column_dict = dict
ordered_column_set = OrderedSet
populate_column_dict = PopulateDict
-
def unique_list(seq, hashfunc=None):
seen = {}
if not hashfunc:
diff --git a/lib/sqlalchemy/util/compat.py b/lib/sqlalchemy/util/compat.py
index 3725a8491..033a87cc7 100644
--- a/lib/sqlalchemy/util/compat.py
+++ b/lib/sqlalchemy/util/compat.py
@@ -54,44 +54,6 @@ else:
except ImportError:
import pickle
-
-# a controversial feature, required by MySQLdb currently
-def buffer(x):
- return x
-
-# Py2K
-buffer = buffer
-# end Py2K
-
-try:
- from contextlib import contextmanager
-except ImportError:
- def contextmanager(fn):
- return fn
-
-try:
- from functools import update_wrapper
-except ImportError:
- def update_wrapper(wrapper, wrapped,
- assigned=('__doc__', '__module__', '__name__'),
- updated=('__dict__',)):
- for attr in assigned:
- setattr(wrapper, attr, getattr(wrapped, attr))
- for attr in updated:
- getattr(wrapper, attr).update(getattr(wrapped, attr, ()))
- return wrapper
-
-try:
- from functools import partial
-except ImportError:
- def partial(func, *args, **keywords):
- def newfunc(*fargs, **fkeywords):
- newkeywords = keywords.copy()
- newkeywords.update(fkeywords)
- return func(*(args + fargs), **newkeywords)
- return newfunc
-
-
if sys.version_info < (2, 6):
# emits a nasty deprecation warning
# in newer pythons
@@ -132,52 +94,6 @@ except ImportError:
return tuptype
try:
- from collections import defaultdict
-except ImportError:
- class defaultdict(dict):
-
- def __init__(self, default_factory=None, *a, **kw):
- if (default_factory is not None and
- not hasattr(default_factory, '__call__')):
- raise TypeError('first argument must be callable')
- dict.__init__(self, *a, **kw)
- self.default_factory = default_factory
-
- def __getitem__(self, key):
- try:
- return dict.__getitem__(self, key)
- except KeyError:
- return self.__missing__(key)
-
- def __missing__(self, key):
- if self.default_factory is None:
- raise KeyError(key)
- self[key] = value = self.default_factory()
- return value
-
- def __reduce__(self):
- if self.default_factory is None:
- args = tuple()
- else:
- args = self.default_factory,
- return type(self), args, None, None, self.iteritems()
-
- def copy(self):
- return self.__copy__()
-
- def __copy__(self):
- return type(self)(self.default_factory, self)
-
- def __deepcopy__(self, memo):
- import copy
- return type(self)(self.default_factory,
- copy.deepcopy(self.items()))
-
- def __repr__(self):
- return 'defaultdict(%s, %s)' % (self.default_factory,
- dict.__repr__(self))
-
-try:
from weakref import WeakSet
except:
import weakref
@@ -199,79 +115,12 @@ except:
def add(self, other):
self._storage[other] = True
-
-# find or create a dict implementation that supports __missing__
-class _probe(dict):
- def __missing__(self, key):
- return 1
-
-try:
- try:
- _probe()['missing']
- py25_dict = dict
- except KeyError:
- class py25_dict(dict):
- def __getitem__(self, key):
- try:
- return dict.__getitem__(self, key)
- except KeyError:
- try:
- missing = self.__missing__
- except AttributeError:
- raise KeyError(key)
- else:
- return missing(key)
-finally:
- del _probe
-
-
-try:
- import hashlib
- _md5 = hashlib.md5
-except ImportError:
- import md5
- _md5 = md5.new
-
-
-def md5_hex(x):
- # Py3K
- #x = x.encode('utf-8')
- m = _md5()
- m.update(x)
- return m.hexdigest()
-
import time
if win32 or jython:
time_func = time.clock
else:
time_func = time.time
-if sys.version_info >= (2, 5):
- any = any
-else:
- def any(iterator):
- for item in iterator:
- if bool(item):
- return True
- else:
- return False
-
-if sys.version_info >= (2, 5):
- def decode_slice(slc):
- """decode a slice object as sent to __getitem__.
-
- takes into account the 2.5 __index__() method, basically.
-
- """
- ret = []
- for x in slc.start, slc.stop, slc.step:
- if hasattr(x, '__index__'):
- x = x.__index__()
- ret.append(x)
- return tuple(ret)
-else:
- def decode_slice(slc):
- return (slc.start, slc.stop, slc.step)
if sys.version_info >= (2, 6):
from operator import attrgetter as dottedgetter
@@ -283,5 +132,36 @@ else:
return obj
return g
+# Adapted from six.py
+if py3k:
+ def b(s):
+ return s.encode("latin-1")
+else:
+ def b(s):
+ return s
+
+
+if py3k:
+ def reraise(tp, value, tb=None, cause=None):
+ if cause is not None:
+ value.__cause__ = cause
+ if value.__traceback__ is not tb:
+ raise value.with_traceback(tb)
+ raise value
+
+ def raise_from_cause(exception, exc_info):
+ exc_type, exc_value, exc_tb = exc_info
+ reraise(type(exception), exception, tb=exc_tb, cause=exc_value)
+else:
+ exec("def reraise(tp, value, tb=None, cause=None):\n"
+ " raise tp, value, tb\n")
+
+ def raise_from_cause(exception, exc_info):
+ # not as nice as that of Py3K, but at least preserves
+ # the code line where the issue occurred
+ exc_type, exc_value, exc_tb = exc_info
+ reraise(type(exception), exception, tb=exc_tb)
+
+
+
-import decimal
diff --git a/lib/sqlalchemy/util/langhelpers.py b/lib/sqlalchemy/util/langhelpers.py
index a9b791234..f6d9164e6 100644
--- a/lib/sqlalchemy/util/langhelpers.py
+++ b/lib/sqlalchemy/util/langhelpers.py
@@ -15,10 +15,63 @@ import re
import sys
import types
import warnings
-from .compat import update_wrapper, set_types, threading, \
+from .compat import set_types, threading, \
callable, inspect_getfullargspec
+from functools import update_wrapper
from .. import exc
+import hashlib
+from . import compat
+def md5_hex(x):
+ # Py3K
+ #x = x.encode('utf-8')
+ m = hashlib.md5()
+ m.update(x)
+ return m.hexdigest()
+
+class safe_reraise(object):
+ """Reraise an exception after invoking some
+ handler code.
+
+ Stores the existing exception info before
+ invoking so that it is maintained across a potential
+ coroutine context switch.
+
+ e.g.::
+
+ try:
+ sess.commit()
+ except:
+ with safe_reraise():
+ sess.rollback()
+
+ """
+
+ def __enter__(self):
+ self._exc_info = sys.exc_info()
+
+ def __exit__(self, type_, value, traceback):
+ # see #2703 for notes
+ if type_ is None:
+ exc_type, exc_value, exc_tb = self._exc_info
+ self._exc_info = None # remove potential circular references
+ compat.reraise(exc_type, exc_value, exc_tb)
+ else:
+ self._exc_info = None # remove potential circular references
+ compat.reraise(type_, value, traceback)
+
+def decode_slice(slc):
+ """decode a slice object as sent to __getitem__.
+
+ takes into account the 2.5 __index__() method, basically.
+
+ """
+ ret = []
+ for x in slc.start, slc.stop, slc.step:
+ if hasattr(x, '__index__'):
+ x = x.__index__()
+ ret.append(x)
+ return tuple(ret)
def _unique_symbols(used, *bases):
used = set(used)
@@ -123,7 +176,7 @@ def get_cls_kwargs(cls):
ctr = class_.__dict__.get('__init__', False)
if (not ctr or
not isinstance(ctr, types.FunctionType) or
- not isinstance(ctr.func_code, types.CodeType)):
+ not isinstance(ctr.func_code, types.CodeType)):
stack.update(class_.__bases__)
continue
@@ -256,7 +309,6 @@ def format_argspec_init(method, grouped=True):
try:
return format_argspec_plus(method, grouped=grouped)
except TypeError:
- self_arg = 'self'
if method is object.__init__:
args = grouped and '(self)' or 'self'
else:
@@ -784,7 +836,7 @@ def duck_type_collection(specimen, default=None):
if hasattr(specimen, '__emulates__'):
# canonicalize set vs sets.Set to a standard: the builtin set
if (specimen.__emulates__ is not None and
- issubclass(specimen.__emulates__, set_types)):
+ issubclass(specimen.__emulates__, set_types)):
return set
else:
return specimen.__emulates__
diff --git a/lib/sqlalchemy/util/topological.py b/lib/sqlalchemy/util/topological.py
index 6f895e7b7..de3dfd0ae 100644
--- a/lib/sqlalchemy/util/topological.py
+++ b/lib/sqlalchemy/util/topological.py
@@ -49,7 +49,8 @@ def sort(tuples, allitems):
def find_cycles(tuples, allitems):
- # straight from gvr with some mods
+ # adapted from:
+ # http://neopythonic.blogspot.com/2009/01/detecting-cycles-in-directed-graph.html
edges = util.defaultdict(set)
for parent, child in tuples:
diff --git a/setup.cfg b/setup.cfg
index a3894cd4e..92bdbc40f 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -7,6 +7,10 @@ exclude = ^examples
first-package-wins = true
where = test
+[upload]
+sign = 1
+identity = C4DAFEE1
+
[sqla_testing]
requirement_cls=test.requirements:DefaultRequirements
profile_file=test/profiles.txt
diff --git a/test/aaa_profiling/test_compiler.py b/test/aaa_profiling/test_compiler.py
index 2776f05ab..1b7798d06 100644
--- a/test/aaa_profiling/test_compiler.py
+++ b/test/aaa_profiling/test_compiler.py
@@ -60,4 +60,16 @@ class CompileTest(fixtures.TestBase, AssertsExecutionResults):
def go():
s = select([t1], t1.c.c2 == t2.c.c1)
s.compile(dialect=self.dialect)
+ go()
+
+ def test_select_labels(self):
+ # give some of the cached type values
+ # a chance to warm up
+ s = select([t1], t1.c.c2 == t2.c.c1).apply_labels()
+ s.compile(dialect=self.dialect)
+
+ @profiling.function_call_count()
+ def go():
+ s = select([t1], t1.c.c2 == t2.c.c1).apply_labels()
+ s.compile(dialect=self.dialect)
go() \ No newline at end of file
diff --git a/test/aaa_profiling/test_memusage.py b/test/aaa_profiling/test_memusage.py
index aabc0a2bc..57bddc859 100644
--- a/test/aaa_profiling/test_memusage.py
+++ b/test/aaa_profiling/test_memusage.py
@@ -14,7 +14,7 @@ from sqlalchemy.sql import column
from sqlalchemy.processors import to_decimal_processor_factory, \
to_unicode_processor_factory
from sqlalchemy.testing.util import gc_collect
-from sqlalchemy.util.compat import decimal
+import decimal
import gc
from sqlalchemy.testing import fixtures
import weakref
@@ -307,6 +307,7 @@ class MemUsageTest(EnsureZeroed):
finally:
metadata.drop_all()
+ @testing.crashes('mysql+cymysql', 'blocking with cymysql >= 0.6')
def test_unicode_warnings(self):
metadata = MetaData(testing.db)
table1 = Table('mytable', metadata, Column('col1', Integer,
diff --git a/test/dialect/test_mssql.py b/test/dialect/test_mssql.py
index 52ba77310..f1cd3fe85 100644
--- a/test/dialect/test_mssql.py
+++ b/test/dialect/test_mssql.py
@@ -13,10 +13,10 @@ from sqlalchemy.engine import url
from sqlalchemy.testing import fixtures, AssertsCompiledSQL, \
AssertsExecutionResults, ComparesTables
from sqlalchemy import testing
-from sqlalchemy.testing import eq_, emits_warning_on, \
- assert_raises_message
-from sqlalchemy.util.compat import decimal
+from sqlalchemy.testing import emits_warning_on, assert_raises_message
+import decimal
from sqlalchemy.engine.reflection import Inspector
+from sqlalchemy.util.compat import b
class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = mssql.dialect()
@@ -1210,28 +1210,28 @@ class MatchTest(fixtures.TestBase, AssertsCompiledSQL):
eq_([1, 3, 5], [r.id for r in results])
-class ParseConnectTest(fixtures.TestBase, AssertsCompiledSQL):
- @classmethod
- def setup_class(cls):
- global dialect
- dialect = pyodbc.dialect()
+class ParseConnectTest(fixtures.TestBase):
def test_pyodbc_connect_dsn_trusted(self):
+ dialect = pyodbc.dialect()
u = url.make_url('mssql://mydsn')
connection = dialect.create_connect_args(u)
eq_([['dsn=mydsn;Trusted_Connection=Yes'], {}], connection)
def test_pyodbc_connect_old_style_dsn_trusted(self):
+ dialect = pyodbc.dialect()
u = url.make_url('mssql:///?dsn=mydsn')
connection = dialect.create_connect_args(u)
eq_([['dsn=mydsn;Trusted_Connection=Yes'], {}], connection)
def test_pyodbc_connect_dsn_non_trusted(self):
+ dialect = pyodbc.dialect()
u = url.make_url('mssql://username:password@mydsn')
connection = dialect.create_connect_args(u)
eq_([['dsn=mydsn;UID=username;PWD=password'], {}], connection)
def test_pyodbc_connect_dsn_extra(self):
+ dialect = pyodbc.dialect()
u = \
url.make_url('mssql://username:password@mydsn/?LANGUAGE=us_'
'english&foo=bar')
@@ -1241,12 +1241,14 @@ class ParseConnectTest(fixtures.TestBase, AssertsCompiledSQL):
assert ";foo=bar" in dsn_string
def test_pyodbc_connect(self):
+ dialect = pyodbc.dialect()
u = url.make_url('mssql://username:password@hostspec/database')
connection = dialect.create_connect_args(u)
eq_([['DRIVER={SQL Server};Server=hostspec;Database=database;UI'
'D=username;PWD=password'], {}], connection)
def test_pyodbc_connect_comma_port(self):
+ dialect = pyodbc.dialect()
u = \
url.make_url('mssql://username:password@hostspec:12345/data'
'base')
@@ -1255,6 +1257,7 @@ class ParseConnectTest(fixtures.TestBase, AssertsCompiledSQL):
'ase;UID=username;PWD=password'], {}], connection)
def test_pyodbc_connect_config_port(self):
+ dialect = pyodbc.dialect()
u = \
url.make_url('mssql://username:password@hostspec/database?p'
'ort=12345')
@@ -1263,6 +1266,7 @@ class ParseConnectTest(fixtures.TestBase, AssertsCompiledSQL):
'D=username;PWD=password;port=12345'], {}], connection)
def test_pyodbc_extra_connect(self):
+ dialect = pyodbc.dialect()
u = \
url.make_url('mssql://username:password@hostspec/database?L'
'ANGUAGE=us_english&foo=bar')
@@ -1275,6 +1279,7 @@ class ParseConnectTest(fixtures.TestBase, AssertsCompiledSQL):
'username;PWD=password;LANGUAGE=us_english;foo=bar'), True)
def test_pyodbc_odbc_connect(self):
+ dialect = pyodbc.dialect()
u = \
url.make_url('mssql:///?odbc_connect=DRIVER%3D%7BSQL+Server'
'%7D%3BServer%3Dhostspec%3BDatabase%3Ddatabase'
@@ -1284,6 +1289,7 @@ class ParseConnectTest(fixtures.TestBase, AssertsCompiledSQL):
'D=username;PWD=password'], {}], connection)
def test_pyodbc_odbc_connect_with_dsn(self):
+ dialect = pyodbc.dialect()
u = \
url.make_url('mssql:///?odbc_connect=dsn%3Dmydsn%3BDatabase'
'%3Ddatabase%3BUID%3Dusername%3BPWD%3Dpassword'
@@ -1293,6 +1299,7 @@ class ParseConnectTest(fixtures.TestBase, AssertsCompiledSQL):
{}], connection)
def test_pyodbc_odbc_connect_ignores_other_values(self):
+ dialect = pyodbc.dialect()
u = \
url.make_url('mssql://userdiff:passdiff@localhost/dbdiff?od'
'bc_connect=DRIVER%3D%7BSQL+Server%7D%3BServer'
@@ -1321,7 +1328,22 @@ class ParseConnectTest(fixtures.TestBase, AssertsCompiledSQL):
'user': 'scott', 'database': 'test'}], connection
)
- @testing.only_on(['mssql+pyodbc', 'mssql+pymssql'], "FreeTDS specific test")
+ def test_pymssql_disconnect(self):
+ dialect = pymssql.dialect()
+
+ for error in [
+ 'Adaptive Server connection timed out',
+ 'message 20003',
+ "Error 10054",
+ "Not connected to any MS SQL server",
+ "Connection is closed"
+ ]:
+ eq_(dialect.is_disconnect(error, None, None), True)
+
+ eq_(dialect.is_disconnect("not an error", None, None), False)
+
+ @testing.only_on(['mssql+pyodbc', 'mssql+pymssql'],
+ "FreeTDS specific test")
def test_bad_freetds_warning(self):
engine = engines.testing_engine()
@@ -1926,6 +1948,21 @@ class TypeRoundTripTest(fixtures.TestBase, AssertsExecutionResults, ComparesTabl
not in list(engine.execute(tbl.select()).first())
engine.execute(tbl.delete())
+class MonkeyPatchedBinaryTest(fixtures.TestBase):
+ __only_on__ = 'mssql+pymssql'
+
+ def test_unicode(self):
+ module = __import__('pymssql')
+ result = module.Binary(u'foo')
+ eq_(result, u'foo')
+
+ def test_bytes(self):
+ module = __import__('pymssql')
+ input = b('\x80\x03]q\x00X\x03\x00\x00\x00oneq\x01a.')
+ expected_result = input
+ result = module.Binary(input)
+ eq_(result, expected_result)
+
class BinaryTest(fixtures.TestBase, AssertsExecutionResults):
"""Test the Binary and VarBinary types"""
diff --git a/test/dialect/test_oracle.py b/test/dialect/test_oracle.py
index 7384d7bb4..861b28c5f 100644
--- a/test/dialect/test_oracle.py
+++ b/test/dialect/test_oracle.py
@@ -7,12 +7,11 @@ from sqlalchemy import types as sqltypes, exc, schema
from sqlalchemy.sql import table, column
from sqlalchemy.testing import fixtures, AssertsExecutionResults, AssertsCompiledSQL
from sqlalchemy import testing
-from sqlalchemy.testing import eq_, assert_raises, assert_raises_message
+from sqlalchemy.testing import assert_raises, assert_raises_message
from sqlalchemy.testing.engines import testing_engine
from sqlalchemy.dialects.oracle import cx_oracle, base as oracle
from sqlalchemy.engine import default
-from sqlalchemy.util import jython
-from sqlalchemy.util.compat import decimal
+import decimal
from sqlalchemy.testing.schema import Table, Column
import datetime
import os
diff --git a/test/dialect/test_postgresql.py b/test/dialect/test_postgresql.py
index 3337fa6ab..005aed1ce 100644
--- a/test/dialect/test_postgresql.py
+++ b/test/dialect/test_postgresql.py
@@ -17,8 +17,8 @@ from sqlalchemy import Table, Column, select, MetaData, text, Integer, \
from sqlalchemy.orm import Session, mapper, aliased
from sqlalchemy import exc, schema, types
from sqlalchemy.dialects.postgresql import base as postgresql
-from sqlalchemy.dialects.postgresql import HSTORE, hstore, array, ARRAY
-from sqlalchemy.util.compat import decimal
+from sqlalchemy.dialects.postgresql import HSTORE, hstore, array
+import decimal
from sqlalchemy.testing.util import round_decimal
from sqlalchemy.sql import table, column, operators
import logging
@@ -180,6 +180,14 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
'USING hash (data)',
dialect=postgresql.dialect())
+ def test_substring(self):
+ self.assert_compile(func.substring('abc', 1, 2),
+ 'SUBSTRING(%(substring_1)s FROM %(substring_2)s '
+ 'FOR %(substring_3)s)')
+ self.assert_compile(func.substring('abc', 1),
+ 'SUBSTRING(%(substring_1)s FROM %(substring_2)s)')
+
+
def test_extract(self):
t = table('t', column('col1', DateTime), column('col2', Date),
@@ -734,7 +742,6 @@ class NumericInterpretationTest(fixtures.TestBase):
def test_numeric_codes(self):
from sqlalchemy.dialects.postgresql import pg8000, psycopg2, base
- from sqlalchemy.util.compat import decimal
for dialect in (pg8000.dialect(), psycopg2.dialect()):
@@ -3094,6 +3101,12 @@ class HStoreRoundTripTest(fixtures.TablesTest):
engine.connect()
return engine
+ def test_reflect(self):
+ from sqlalchemy import inspect
+ insp = inspect(testing.db)
+ cols = insp.get_columns('data_table')
+ assert isinstance(cols[2]['type'], HSTORE)
+
@testing.only_on("postgresql+psycopg2")
def test_insert_native(self):
engine = testing.db
diff --git a/test/engine/test_ddlemit.py b/test/engine/test_ddlemit.py
new file mode 100644
index 000000000..3dbd5756a
--- /dev/null
+++ b/test/engine/test_ddlemit.py
@@ -0,0 +1,184 @@
+from sqlalchemy.testing import fixtures
+from sqlalchemy.engine.ddl import SchemaGenerator, SchemaDropper
+from sqlalchemy.engine import default
+from sqlalchemy import MetaData, Table, Column, Integer, Sequence
+from sqlalchemy import schema
+
+class EmitDDLTest(fixtures.TestBase):
+ def _mock_connection(self, item_exists):
+ _canary = []
+
+ class MockDialect(default.DefaultDialect):
+ supports_sequences = True
+
+ def has_table(self, connection, name, schema):
+ return item_exists(name)
+
+ def has_sequence(self, connection, name, schema):
+ return item_exists(name)
+
+ class MockConnection(object):
+ dialect = MockDialect()
+ canary = _canary
+
+ def execute(self, item):
+ _canary.append(item)
+
+ return MockConnection()
+
+ def _mock_create_fixture(self, checkfirst, tables,
+ item_exists=lambda item: False):
+ connection = self._mock_connection(item_exists)
+
+ return SchemaGenerator(connection.dialect, connection,
+ checkfirst=checkfirst,
+ tables=tables)
+
+ def _mock_drop_fixture(self, checkfirst, tables,
+ item_exists=lambda item: True):
+ connection = self._mock_connection(item_exists)
+
+ return SchemaDropper(connection.dialect, connection,
+ checkfirst=checkfirst,
+ tables=tables)
+
+ def _table_fixture(self):
+ m = MetaData()
+
+ return (m, ) + tuple(
+ Table('t%d' % i, m, Column('x', Integer))
+ for i in xrange(1, 6)
+ )
+
+ def _table_seq_fixture(self):
+ m = MetaData()
+
+ s1 = Sequence('s1')
+ s2 = Sequence('s2')
+ t1 = Table('t1', m, Column("x", Integer, s1, primary_key=True))
+ t2 = Table('t2', m, Column("x", Integer, s2, primary_key=True))
+
+ return m, t1, t2, s1, s2
+
+
+ def test_create_seq_checkfirst(self):
+ m, t1, t2, s1, s2 = self._table_seq_fixture()
+ generator = self._mock_create_fixture(True, [t1, t2],
+ item_exists=lambda t: t not in ("t1", "s1")
+ )
+
+ self._assert_create([t1, s1], generator, m)
+
+
+ def test_drop_seq_checkfirst(self):
+ m, t1, t2, s1, s2 = self._table_seq_fixture()
+ generator = self._mock_drop_fixture(True, [t1, t2],
+ item_exists=lambda t: t in ("t1", "s1")
+ )
+
+ self._assert_drop([t1, s1], generator, m)
+
+ def test_create_collection_checkfirst(self):
+ m, t1, t2, t3, t4, t5 = self._table_fixture()
+ generator = self._mock_create_fixture(True, [t2, t3, t4],
+ item_exists=lambda t: t not in ("t2", "t4")
+ )
+
+ self._assert_create_tables([t2, t4], generator, m)
+
+ def test_drop_collection_checkfirst(self):
+ m, t1, t2, t3, t4, t5 = self._table_fixture()
+ generator = self._mock_drop_fixture(True, [t2, t3, t4],
+ item_exists=lambda t: t in ("t2", "t4")
+ )
+
+ self._assert_drop_tables([t2, t4], generator, m)
+
+ def test_create_collection_nocheck(self):
+ m, t1, t2, t3, t4, t5 = self._table_fixture()
+ generator = self._mock_create_fixture(False, [t2, t3, t4],
+ item_exists=lambda t: t not in ("t2", "t4")
+ )
+
+ self._assert_create_tables([t2, t3, t4], generator, m)
+
+ def test_create_empty_collection(self):
+ m, t1, t2, t3, t4, t5 = self._table_fixture()
+ generator = self._mock_create_fixture(True, [],
+ item_exists=lambda t: t not in ("t2", "t4")
+ )
+
+ self._assert_create_tables([], generator, m)
+
+ def test_drop_empty_collection(self):
+ m, t1, t2, t3, t4, t5 = self._table_fixture()
+ generator = self._mock_drop_fixture(True, [],
+ item_exists=lambda t: t in ("t2", "t4")
+ )
+
+ self._assert_drop_tables([], generator, m)
+
+ def test_drop_collection_nocheck(self):
+ m, t1, t2, t3, t4, t5 = self._table_fixture()
+ generator = self._mock_drop_fixture(False, [t2, t3, t4],
+ item_exists=lambda t: t in ("t2", "t4")
+ )
+
+ self._assert_drop_tables([t2, t3, t4], generator, m)
+
+ def test_create_metadata_checkfirst(self):
+ m, t1, t2, t3, t4, t5 = self._table_fixture()
+ generator = self._mock_create_fixture(True, None,
+ item_exists=lambda t: t not in ("t2", "t4")
+ )
+
+ self._assert_create_tables([t2, t4], generator, m)
+
+ def test_drop_metadata_checkfirst(self):
+ m, t1, t2, t3, t4, t5 = self._table_fixture()
+ generator = self._mock_drop_fixture(True, None,
+ item_exists=lambda t: t in ("t2", "t4")
+ )
+
+ self._assert_drop_tables([t2, t4], generator, m)
+
+ def test_create_metadata_nocheck(self):
+ m, t1, t2, t3, t4, t5 = self._table_fixture()
+ generator = self._mock_create_fixture(False, None,
+ item_exists=lambda t: t not in ("t2", "t4")
+ )
+
+ self._assert_create_tables([t1, t2, t3, t4, t5], generator, m)
+
+ def test_drop_metadata_nocheck(self):
+ m, t1, t2, t3, t4, t5 = self._table_fixture()
+ generator = self._mock_drop_fixture(False, None,
+ item_exists=lambda t: t in ("t2", "t4")
+ )
+
+ self._assert_drop_tables([t1, t2, t3, t4, t5], generator, m)
+
+ def _assert_create_tables(self, elements, generator, argument):
+ self._assert_ddl(schema.CreateTable, elements, generator, argument)
+
+ def _assert_drop_tables(self, elements, generator, argument):
+ self._assert_ddl(schema.DropTable, elements, generator, argument)
+
+ def _assert_create(self, elements, generator, argument):
+ self._assert_ddl(
+ (schema.CreateTable, schema.CreateSequence),
+ elements, generator, argument)
+
+ def _assert_drop(self, elements, generator, argument):
+ self._assert_ddl(
+ (schema.DropTable, schema.DropSequence),
+ elements, generator, argument)
+
+ def _assert_ddl(self, ddl_cls, elements, generator, argument):
+ generator.traverse_single(argument)
+ for c in generator.connection.canary:
+ assert isinstance(c, ddl_cls)
+ assert c.element in elements, "element %r was not expected"\
+ % c.element
+ elements.remove(c.element)
+ assert not elements, "elements remain in list: %r" % elements
diff --git a/test/engine/test_execute.py b/test/engine/test_execute.py
index d14cde245..203d7bd71 100644
--- a/test/engine/test_execute.py
+++ b/test/engine/test_execute.py
@@ -13,7 +13,7 @@ import sqlalchemy as tsa
from sqlalchemy import testing
from sqlalchemy.testing import engines
from sqlalchemy.testing.engines import testing_engine
-import logging
+import logging.handlers
from sqlalchemy.dialects.oracle.zxjdbc import ReturningParam
from sqlalchemy.engine import result as _result, default
from sqlalchemy.engine.base import Connection, Engine
diff --git a/test/engine/test_reconnect.py b/test/engine/test_reconnect.py
index 6b283654b..9aecb81a9 100644
--- a/test/engine/test_reconnect.py
+++ b/test/engine/test_reconnect.py
@@ -11,7 +11,10 @@ from sqlalchemy import exc
from sqlalchemy.testing import fixtures
from sqlalchemy.testing.engines import testing_engine
-class MockDisconnect(Exception):
+class MockError(Exception):
+ pass
+
+class MockDisconnect(MockError):
pass
class MockDBAPI(object):
@@ -20,17 +23,23 @@ class MockDBAPI(object):
self.connections = weakref.WeakKeyDictionary()
def connect(self, *args, **kwargs):
return MockConnection(self)
- def shutdown(self):
+ def shutdown(self, explode='execute'):
for c in self.connections:
- c.explode[0] = True
- Error = MockDisconnect
+ c.explode = explode
+ Error = MockError
class MockConnection(object):
def __init__(self, dbapi):
dbapi.connections[self] = True
- self.explode = [False]
+ self.explode = ""
def rollback(self):
- pass
+ if self.explode == 'rollback':
+ raise MockDisconnect("Lost the DB connection on rollback")
+ if self.explode == 'rollback_no_disconnect':
+ raise MockError(
+ "something broke on rollback but we didn't lose the connection")
+ else:
+ return
def commit(self):
pass
def cursor(self):
@@ -42,13 +51,30 @@ class MockCursor(object):
def __init__(self, parent):
self.explode = parent.explode
self.description = ()
+ self.closed = False
def execute(self, *args, **kwargs):
- if self.explode[0]:
- raise MockDisconnect("Lost the DB connection")
+ if self.explode == 'execute':
+ raise MockDisconnect("Lost the DB connection on execute")
+ elif self.explode in ('execute_no_disconnect', ):
+ raise MockError(
+ "something broke on execute but we didn't lose the connection")
+ elif self.explode in ('rollback', 'rollback_no_disconnect'):
+ raise MockError(
+ "something broke on execute but we didn't lose the connection")
+ elif args and "select" in args[0]:
+ self.description = [('foo', None, None, None, None, None)]
else:
return
+ def fetchall(self):
+ if self.closed:
+ raise MockError("cursor closed")
+ return []
+ def fetchone(self):
+ if self.closed:
+ raise MockError("cursor closed")
+ return None
def close(self):
- pass
+ self.closed = True
db, dbapi = None, None
class MockReconnectTest(fixtures.TestBase):
@@ -167,12 +193,10 @@ class MockReconnectTest(fixtures.TestBase):
dbapi.shutdown()
- # raises error
- try:
- conn.execute(select([1]))
- assert False
- except tsa.exc.DBAPIError:
- pass
+ assert_raises(
+ tsa.exc.DBAPIError,
+ conn.execute, select([1])
+ )
assert not conn.closed
assert conn.invalidated
@@ -186,6 +210,112 @@ class MockReconnectTest(fixtures.TestBase):
assert not conn.invalidated
assert len(dbapi.connections) == 1
+ def test_invalidated_close(self):
+ conn = db.connect()
+
+ dbapi.shutdown()
+
+ assert_raises(
+ tsa.exc.DBAPIError,
+ conn.execute, select([1])
+ )
+
+ conn.close()
+ assert conn.closed
+ assert conn.invalidated
+ assert_raises_message(
+ tsa.exc.StatementError,
+ "This Connection is closed",
+ conn.execute, select([1])
+ )
+
+ def test_noreconnect_execute_plus_closewresult(self):
+ conn = db.connect(close_with_result=True)
+
+ dbapi.shutdown("execute_no_disconnect")
+
+ # raises error
+ assert_raises_message(
+ tsa.exc.DBAPIError,
+ "something broke on execute but we didn't lose the connection",
+ conn.execute, select([1])
+ )
+
+ assert conn.closed
+ assert not conn.invalidated
+
+ def test_noreconnect_rollback_plus_closewresult(self):
+ conn = db.connect(close_with_result=True)
+
+ dbapi.shutdown("rollback_no_disconnect")
+
+ # raises error
+ assert_raises_message(
+ tsa.exc.DBAPIError,
+ "something broke on rollback but we didn't lose the connection",
+ conn.execute, select([1])
+ )
+
+ assert conn.closed
+ assert not conn.invalidated
+
+ assert_raises_message(
+ tsa.exc.StatementError,
+ "This Connection is closed",
+ conn.execute, select([1])
+ )
+
+ def test_reconnect_on_reentrant(self):
+ conn = db.connect()
+
+ conn.execute(select([1]))
+
+ assert len(dbapi.connections) == 1
+
+ dbapi.shutdown("rollback")
+
+ # raises error
+ assert_raises_message(
+ tsa.exc.DBAPIError,
+ "Lost the DB connection on rollback",
+ conn.execute, select([1])
+ )
+
+ assert not conn.closed
+ assert conn.invalidated
+
+ def test_reconnect_on_reentrant_plus_closewresult(self):
+ conn = db.connect(close_with_result=True)
+
+ dbapi.shutdown("rollback")
+
+ # raises error
+ assert_raises_message(
+ tsa.exc.DBAPIError,
+ "Lost the DB connection on rollback",
+ conn.execute, select([1])
+ )
+
+ assert conn.closed
+ assert conn.invalidated
+
+ assert_raises_message(
+ tsa.exc.StatementError,
+ "This Connection is closed",
+ conn.execute, select([1])
+ )
+
+ def test_check_disconnect_no_cursor(self):
+ conn = db.connect()
+ result = conn.execute("select 1")
+ result.cursor.close()
+ conn.close()
+ assert_raises_message(
+ tsa.exc.DBAPIError,
+ "cursor closed",
+ list, result
+ )
+
class CursorErrTest(fixtures.TestBase):
def setup(self):
diff --git a/test/ext/declarative/test_inheritance.py b/test/ext/declarative/test_inheritance.py
index ab78cc3e2..f0372e8ee 100644
--- a/test/ext/declarative/test_inheritance.py
+++ b/test/ext/declarative/test_inheritance.py
@@ -14,7 +14,8 @@ from sqlalchemy.orm import relationship, create_session, class_mapper, \
Session
from sqlalchemy.testing import eq_
from sqlalchemy.util import classproperty
-from sqlalchemy.ext.declarative import declared_attr, AbstractConcreteBase, ConcreteBase
+from sqlalchemy.ext.declarative import declared_attr, AbstractConcreteBase, \
+ ConcreteBase, has_inherited_table
from sqlalchemy.testing import fixtures
Base = None
@@ -1112,6 +1113,46 @@ class ConcreteInhTest(_RemoveListeners, DeclarativeTestBase):
'concrete':True}
self._roundtrip(Employee, Manager, Engineer, Boss)
+
+ def test_has_inherited_table_doesnt_consider_base(self):
+ class A(Base):
+ __tablename__ = 'a'
+ id = Column(Integer, primary_key=True)
+
+ assert not has_inherited_table(A)
+
+ class B(A):
+ __tablename__ = 'b'
+ id = Column(Integer, ForeignKey('a.id'), primary_key=True)
+
+ assert has_inherited_table(B)
+
+ def test_has_inherited_table_in_mapper_args(self):
+ class Test(Base):
+ __tablename__ = 'test'
+ id = Column(Integer, primary_key=True)
+ type = Column(String(20))
+
+ @declared_attr
+ def __mapper_args__(cls):
+ if not has_inherited_table(cls):
+ ret = {
+ 'polymorphic_identity': 'default',
+ 'polymorphic_on': cls.type,
+ }
+ else:
+ ret = {'polymorphic_identity': cls.__name__}
+ return ret
+
+ class PolyTest(Test):
+ __tablename__ = 'poly_test'
+ id = Column(Integer, ForeignKey(Test.id), primary_key=True)
+
+ configure_mappers()
+
+ assert Test.__mapper__.polymorphic_on is Test.__table__.c.type
+ assert PolyTest.__mapper__.polymorphic_on is Test.__table__.c.type
+
def test_ok_to_override_type_from_abstract(self):
class Employee(AbstractConcreteBase, Base, fixtures.ComparableEntity):
pass
diff --git a/test/ext/test_serializer.py b/test/ext/test_serializer.py
index bf268fbbb..34d7d45e0 100644
--- a/test/ext/test_serializer.py
+++ b/test/ext/test_serializer.py
@@ -92,6 +92,8 @@ class SerializeTest(fixtures.MappedTest):
@testing.requires.python26 # namedtuple workaround not serializable in 2.5
@testing.skip_if(lambda: util.pypy, "pickle sometimes has "
"problems here, sometimes not")
+ @testing.skip_if("postgresql", "Having intermittent problems on jenkins "
+ "with this test, it's really not that important")
def test_query(self):
q = Session.query(User).filter(User.name == 'ed'
).options(joinedload(User.addresses))
diff --git a/test/orm/inheritance/test_basic.py b/test/orm/inheritance/test_basic.py
index 66991e922..f883a07a7 100644
--- a/test/orm/inheritance/test_basic.py
+++ b/test/orm/inheritance/test_basic.py
@@ -1055,6 +1055,73 @@ class FlushTest(fixtures.MappedTest):
sess.flush()
assert user_roles.count().scalar() == 1
+class JoinedNoFKSortingTest(fixtures.MappedTest):
+ @classmethod
+ def define_tables(cls, metadata):
+ Table("a", metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True)
+ )
+ Table("b", metadata,
+ Column('id', Integer, primary_key=True)
+ )
+ Table("c", metadata,
+ Column('id', Integer, primary_key=True)
+ )
+
+ @classmethod
+ def setup_classes(cls):
+ class A(cls.Basic):
+ pass
+ class B(A):
+ pass
+ class C(A):
+ pass
+
+ @classmethod
+ def setup_mappers(cls):
+ A, B, C = cls.classes.A, cls.classes.B, cls.classes.C
+ mapper(A, cls.tables.a)
+ mapper(B, cls.tables.b, inherits=A,
+ inherit_condition=cls.tables.a.c.id == cls.tables.b.c.id)
+ mapper(C, cls.tables.c, inherits=A,
+ inherit_condition=cls.tables.a.c.id == cls.tables.c.c.id)
+
+ def test_ordering(self):
+ B, C = self.classes.B, self.classes.C
+ sess = Session()
+ sess.add_all([B(), C(), B(), C()])
+ self.assert_sql_execution(
+ testing.db,
+ sess.flush,
+ CompiledSQL(
+ "INSERT INTO a () VALUES ()",
+ {}
+ ),
+ CompiledSQL(
+ "INSERT INTO a () VALUES ()",
+ {}
+ ),
+ CompiledSQL(
+ "INSERT INTO a () VALUES ()",
+ {}
+ ),
+ CompiledSQL(
+ "INSERT INTO a () VALUES ()",
+ {}
+ ),
+ AllOf(
+ CompiledSQL(
+ "INSERT INTO b (id) VALUES (:id)",
+ [{"id": 1}, {"id": 3}]
+ ),
+ CompiledSQL(
+ "INSERT INTO c (id) VALUES (:id)",
+ [{"id": 2}, {"id": 4}]
+ )
+ )
+ )
+
class VersioningTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
@@ -1570,6 +1637,53 @@ class OptimizedLoadTest(fixtures.MappedTest):
Column('b', String(10))
)
+ def test_no_optimize_on_map_to_join(self):
+ base, sub = self.tables.base, self.tables.sub
+
+ class Base(fixtures.ComparableEntity):
+ pass
+
+ class JoinBase(fixtures.ComparableEntity):
+ pass
+ class SubJoinBase(JoinBase):
+ pass
+
+ mapper(Base, base)
+ mapper(JoinBase, base.outerjoin(sub), properties={
+ 'id': [base.c.id, sub.c.id],
+ 'counter': [base.c.counter, sub.c.counter]
+ })
+ mapper(SubJoinBase, inherits=JoinBase)
+
+ sess = Session()
+ sess.add(Base(data='data'))
+ sess.commit()
+
+ sjb = sess.query(SubJoinBase).one()
+ sjb_id = sjb.id
+ sess.expire(sjb)
+
+ # this should not use the optimized load,
+ # which assumes discrete tables
+ def go():
+ eq_(sjb.data, 'data')
+
+ self.assert_sql_execution(
+ testing.db,
+ go,
+ CompiledSQL(
+ "SELECT base.counter AS base_counter, "
+ "sub.counter AS sub_counter, base.id AS base_id, "
+ "sub.id AS sub_id, base.data AS base_data, "
+ "base.type AS base_type, sub.sub AS sub_sub, "
+ "sub.counter2 AS sub_counter2 FROM base "
+ "LEFT OUTER JOIN sub ON base.id = sub.id "
+ "WHERE base.id = :param_1",
+ {'param_1': sjb_id}
+ ),
+ )
+
+
def test_optimized_passes(self):
""""test that the 'optimized load' routine doesn't crash when
a column in the join condition is not available."""
@@ -1611,7 +1725,7 @@ class OptimizedLoadTest(fixtures.MappedTest):
pass
mapper(Base, base, polymorphic_on=base.c.type, polymorphic_identity='base')
mapper(Sub, sub, inherits=Base, polymorphic_identity='sub', properties={
- 'concat':column_property(sub.c.sub + "|" + sub.c.sub)
+ 'concat': column_property(sub.c.sub + "|" + sub.c.sub)
})
sess = sessionmaker()()
s1 = Sub(data='s1data', sub='s1sub')
@@ -1630,7 +1744,7 @@ class OptimizedLoadTest(fixtures.MappedTest):
pass
mapper(Base, base, polymorphic_on=base.c.type, polymorphic_identity='base')
mapper(Sub, sub, inherits=Base, polymorphic_identity='sub', properties={
- 'concat':column_property(base.c.data + "|" + sub.c.sub)
+ 'concat': column_property(base.c.data + "|" + sub.c.sub)
})
sess = sessionmaker()()
s1 = Sub(data='s1data', sub='s1sub')
diff --git a/test/orm/inheritance/test_polymorphic_rel.py b/test/orm/inheritance/test_polymorphic_rel.py
index e22848912..1b9acb787 100644
--- a/test/orm/inheritance/test_polymorphic_rel.py
+++ b/test/orm/inheritance/test_polymorphic_rel.py
@@ -650,6 +650,7 @@ class _PolymorphicTestBase(object):
count = 5
self.assert_sql_count(testing.db, go, count)
+
def test_joinedload_on_subclass(self):
sess = create_session()
expected = [
diff --git a/test/orm/test_attributes.py b/test/orm/test_attributes.py
index 1fc70fd77..d60c55edd 100644
--- a/test/orm/test_attributes.py
+++ b/test/orm/test_attributes.py
@@ -1170,7 +1170,10 @@ class CyclicBackrefAssertionTest(fixtures.TestBase):
b1 = B()
assert_raises_message(
ValueError,
- "Object <B at .*> not associated with attribute of type C.a",
+ 'Bidirectional attribute conflict detected: '
+ 'Passing object <B at .*> to attribute "C.a" '
+ 'triggers a modify event on attribute "C.b" '
+ 'via the backref "B.c".',
setattr, c1, 'a', b1
)
@@ -1180,10 +1183,14 @@ class CyclicBackrefAssertionTest(fixtures.TestBase):
b1 = B()
assert_raises_message(
ValueError,
- "Object <B at .*> not associated with attribute of type C.a",
+ 'Bidirectional attribute conflict detected: '
+ 'Passing object <B at .*> to attribute "C.a" '
+ 'triggers a modify event on attribute "C.b" '
+ 'via the backref "B.c".',
c1.a.append, b1
)
+
def _scalar_fixture(self):
class A(object):
pass
@@ -1225,6 +1232,36 @@ class CyclicBackrefAssertionTest(fixtures.TestBase):
return A, B, C
+ def _broken_collection_fixture(self):
+ class A(object):
+ pass
+ class B(object):
+ pass
+ instrumentation.register_class(A)
+ instrumentation.register_class(B)
+
+ attributes.register_attribute(A, 'b', backref='a1', useobject=True)
+ attributes.register_attribute(B, 'a1', backref='b', useobject=True,
+ uselist=True)
+
+ attributes.register_attribute(B, 'a2', backref='b', useobject=True,
+ uselist=True)
+
+ return A, B
+
+ def test_broken_collection_assertion(self):
+ A, B = self._broken_collection_fixture()
+ b1 = B()
+ a1 = A()
+ assert_raises_message(
+ ValueError,
+ 'Bidirectional attribute conflict detected: '
+ 'Passing object <A at .*> to attribute "B.a2" '
+ 'triggers a modify event on attribute "B.a1" '
+ 'via the backref "A.b".',
+ b1.a2.append, a1
+ )
+
class PendingBackrefTest(fixtures.ORMTest):
def setup(self):
global Post, Blog, called, lazy_load
diff --git a/test/orm/test_cascade.py b/test/orm/test_cascade.py
index 00d19e792..12196b4e7 100644
--- a/test/orm/test_cascade.py
+++ b/test/orm/test_cascade.py
@@ -37,6 +37,22 @@ class CascadeArgTest(fixtures.MappedTest):
class Address(cls.Basic):
pass
+ def test_delete_with_passive_deletes_all(self):
+ User, Address = self.classes.User, self.classes.Address
+ users, addresses = self.tables.users, self.tables.addresses
+
+ mapper(User, users, properties={
+ 'addresses': relationship(Address,
+ passive_deletes="all", cascade="all, delete-orphan")
+ })
+ mapper(Address, addresses)
+ assert_raises_message(
+ sa_exc.ArgumentError,
+ "On User.addresses, can't set passive_deletes='all' "
+ "in conjunction with 'delete' or 'delete-orphan' cascade",
+ configure_mappers
+ )
+
def test_delete_orphan_without_delete(self):
User, Address = self.classes.User, self.classes.Address
users, addresses = self.tables.users, self.tables.addresses
@@ -69,6 +85,33 @@ class CascadeArgTest(fixtures.MappedTest):
orm_util.CascadeOptions("all, delete-orphan"),
frozenset)
+ def test_cascade_assignable(self):
+ User, Address = self.classes.User, self.classes.Address
+ users, addresses = self.tables.users, self.tables.addresses
+
+ rel = relationship(Address)
+ eq_(rel.cascade, set(['save-update', 'merge']))
+ rel.cascade = "save-update, merge, expunge"
+ eq_(rel.cascade, set(['save-update', 'merge', 'expunge']))
+
+ mapper(User, users, properties={
+ 'addresses': rel
+ })
+ am = mapper(Address, addresses)
+ configure_mappers()
+
+ eq_(rel.cascade, set(['save-update', 'merge', 'expunge']))
+
+ assert ("addresses", User) not in am._delete_orphans
+ rel.cascade = "all, delete, delete-orphan"
+ assert ("addresses", User) in am._delete_orphans
+
+ eq_(rel.cascade,
+ set(['delete', 'delete-orphan', 'expunge', 'merge',
+ 'refresh-expire', 'save-update'])
+ )
+
+
class O2MCascadeDeleteOrphanTest(fixtures.MappedTest):
run_inserts = None
diff --git a/test/orm/test_compile.py b/test/orm/test_compile.py
index fb32fb0b9..ad6778e97 100644
--- a/test/orm/test_compile.py
+++ b/test/orm/test_compile.py
@@ -167,8 +167,10 @@ class CompileTest(fixtures.ORMTest):
b = Table('b', meta, Column('id', Integer, primary_key=True),
Column('a_id', Integer, ForeignKey('a.id')))
- class A(object):pass
- class B(object):pass
+ class A(object):
+ pass
+ class B(object):
+ pass
mapper(A, a, properties={
'b':relationship(B, backref='a')
@@ -183,3 +185,29 @@ class CompileTest(fixtures.ORMTest):
configure_mappers
)
+ def test_conflicting_backref_subclass(self):
+ meta = MetaData()
+
+ a = Table('a', meta, Column('id', Integer, primary_key=True))
+ b = Table('b', meta, Column('id', Integer, primary_key=True),
+ Column('a_id', Integer, ForeignKey('a.id')))
+
+ class A(object):
+ pass
+ class B(object):
+ pass
+ class C(B):
+ pass
+
+ mapper(A, a, properties={
+ 'b': relationship(B, backref='a'),
+ 'c': relationship(C, backref='a')
+ })
+ mapper(B, b)
+ mapper(C, None, inherits=B)
+
+ assert_raises_message(
+ sa_exc.ArgumentError,
+ "Error creating backref",
+ configure_mappers
+ )
diff --git a/test/orm/test_default_strategies.py b/test/orm/test_default_strategies.py
index b986ac568..c1668cdd4 100644
--- a/test/orm/test_default_strategies.py
+++ b/test/orm/test_default_strategies.py
@@ -2,7 +2,6 @@ from test.orm import _fixtures
from sqlalchemy import testing
from sqlalchemy.orm import mapper, relationship, create_session
from sqlalchemy import util
-from sqlalchemy.util import any
import sqlalchemy as sa
from sqlalchemy.testing import eq_, assert_raises_message
diff --git a/test/orm/test_froms.py b/test/orm/test_froms.py
index 4c566948a..c701a7076 100644
--- a/test/orm/test_froms.py
+++ b/test/orm/test_froms.py
@@ -174,9 +174,7 @@ class RawSelectTest(QueryTest, AssertsCompiledSQL):
)
# a little tedious here, adding labels to work around Query's
- # auto-labelling. TODO: can we detect only one table in the
- # "froms" and then turn off use_labels ? note: this query is
- # incorrect SQL with the correlate of users in the FROM list.
+ # auto-labelling.
s = sess.query(addresses.c.id.label('id'),
addresses.c.email_address.label('email')).\
filter(addresses.c.user_id == users.c.id).correlate(users).\
@@ -188,7 +186,7 @@ class RawSelectTest(QueryTest, AssertsCompiledSQL):
"SELECT users.id AS users_id, users.name AS users_name, "
"anon_1.email AS anon_1_email "
"FROM users JOIN (SELECT addresses.id AS id, "
- "addresses.email_address AS email FROM addresses "
+ "addresses.email_address AS email FROM addresses, users "
"WHERE addresses.user_id = users.id) AS anon_1 "
"ON anon_1.id = users.id",
)
@@ -2322,3 +2320,64 @@ class TestOverlyEagerEquivalentCols(fixtures.MappedTest):
filter(Sub1.id==1).one(),
b1
)
+
+class LabelCollideTest(fixtures.MappedTest):
+ """Test handling for a label collision. This collision
+ is handled by core, see ticket:2702 as well as
+ test/sql/test_selectable->WithLabelsTest. Here we want
+ to make sure the end result is as we expect.
+
+ """
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table('foo', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('bar_id', Integer)
+ )
+ Table('foo_bar', metadata,
+ Column('id', Integer, primary_key=True),
+ )
+
+ @classmethod
+ def setup_classes(cls):
+ class Foo(cls.Basic):
+ pass
+ class Bar(cls.Basic):
+ pass
+
+ @classmethod
+ def setup_mappers(cls):
+ mapper(cls.classes.Foo, cls.tables.foo)
+ mapper(cls.classes.Bar, cls.tables.foo_bar)
+
+ @classmethod
+ def insert_data(cls):
+ s = Session()
+ s.add_all([
+ cls.classes.Foo(id=1, bar_id=2),
+ cls.classes.Bar(id=3)
+ ])
+ s.commit()
+
+ def test_overlap_plain(self):
+ s = Session()
+ row = s.query(self.classes.Foo, self.classes.Bar).all()[0]
+ def go():
+ eq_(row.Foo.id, 1)
+ eq_(row.Foo.bar_id, 2)
+ eq_(row.Bar.id, 3)
+ # all three columns are loaded independently without
+ # overlap, no additional SQL to load all attributes
+ self.assert_sql_count(testing.db, go, 0)
+
+ def test_overlap_subquery(self):
+ s = Session()
+ row = s.query(self.classes.Foo, self.classes.Bar).from_self().all()[0]
+ def go():
+ eq_(row.Foo.id, 1)
+ eq_(row.Foo.bar_id, 2)
+ eq_(row.Bar.id, 3)
+ # all three columns are loaded independently without
+ # overlap, no additional SQL to load all attributes
+ self.assert_sql_count(testing.db, go, 0) \ No newline at end of file
diff --git a/test/orm/test_instrumentation.py b/test/orm/test_instrumentation.py
index 3b548f0cd..3f8fc67b6 100644
--- a/test/orm/test_instrumentation.py
+++ b/test/orm/test_instrumentation.py
@@ -445,6 +445,20 @@ class MapperInitTest(fixtures.ORMTest):
# C is not mapped in the current implementation
assert_raises(sa.orm.exc.UnmappedClassError, class_mapper, C)
+ def test_del_warning(self):
+ class A(object):
+ def __del__(self):
+ pass
+
+ assert_raises_message(
+ sa.exc.SAWarning,
+ r"__del__\(\) method on class "
+ "<class 'test.orm.test_instrumentation.A'> will cause "
+ "unreachable cycles and memory leaks, as SQLAlchemy "
+ "instrumentation often creates reference cycles. "
+ "Please remove this method.",
+ mapper, A, self.fixture()
+ )
class OnLoadTest(fixtures.ORMTest):
"""Check that Events.load is not hit in regular attributes operations."""
diff --git a/test/orm/test_joins.py b/test/orm/test_joins.py
index 8fd38a680..4c0a193a0 100644
--- a/test/orm/test_joins.py
+++ b/test/orm/test_joins.py
@@ -2105,7 +2105,7 @@ class SelfReferentialM2MTest(fixtures.MappedTest):
sess = create_session()
eq_(sess.query(Node).filter(Node.children.any(Node.data == 'n3'
- )).all(), [Node(data='n1'), Node(data='n2')])
+ )).order_by(Node.data).all(), [Node(data='n1'), Node(data='n2')])
def test_contains(self):
Node = self.classes.Node
diff --git a/test/orm/test_mapper.py b/test/orm/test_mapper.py
index 8c5b9cd84..6b97fb135 100644
--- a/test/orm/test_mapper.py
+++ b/test/orm/test_mapper.py
@@ -407,6 +407,37 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
obj.info["q"] = "p"
eq_(obj.info, {"q": "p"})
+ def test_info_via_instrumented(self):
+ m = MetaData()
+ # create specific tables here as we don't want
+ # users.c.id.info to be pre-initialized
+ users = Table('u', m, Column('id', Integer, primary_key=True),
+ Column('name', String))
+ addresses = Table('a', m, Column('id', Integer, primary_key=True),
+ Column('name', String),
+ Column('user_id', Integer, ForeignKey('u.id')))
+ Address = self.classes.Address
+ User = self.classes.User
+
+ mapper(User, users, properties={
+ "name_lower": column_property(func.lower(users.c.name)),
+ "addresses": relationship(Address)
+ })
+ mapper(Address, addresses)
+
+ # attr.info goes down to the original Column object
+ # for the dictionary. The annotated element needs to pass
+ # this on.
+ assert 'info' not in users.c.id.__dict__
+ is_(User.id.info, users.c.id.info)
+ assert 'info' in users.c.id.__dict__
+
+ # for SQL expressions, ORM-level .info
+ is_(User.name_lower.info, User.name_lower.property.info)
+
+ # same for relationships
+ is_(User.addresses.info, User.addresses.property.info)
+
def test_add_property(self):
users, addresses, Address = (self.tables.users,
@@ -488,7 +519,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
assert hasattr(User, 'addresses')
assert "addresses" in [p.key for p in m1._polymorphic_properties]
- def test_replace_property(self):
+ def test_replace_col_prop_w_syn(self):
users, User = self.tables.users, self.classes.User
m = mapper(User, users)
@@ -514,6 +545,24 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
u.name = 'jacko'
assert m._columntoproperty[users.c.name] is m.get_property('_name')
+ def test_replace_rel_prop_with_rel_warns(self):
+ users, User = self.tables.users, self.classes.User
+ addresses, Address = self.tables.addresses, self.classes.Address
+
+ m = mapper(User, users, properties={
+ "addresses": relationship(Address)
+ })
+ mapper(Address, addresses)
+
+ assert_raises_message(
+ sa.exc.SAWarning,
+ "Property User.addresses on Mapper|User|users being replaced "
+ "with new property User.addresses; the old property will "
+ "be discarded",
+ m.add_property,
+ "addresses", relationship(Address)
+ )
+
def test_add_column_prop_deannotate(self):
User, users = self.classes.User, self.tables.users
Address, addresses = self.classes.Address, self.tables.addresses
diff --git a/test/orm/test_query.py b/test/orm/test_query.py
index f418d2581..ac9c95f41 100644
--- a/test/orm/test_query.py
+++ b/test/orm/test_query.py
@@ -194,22 +194,33 @@ class RawSelectTest(QueryTest, AssertsCompiledSQL):
Address = self.classes.Address
self.assert_compile(
- select([User]).where(User.id == Address.user_id).
- correlate(Address),
- "SELECT users.id, users.name FROM users "
- "WHERE users.id = addresses.user_id"
+ select([User.name, Address.id,
+ select([func.count(Address.id)]).\
+ where(User.id == Address.user_id).\
+ correlate(User).as_scalar()
+ ]),
+ "SELECT users.name, addresses.id, "
+ "(SELECT count(addresses.id) AS count_1 "
+ "FROM addresses WHERE users.id = addresses.user_id) AS anon_1 "
+ "FROM users, addresses"
)
def test_correlate_aliased_entity(self):
User = self.classes.User
Address = self.classes.Address
- aa = aliased(Address, name="aa")
+ uu = aliased(User, name="uu")
self.assert_compile(
- select([User]).where(User.id == aa.user_id).
- correlate(aa),
- "SELECT users.id, users.name FROM users "
- "WHERE users.id = aa.user_id"
+ select([uu.name, Address.id,
+ select([func.count(Address.id)]).\
+ where(uu.id == Address.user_id).\
+ correlate(uu).as_scalar()
+ ]),
+ # curious, "address.user_id = uu.id" is reversed here
+ "SELECT uu.name, addresses.id, "
+ "(SELECT count(addresses.id) AS count_1 "
+ "FROM addresses WHERE addresses.user_id = uu.id) AS anon_1 "
+ "FROM users AS uu, addresses"
)
def test_columns_clause_entity(self):
diff --git a/test/orm/test_rel_fn.py b/test/orm/test_rel_fn.py
index bad3a0dd7..10ba41429 100644
--- a/test/orm/test_rel_fn.py
+++ b/test/orm/test_rel_fn.py
@@ -1,4 +1,4 @@
-from sqlalchemy.testing import assert_raises, assert_raises_message, eq_, \
+from sqlalchemy.testing import assert_raises_message, eq_, \
AssertsCompiledSQL, is_
from sqlalchemy.testing import fixtures
from sqlalchemy.orm import relationships, foreign, remote
@@ -119,9 +119,9 @@ class _JoinFixtures(object):
support_sync=False,
can_be_synced_fn=_can_sync,
primaryjoin=and_(
- self.three_tab_a.c.id==self.three_tab_b.c.aid,
- self.three_tab_c.c.bid==self.three_tab_b.c.id,
- self.three_tab_c.c.aid==self.three_tab_a.c.id
+ self.three_tab_a.c.id == self.three_tab_b.c.aid,
+ self.three_tab_c.c.bid == self.three_tab_b.c.id,
+ self.three_tab_c.c.aid == self.three_tab_a.c.id
)
)
@@ -215,9 +215,9 @@ class _JoinFixtures(object):
self.composite_selfref,
self.composite_selfref,
primaryjoin=and_(
- self.composite_selfref.c.group_id==
+ self.composite_selfref.c.group_id ==
func.foo(self.composite_selfref.c.group_id),
- self.composite_selfref.c.parent_id==
+ self.composite_selfref.c.parent_id ==
self.composite_selfref.c.id
),
**kw
@@ -230,9 +230,9 @@ class _JoinFixtures(object):
self.composite_selfref,
self.composite_selfref,
primaryjoin=and_(
- remote(self.composite_selfref.c.group_id)==
+ remote(self.composite_selfref.c.group_id) ==
func.foo(self.composite_selfref.c.group_id),
- remote(self.composite_selfref.c.parent_id)==
+ remote(self.composite_selfref.c.parent_id) ==
self.composite_selfref.c.id
),
**kw
@@ -281,58 +281,60 @@ class _JoinFixtures(object):
# see test/orm/inheritance/test_abc_inheritance:TestaTobM2O
# and others there
right = self.base_w_sub_rel.join(self.rel_sub,
- self.base_w_sub_rel.c.id==self.rel_sub.c.id
+ self.base_w_sub_rel.c.id == self.rel_sub.c.id
)
return relationships.JoinCondition(
self.base_w_sub_rel,
right,
self.base_w_sub_rel,
self.rel_sub,
- primaryjoin=self.base_w_sub_rel.c.sub_id==\
+ primaryjoin=self.base_w_sub_rel.c.sub_id == \
self.rel_sub.c.id,
**kw
)
def _join_fixture_o2m_joined_sub_to_base(self, **kw):
left = self.base.join(self.sub_w_base_rel,
- self.base.c.id==self.sub_w_base_rel.c.id)
+ self.base.c.id == self.sub_w_base_rel.c.id)
return relationships.JoinCondition(
left,
self.base,
self.sub_w_base_rel,
self.base,
- primaryjoin=self.sub_w_base_rel.c.base_id==self.base.c.id
+ primaryjoin=self.sub_w_base_rel.c.base_id == self.base.c.id
)
def _join_fixture_m2o_joined_sub_to_sub_on_base(self, **kw):
# this is a late add - a variant of the test case
# in #2491 where we join on the base cols instead. only
# m2o has a problem at the time of this test.
- left = self.base.join(self.sub, self.base.c.id==self.sub.c.id)
- right = self.base.join(self.sub_w_base_rel, self.base.c.id==self.sub_w_base_rel.c.id)
+ left = self.base.join(self.sub, self.base.c.id == self.sub.c.id)
+ right = self.base.join(self.sub_w_base_rel,
+ self.base.c.id == self.sub_w_base_rel.c.id)
return relationships.JoinCondition(
left,
right,
self.sub,
self.sub_w_base_rel,
- primaryjoin=self.sub_w_base_rel.c.base_id==self.base.c.id,
+ primaryjoin=self.sub_w_base_rel.c.base_id == self.base.c.id,
)
def _join_fixture_o2m_joined_sub_to_sub(self, **kw):
- left = self.base.join(self.sub, self.base.c.id==self.sub.c.id)
- right = self.base.join(self.sub_w_sub_rel, self.base.c.id==self.sub_w_sub_rel.c.id)
+ left = self.base.join(self.sub, self.base.c.id == self.sub.c.id)
+ right = self.base.join(self.sub_w_sub_rel,
+ self.base.c.id == self.sub_w_sub_rel.c.id)
return relationships.JoinCondition(
left,
right,
self.sub,
self.sub_w_sub_rel,
- primaryjoin=self.sub.c.id==self.sub_w_sub_rel.c.sub_id
+ primaryjoin=self.sub.c.id == self.sub_w_sub_rel.c.sub_id
)
def _join_fixture_m2o_sub_to_joined_sub(self, **kw):
# see test.orm.test_mapper:MapperTest.test_add_column_prop_deannotate,
right = self.base.join(self.right_w_base_rel,
- self.base.c.id==self.right_w_base_rel.c.id)
+ self.base.c.id == self.right_w_base_rel.c.id)
return relationships.JoinCondition(
self.right_w_base_rel,
right,
@@ -343,19 +345,19 @@ class _JoinFixtures(object):
def _join_fixture_m2o_sub_to_joined_sub_func(self, **kw):
# see test.orm.test_mapper:MapperTest.test_add_column_prop_deannotate,
right = self.base.join(self.right_w_base_rel,
- self.base.c.id==self.right_w_base_rel.c.id)
+ self.base.c.id == self.right_w_base_rel.c.id)
return relationships.JoinCondition(
self.right_w_base_rel,
right,
self.right_w_base_rel,
self.right_w_base_rel,
- primaryjoin=self.right_w_base_rel.c.base_id==\
+ primaryjoin=self.right_w_base_rel.c.base_id == \
func.foo(self.base.c.id)
)
def _join_fixture_o2o_joined_sub_to_base(self, **kw):
left = self.base.join(self.sub,
- self.base.c.id==self.sub.c.id)
+ self.base.c.id == self.sub.c.id)
# see test_relationships->AmbiguousJoinInterpretedAsSelfRef
return relationships.JoinCondition(
@@ -371,7 +373,7 @@ class _JoinFixtures(object):
self.right,
self.left,
self.right,
- primaryjoin=self.left.c.id==
+ primaryjoin=self.left.c.id ==
foreign(func.foo(self.right.c.lid)),
**kw
)
@@ -382,7 +384,7 @@ class _JoinFixtures(object):
self.right,
self.left,
self.right,
- primaryjoin=self.left.c.id==
+ primaryjoin=self.left.c.id ==
func.foo(self.right.c.lid),
consider_as_foreign_keys=[self.right.c.lid],
**kw
@@ -399,7 +401,7 @@ class _JoinFixtures(object):
)
def _assert_raises_no_relevant_fks(self, fn, expr, relname,
- primary, *arg, **kw):
+ primary, *arg, **kw):
assert_raises_message(
exc.ArgumentError,
r"Could not locate any relevant foreign key columns "
@@ -414,9 +416,9 @@ class _JoinFixtures(object):
)
def _assert_raises_no_equality(self, fn, expr, relname,
- primary, *arg, **kw):
+ primary, *arg, **kw):
assert_raises_message(
- sa.exc.ArgumentError,
+ exc.ArgumentError,
"Could not locate any simple equality expressions "
"involving locally mapped foreign key columns for %s join "
"condition '%s' on relationship %s. "
@@ -431,7 +433,7 @@ class _JoinFixtures(object):
)
def _assert_raises_ambig_join(self, fn, relname, secondary_arg,
- *arg, **kw):
+ *arg, **kw):
if secondary_arg is not None:
assert_raises_message(
exc.AmbiguousForeignKeysError,
@@ -455,7 +457,7 @@ class _JoinFixtures(object):
fn, *arg, **kw)
def _assert_raises_no_join(self, fn, relname, secondary_arg,
- *arg, **kw):
+ *arg, **kw):
if secondary_arg is not None:
assert_raises_message(
exc.NoForeignKeysError,
@@ -463,7 +465,8 @@ class _JoinFixtures(object):
"parent/child tables on relationship %s - "
"there are no foreign keys linking these tables "
"via secondary table '%s'. "
- "Ensure that referencing columns are associated with a ForeignKey "
+ "Ensure that referencing columns are associated "
+ "with a ForeignKey "
"or ForeignKeyConstraint, or specify 'primaryjoin' and "
"'secondaryjoin' expressions"
% (relname, secondary_arg),
@@ -474,14 +477,16 @@ class _JoinFixtures(object):
"Could not determine join condition between "
"parent/child tables on relationship %s - "
"there are no foreign keys linking these tables. "
- "Ensure that referencing columns are associated with a ForeignKey "
+ "Ensure that referencing columns are associated "
+ "with a ForeignKey "
"or ForeignKeyConstraint, or specify a 'primaryjoin' "
"expression."
% (relname,),
fn, *arg, **kw)
-class ColumnCollectionsTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL):
+class ColumnCollectionsTest(_JoinFixtures, fixtures.TestBase,
+ AssertsCompiledSQL):
def test_determine_local_remote_pairs_o2o_joined_sub_to_base(self):
joincond = self._join_fixture_o2o_joined_sub_to_base()
eq_(
@@ -580,7 +585,7 @@ class ColumnCollectionsTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL
]
)
- def test_determine_local_remote_compound_1(self):
+ def test_determine_local_remote_compound_3(self):
joincond = self._join_fixture_compound_expression_1()
eq_(
joincond.local_remote_pairs,
@@ -627,8 +632,10 @@ class ColumnCollectionsTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL
eq_(
joincond.local_remote_pairs,
[
- (self.composite_selfref.c.group_id, self.composite_selfref.c.group_id),
- (self.composite_selfref.c.id, self.composite_selfref.c.parent_id),
+ (self.composite_selfref.c.group_id,
+ self.composite_selfref.c.group_id),
+ (self.composite_selfref.c.id,
+ self.composite_selfref.c.parent_id),
]
)
@@ -647,8 +654,10 @@ class ColumnCollectionsTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL
eq_(
joincond.local_remote_pairs,
[
- (self.composite_selfref.c.group_id, self.composite_selfref.c.group_id),
- (self.composite_selfref.c.id, self.composite_selfref.c.parent_id),
+ (self.composite_selfref.c.group_id,
+ self.composite_selfref.c.group_id),
+ (self.composite_selfref.c.id,
+ self.composite_selfref.c.parent_id),
]
)
@@ -713,8 +722,8 @@ class ColumnCollectionsTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL
eq_(
j2.local_remote_pairs,
[
- (self.m2mright.c.id, self.m2msecondary.c.rid),
- (self.m2mleft.c.id, self.m2msecondary.c.lid),
+ (self.m2mright.c.id, self.m2msecondary.c.rid),
+ (self.m2mleft.c.id, self.m2msecondary.c.lid),
]
)
@@ -997,19 +1006,22 @@ class AdaptedJoinTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL):
)
class LazyClauseTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL):
+ __dialect__ = 'default'
- def _test_lazy_clause_o2m(self):
+ def test_lazy_clause_o2m(self):
joincond = self._join_fixture_o2m()
+ lazywhere, bind_to_col, equated_columns = joincond.create_lazy_clause()
self.assert_compile(
- relationships.create_lazy_clause(joincond),
- ""
+ lazywhere,
+ ":param_1 = rgt.lid"
)
- def _test_lazy_clause_o2m_reverse(self):
+ def test_lazy_clause_o2m_reverse(self):
joincond = self._join_fixture_o2m()
+ lazywhere, bind_to_col, equated_columns =\
+ joincond.create_lazy_clause(reverse_direction=True)
self.assert_compile(
- relationships.create_lazy_clause(joincond,
- reverse_direction=True),
- ""
+ lazywhere,
+ "lft.id = :param_1"
)
diff --git a/test/orm/test_session.py b/test/orm/test_session.py
index 5c8968842..7c2e8a3b8 100644
--- a/test/orm/test_session.py
+++ b/test/orm/test_session.py
@@ -857,6 +857,150 @@ class SessionStateWFixtureTest(_fixtures.FixtureTest):
assert sa.orm.attributes.instance_state(a).session_id is None
+class NoCyclesOnTransientDetachedTest(_fixtures.FixtureTest):
+ """Test the instance_state._strong_obj link that it
+ is present only on persistent/pending objects and never
+ transient/detached.
+
+ """
+ run_inserts = None
+
+ def setup(self):
+ mapper(self.classes.User, self.tables.users)
+
+ def _assert_modified(self, u1):
+ assert sa.orm.attributes.instance_state(u1).modified
+
+ def _assert_not_modified(self, u1):
+ assert not sa.orm.attributes.instance_state(u1).modified
+
+ def _assert_cycle(self, u1):
+ assert sa.orm.attributes.instance_state(u1)._strong_obj is not None
+
+ def _assert_no_cycle(self, u1):
+ assert sa.orm.attributes.instance_state(u1)._strong_obj is None
+
+ def _persistent_fixture(self):
+ User = self.classes.User
+ u1 = User()
+ u1.name = "ed"
+ sess = Session()
+ sess.add(u1)
+ sess.flush()
+ return sess, u1
+
+ def test_transient(self):
+ User = self.classes.User
+ u1 = User()
+ u1.name = 'ed'
+ self._assert_no_cycle(u1)
+ self._assert_modified(u1)
+
+ def test_transient_to_pending(self):
+ User = self.classes.User
+ u1 = User()
+ u1.name = 'ed'
+ self._assert_modified(u1)
+ self._assert_no_cycle(u1)
+ sess = Session()
+ sess.add(u1)
+ self._assert_cycle(u1)
+ sess.flush()
+ self._assert_no_cycle(u1)
+ self._assert_not_modified(u1)
+
+ def test_dirty_persistent_to_detached_via_expunge(self):
+ sess, u1 = self._persistent_fixture()
+ u1.name = 'edchanged'
+ self._assert_cycle(u1)
+ sess.expunge(u1)
+ self._assert_no_cycle(u1)
+
+ def test_dirty_persistent_to_detached_via_close(self):
+ sess, u1 = self._persistent_fixture()
+ u1.name = 'edchanged'
+ self._assert_cycle(u1)
+ sess.close()
+ self._assert_no_cycle(u1)
+
+ def test_clean_persistent_to_detached_via_close(self):
+ sess, u1 = self._persistent_fixture()
+ self._assert_no_cycle(u1)
+ self._assert_not_modified(u1)
+ sess.close()
+ u1.name = 'edchanged'
+ self._assert_modified(u1)
+ self._assert_no_cycle(u1)
+
+ def test_detached_to_dirty_deleted(self):
+ sess, u1 = self._persistent_fixture()
+ sess.expunge(u1)
+ u1.name = 'edchanged'
+ self._assert_no_cycle(u1)
+ sess.delete(u1)
+ self._assert_cycle(u1)
+
+ def test_detached_to_dirty_persistent(self):
+ sess, u1 = self._persistent_fixture()
+ sess.expunge(u1)
+ u1.name = 'edchanged'
+ self._assert_modified(u1)
+ self._assert_no_cycle(u1)
+ sess.add(u1)
+ self._assert_cycle(u1)
+ self._assert_modified(u1)
+
+ def test_detached_to_clean_persistent(self):
+ sess, u1 = self._persistent_fixture()
+ sess.expunge(u1)
+ self._assert_no_cycle(u1)
+ self._assert_not_modified(u1)
+ sess.add(u1)
+ self._assert_no_cycle(u1)
+ self._assert_not_modified(u1)
+
+ def test_move_persistent_clean(self):
+ sess, u1 = self._persistent_fixture()
+ sess.close()
+ s2 = Session()
+ s2.add(u1)
+ self._assert_no_cycle(u1)
+ self._assert_not_modified(u1)
+
+ def test_move_persistent_dirty(self):
+ sess, u1 = self._persistent_fixture()
+ u1.name = 'edchanged'
+ self._assert_cycle(u1)
+ self._assert_modified(u1)
+ sess.close()
+ self._assert_no_cycle(u1)
+ s2 = Session()
+ s2.add(u1)
+ self._assert_cycle(u1)
+ self._assert_modified(u1)
+
+ @testing.requires.predictable_gc
+ def test_move_gc_session_persistent_dirty(self):
+ sess, u1 = self._persistent_fixture()
+ u1.name = 'edchanged'
+ self._assert_cycle(u1)
+ self._assert_modified(u1)
+ del sess
+ gc_collect()
+ self._assert_cycle(u1)
+ s2 = Session()
+ s2.add(u1)
+ self._assert_cycle(u1)
+ self._assert_modified(u1)
+
+ def test_persistent_dirty_to_expired(self):
+ sess, u1 = self._persistent_fixture()
+ u1.name = 'edchanged'
+ self._assert_cycle(u1)
+ self._assert_modified(u1)
+ sess.expire(u1)
+ self._assert_no_cycle(u1)
+ self._assert_not_modified(u1)
class WeakIdentityMapTest(_fixtures.FixtureTest):
run_inserts = None
diff --git a/test/orm/test_subquery_relations.py b/test/orm/test_subquery_relations.py
index a4cc830ee..3ee94cae9 100644
--- a/test/orm/test_subquery_relations.py
+++ b/test/orm/test_subquery_relations.py
@@ -976,6 +976,166 @@ class OrderBySecondaryTest(fixtures.MappedTest):
])
self.assert_sql_count(testing.db, go, 2)
+
+from .inheritance._poly_fixtures import _Polymorphic, Person, Engineer, Paperwork
+
+class BaseRelationFromJoinedSubclassTest(_Polymorphic):
+ @classmethod
+ def define_tables(cls, metadata):
+ people = Table('people', metadata,
+ Column('person_id', Integer,
+ primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)),
+ Column('type', String(30)))
+
+ # to test fully, PK of engineers table must be
+ # named differently from that of people
+ engineers = Table('engineers', metadata,
+ Column('engineer_id', Integer,
+ ForeignKey('people.person_id'),
+ primary_key=True),
+ Column('primary_language', String(50)))
+
+ paperwork = Table('paperwork', metadata,
+ Column('paperwork_id', Integer,
+ primary_key=True,
+ test_needs_autoincrement=True),
+ Column('description', String(50)),
+ Column('person_id', Integer,
+ ForeignKey('people.person_id')))
+
+ @classmethod
+ def setup_mappers(cls):
+ people = cls.tables.people
+ engineers = cls.tables.engineers
+ paperwork = cls.tables.paperwork
+
+ mapper(Person, people,
+ polymorphic_on=people.c.type,
+ polymorphic_identity='person',
+ properties={
+ 'paperwork': relationship(
+ Paperwork, order_by=paperwork.c.paperwork_id)})
+
+ mapper(Engineer, engineers,
+ inherits=Person,
+ polymorphic_identity='engineer')
+
+ mapper(Paperwork, paperwork)
+
+ @classmethod
+ def insert_data(cls):
+
+ e1 = Engineer(primary_language="java")
+ e2 = Engineer(primary_language="c++")
+ e1.paperwork = [Paperwork(description="tps report #1"),
+ Paperwork(description="tps report #2")]
+ e2.paperwork = [Paperwork(description="tps report #3")]
+ sess = create_session()
+ sess.add_all([e1, e2])
+ sess.flush()
+
+ def test_correct_subquery_nofrom(self):
+ sess = create_session()
+ # use Person.paperwork here just to give the least
+ # amount of context
+ q = sess.query(Engineer).\
+ filter(Engineer.primary_language == 'java').\
+ options(subqueryload(Person.paperwork))
+ def go():
+ eq_(q.all()[0].paperwork,
+ [Paperwork(description="tps report #1"),
+ Paperwork(description="tps report #2")],
+
+ )
+ self.assert_sql_execution(
+ testing.db,
+ go,
+ CompiledSQL(
+ "SELECT people.person_id AS people_person_id, "
+ "people.name AS people_name, people.type AS people_type, "
+ "engineers.engineer_id AS engineers_engineer_id, "
+ "engineers.primary_language AS engineers_primary_language "
+ "FROM people JOIN engineers ON "
+ "people.person_id = engineers.engineer_id "
+ "WHERE engineers.primary_language = :primary_language_1",
+ {"primary_language_1": "java"}
+ ),
+ # ensure we get "people JOIN engineer" here, even though
+ # primary key "people.person_id" is against "Person"
+ # *and* the path comes out as "Person.paperwork", still
+ # want to select from "Engineer" entity
+ CompiledSQL(
+ "SELECT paperwork.paperwork_id AS paperwork_paperwork_id, "
+ "paperwork.description AS paperwork_description, "
+ "paperwork.person_id AS paperwork_person_id, "
+ "anon_1.people_person_id AS anon_1_people_person_id "
+ "FROM (SELECT people.person_id AS people_person_id "
+ "FROM people JOIN engineers "
+ "ON people.person_id = engineers.engineer_id "
+ "WHERE engineers.primary_language = "
+ ":primary_language_1) AS anon_1 "
+ "JOIN paperwork "
+ "ON anon_1.people_person_id = paperwork.person_id "
+ "ORDER BY anon_1.people_person_id, paperwork.paperwork_id",
+ {"primary_language_1": "java"}
+ )
+ )
+
+ def test_correct_subquery_existingfrom(self):
+ sess = create_session()
+ # use Person.paperwork here just to give the least
+ # amount of context
+ q = sess.query(Engineer).\
+ filter(Engineer.primary_language == 'java').\
+ join(Engineer.paperwork).\
+ filter(Paperwork.description == "tps report #2").\
+ options(subqueryload(Person.paperwork))
+ def go():
+ eq_(q.one().paperwork,
+ [Paperwork(description="tps report #1"),
+ Paperwork(description="tps report #2")],
+
+ )
+ self.assert_sql_execution(
+ testing.db,
+ go,
+ CompiledSQL(
+ "SELECT people.person_id AS people_person_id, "
+ "people.name AS people_name, people.type AS people_type, "
+ "engineers.engineer_id AS engineers_engineer_id, "
+ "engineers.primary_language AS engineers_primary_language "
+ "FROM people JOIN engineers "
+ "ON people.person_id = engineers.engineer_id "
+ "JOIN paperwork ON people.person_id = paperwork.person_id "
+ "WHERE engineers.primary_language = :primary_language_1 "
+ "AND paperwork.description = :description_1",
+ {"primary_language_1": "java",
+ "description_1": "tps report #2"}
+ ),
+ CompiledSQL(
+ "SELECT paperwork.paperwork_id AS paperwork_paperwork_id, "
+ "paperwork.description AS paperwork_description, "
+ "paperwork.person_id AS paperwork_person_id, "
+ "anon_1.people_person_id AS anon_1_people_person_id "
+ "FROM (SELECT people.person_id AS people_person_id "
+ "FROM people JOIN engineers ON people.person_id = "
+ "engineers.engineer_id JOIN paperwork "
+ "ON people.person_id = paperwork.person_id "
+ "WHERE engineers.primary_language = :primary_language_1 AND "
+ "paperwork.description = :description_1) AS anon_1 "
+ "JOIN paperwork ON anon_1.people_person_id = "
+ "paperwork.person_id "
+ "ORDER BY anon_1.people_person_id, paperwork.paperwork_id",
+ {"primary_language_1": "java",
+ "description_1": "tps report #2"}
+ )
+ )
+
+
+
+
class SelfReferentialTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
diff --git a/test/orm/test_transaction.py b/test/orm/test_transaction.py
index 7df6ecf91..64b05a131 100644
--- a/test/orm/test_transaction.py
+++ b/test/orm/test_transaction.py
@@ -358,18 +358,80 @@ class SessionTransactionTest(FixtureTest):
sess.begin, subtransactions=True)
sess.close()
- def test_no_sql_during_prepare(self):
+ def test_no_sql_during_commit(self):
sess = create_session(bind=testing.db, autocommit=False)
@event.listens_for(sess, "after_commit")
def go(session):
session.execute("select 1")
assert_raises_message(sa_exc.InvalidRequestError,
- "This session is in 'prepared' state, where no "
- "further SQL can be emitted until the "
- "transaction is fully committed.",
+ "This session is in 'committed' state; no further "
+ "SQL can be emitted within this transaction.",
sess.commit)
+ def test_no_sql_during_prepare(self):
+ sess = create_session(bind=testing.db, autocommit=False, twophase=True)
+
+ sess.prepare()
+
+ assert_raises_message(sa_exc.InvalidRequestError,
+ "This session is in 'prepared' state; no further "
+ "SQL can be emitted within this transaction.",
+ sess.execute, "select 1")
+
+ def test_no_prepare_wo_twophase(self):
+ sess = create_session(bind=testing.db, autocommit=False)
+
+ assert_raises_message(sa_exc.InvalidRequestError,
+ "'twophase' mode not enabled, or not root "
+ "transaction; can't prepare.",
+ sess.prepare)
+
+ def test_closed_status_check(self):
+ sess = create_session()
+ trans = sess.begin()
+ trans.rollback()
+ assert_raises_message(
+ sa_exc.ResourceClosedError,
+ "This transaction is closed",
+ trans.rollback
+ )
+ assert_raises_message(
+ sa_exc.ResourceClosedError,
+ "This transaction is closed",
+ trans.commit
+ )
+
+ def test_deactive_status_check(self):
+ sess = create_session()
+ trans = sess.begin()
+ trans2 = sess.begin(subtransactions=True)
+ trans2.rollback()
+ assert_raises_message(
+ sa_exc.InvalidRequestError,
+ "This Session's transaction has been rolled back by a nested "
+ "rollback\(\) call. To begin a new transaction, issue "
+ "Session.rollback\(\) first.",
+ trans.commit
+ )
+
+ def test_deactive_status_check_w_exception(self):
+ sess = create_session()
+ trans = sess.begin()
+ trans2 = sess.begin(subtransactions=True)
+ try:
+ raise Exception("test")
+ except:
+ trans2.rollback(_capture_exception=True)
+ assert_raises_message(
+ sa_exc.InvalidRequestError,
+ "This Session's transaction has been rolled back due to a "
+ "previous exception during flush. To begin a new transaction "
+ "with this Session, first issue Session.rollback\(\). "
+ "Original exception was: test",
+ trans.commit
+ )
+
def _inactive_flushed_session_fixture(self):
users, User = self.tables.users, self.classes.User
diff --git a/test/orm/test_unitofwork.py b/test/orm/test_unitofwork.py
index 7fc728f1d..6be1672e1 100644
--- a/test/orm/test_unitofwork.py
+++ b/test/orm/test_unitofwork.py
@@ -616,19 +616,25 @@ class ExtraPassiveDeletesTest(fixtures.MappedTest):
def test_assertions(self):
myothertable, MyOtherClass = self.tables.myothertable, self.classes.MyOtherClass
+ mytable, MyClass = self.tables.mytable, self.classes.MyClass
+ mapper(MyClass, mytable, properties={
+ 'foo': relationship(MyOtherClass,
+ passive_deletes='all',
+ cascade="all")
+ })
mapper(MyOtherClass, myothertable)
+
assert_raises_message(
sa.exc.ArgumentError,
- "Can't set passive_deletes='all' in conjunction with 'delete' "
+ "On MyClass.foo, can't set passive_deletes='all' in conjunction with 'delete' "
"or 'delete-orphan' cascade",
- relationship, MyOtherClass,
- passive_deletes='all',
- cascade="all"
+ sa.orm.configure_mappers
)
def test_extra_passive(self):
- myothertable, MyClass, MyOtherClass, mytable = (self.tables.myothertable,
+ myothertable, MyClass, MyOtherClass, mytable = (
+ self.tables.myothertable,
self.classes.MyClass,
self.classes.MyOtherClass,
self.tables.mytable)
diff --git a/test/perf/stress_all.py b/test/perf/stress_all.py
index d17028530..890ef24a3 100644
--- a/test/perf/stress_all.py
+++ b/test/perf/stress_all.py
@@ -1,6 +1,6 @@
# -*- encoding: utf8 -*-
from datetime import *
-from sqlalchemy.util.compat import decimal
+import decimal
#from fastdec import mpd as Decimal
from cPickle import dumps, loads
diff --git a/test/profiles.txt b/test/profiles.txt
index d83280c2c..d465fa3be 100644
--- a/test/profiles.txt
+++ b/test/profiles.txt
@@ -33,6 +33,10 @@ test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_cextensions 135
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_nocextensions 135
+# TEST: test.aaa_profiling.test_compiler.CompileTest.test_select_labels
+
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_nocextensions 177
+
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_update
test.aaa_profiling.test_compiler.CompileTest.test_update 2.5_sqlite_pysqlite_nocextensions 65
@@ -107,6 +111,7 @@ test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_mysql_mysqldb_nocex
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_postgresql_psycopg2_cextensions 122,18
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_postgresql_psycopg2_nocextensions 122,18
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_sqlite_pysqlite_cextensions 122,18
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_sqlite_pysqlite_nocextensions 122,18
# TEST: test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect
@@ -116,6 +121,7 @@ test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_mysql_mysqldb_
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_postgresql_psycopg2_cextensions 82
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_postgresql_psycopg2_nocextensions 82
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_sqlite_pysqlite_cextensions 82
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_sqlite_pysqlite_nocextensions 82
# TEST: test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect
diff --git a/test/sql/test_compiler.py b/test/sql/test_compiler.py
index 3b8aed23f..9cd893c1a 100644
--- a/test/sql/test_compiler.py
+++ b/test/sql/test_compiler.py
@@ -14,8 +14,8 @@ from sqlalchemy.testing import eq_, is_, assert_raises, assert_raises_message
from sqlalchemy import testing
from sqlalchemy.testing import fixtures, AssertsCompiledSQL
from sqlalchemy import Integer, String, MetaData, Table, Column, select, \
- func, not_, cast, text, tuple_, exists, delete, update, bindparam,\
- insert, literal, and_, null, type_coerce, alias, or_, literal_column,\
+ func, not_, cast, text, tuple_, exists, update, bindparam,\
+ literal, and_, null, type_coerce, alias, or_, literal_column,\
Float, TIMESTAMP, Numeric, Date, Text, collate, union, except_,\
intersect, union_all, Boolean, distinct, join, outerjoin, asc, desc,\
over, subquery, case
@@ -87,6 +87,7 @@ keyed = Table('keyed', metadata,
Column('z', Integer),
)
+
class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = 'default'
@@ -424,35 +425,6 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
"AS z FROM keyed) AS anon_2) AS anon_1"
)
- def test_dont_overcorrelate(self):
- self.assert_compile(select([table1], from_obj=[table1,
- table1.select()]),
- "SELECT mytable.myid, mytable.name, "
- "mytable.description FROM mytable, (SELECT "
- "mytable.myid AS myid, mytable.name AS "
- "name, mytable.description AS description "
- "FROM mytable)")
-
- def test_full_correlate(self):
- # intentional
- t = table('t', column('a'), column('b'))
- s = select([t.c.a]).where(t.c.a == 1).correlate(t).as_scalar()
-
- s2 = select([t.c.a, s])
- self.assert_compile(s2,
- "SELECT t.a, (SELECT t.a WHERE t.a = :a_1) AS anon_1 FROM t")
-
- # unintentional
- t2 = table('t2', column('c'), column('d'))
- s = select([t.c.a]).where(t.c.a == t2.c.d).as_scalar()
- s2 = select([t, t2, s])
- assert_raises(exc.InvalidRequestError, str, s2)
-
- # intentional again
- s = s.correlate(t, t2)
- s2 = select([t, t2, s])
- self.assert_compile(s, "SELECT t.a WHERE t.a = t2.d")
-
def test_exists(self):
s = select([table1.c.myid]).where(table1.c.myid == 5)
@@ -2239,14 +2211,14 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
assert_raises_message(
exc.CompileError,
- "Cannot compile Column object until it's 'name' is assigned.",
+ "Cannot compile Column object until its 'name' is assigned.",
str, sel2
)
sel3 = select([my_str]).as_scalar()
assert_raises_message(
exc.CompileError,
- "Cannot compile Column object until it's 'name' is assigned.",
+ "Cannot compile Column object until its 'name' is assigned.",
str, sel3
)
@@ -2488,326 +2460,6 @@ class KwargPropagationTest(fixtures.TestBase):
class CRUDTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = 'default'
- def test_insert(self):
- # generic insert, will create bind params for all columns
- self.assert_compile(insert(table1),
- "INSERT INTO mytable (myid, name, description) "
- "VALUES (:myid, :name, :description)")
-
- # insert with user-supplied bind params for specific columns,
- # cols provided literally
- self.assert_compile(
- insert(table1, {
- table1.c.myid: bindparam('userid'),
- table1.c.name: bindparam('username')}),
- "INSERT INTO mytable (myid, name) VALUES (:userid, :username)")
-
- # insert with user-supplied bind params for specific columns, cols
- # provided as strings
- self.assert_compile(
- insert(table1, dict(myid=3, name='jack')),
- "INSERT INTO mytable (myid, name) VALUES (:myid, :name)"
- )
-
- # test with a tuple of params instead of named
- self.assert_compile(
- insert(table1, (3, 'jack', 'mydescription')),
- "INSERT INTO mytable (myid, name, description) VALUES "
- "(:myid, :name, :description)",
- checkparams={
- 'myid': 3, 'name': 'jack', 'description': 'mydescription'}
- )
-
- self.assert_compile(
- insert(table1, values={
- table1.c.myid: bindparam('userid')
- }).values(
- {table1.c.name: bindparam('username')}),
- "INSERT INTO mytable (myid, name) VALUES (:userid, :username)"
- )
-
- self.assert_compile(
- insert(table1, values=dict(myid=func.lala())),
- "INSERT INTO mytable (myid) VALUES (lala())")
-
- def test_insert_prefix(self):
- stmt = table1.insert().prefix_with("A", "B", dialect="mysql").\
- prefix_with("C", "D")
- self.assert_compile(stmt,
- "INSERT A B C D INTO mytable (myid, name, description) "
- "VALUES (%s, %s, %s)", dialect=mysql.dialect()
- )
- self.assert_compile(stmt,
- "INSERT C D INTO mytable (myid, name, description) "
- "VALUES (:myid, :name, :description)")
-
- def test_inline_default_insert(self):
- metadata = MetaData()
- table = Table('sometable', metadata,
- Column('id', Integer, primary_key=True),
- Column('foo', Integer, default=func.foobar()))
- self.assert_compile(
- table.insert(values={}, inline=True),
- "INSERT INTO sometable (foo) VALUES (foobar())")
- self.assert_compile(
- table.insert(inline=True),
- "INSERT INTO sometable (foo) VALUES (foobar())", params={})
-
- def test_insert_returning_not_in_default(self):
- stmt = table1.insert().returning(table1.c.myid)
- assert_raises_message(
- exc.CompileError,
- "RETURNING is not supported by this dialect's statement compiler.",
- stmt.compile
- )
-
- def test_empty_insert_default(self):
- stmt = table1.insert().values({}) # hide from 2to3
- self.assert_compile(stmt, "INSERT INTO mytable () VALUES ()")
-
- def test_empty_insert_default_values(self):
- stmt = table1.insert().values({}) # hide from 2to3
- dialect = default.DefaultDialect()
- dialect.supports_empty_insert = dialect.supports_default_values = True
- self.assert_compile(stmt, "INSERT INTO mytable DEFAULT VALUES",
- dialect=dialect)
-
- def test_empty_insert_not_supported(self):
- stmt = table1.insert().values({}) # hide from 2to3
- dialect = default.DefaultDialect()
- dialect.supports_empty_insert = dialect.supports_default_values = False
- assert_raises_message(
- exc.CompileError,
- "The 'default' dialect with current database version "
- "settings does not support empty inserts.",
- stmt.compile, dialect=dialect
- )
-
- def test_multivalues_insert_not_supported(self):
- stmt = table1.insert().values([{"myid": 1}, {"myid": 2}])
- dialect = default.DefaultDialect()
- assert_raises_message(
- exc.CompileError,
- "The 'default' dialect with current database version settings "
- "does not support in-place multirow inserts.",
- stmt.compile, dialect=dialect
- )
-
- def test_multivalues_insert_named(self):
- stmt = table1.insert().\
- values([{"myid": 1, "name": 'a', "description": 'b'},
- {"myid": 2, "name": 'c', "description": 'd'},
- {"myid": 3, "name": 'e', "description": 'f'}
- ])
-
- result = "INSERT INTO mytable (myid, name, description) VALUES " \
- "(:myid_0, :name_0, :description_0), " \
- "(:myid_1, :name_1, :description_1), " \
- "(:myid_2, :name_2, :description_2)"
-
- dialect = default.DefaultDialect()
- dialect.supports_multivalues_insert = True
- self.assert_compile(stmt, result,
- checkparams={
- 'description_2': 'f', 'name_2': 'e',
- 'name_0': 'a', 'name_1': 'c', 'myid_2': 3,
- 'description_0': 'b', 'myid_0': 1,
- 'myid_1': 2, 'description_1': 'd'
- },
- dialect=dialect)
-
- def test_multivalues_insert_positional(self):
- stmt = table1.insert().\
- values([{"myid": 1, "name": 'a', "description": 'b'},
- {"myid": 2, "name": 'c', "description": 'd'},
- {"myid": 3, "name": 'e', "description": 'f'}
- ])
-
- result = "INSERT INTO mytable (myid, name, description) VALUES " \
- "(%s, %s, %s), " \
- "(%s, %s, %s), " \
- "(%s, %s, %s)" \
-
- dialect = default.DefaultDialect()
- dialect.supports_multivalues_insert = True
- dialect.paramstyle = "format"
- dialect.positional = True
- self.assert_compile(stmt, result,
- checkpositional=(1, 'a', 'b', 2, 'c', 'd', 3, 'e', 'f'),
- dialect=dialect)
-
- def test_multirow_inline_default_insert(self):
- metadata = MetaData()
- table = Table('sometable', metadata,
- Column('id', Integer, primary_key=True),
- Column('data', String),
- Column('foo', Integer, default=func.foobar()))
-
- stmt = table.insert().\
- values([
- {"id": 1, "data": "data1"},
- {"id": 2, "data": "data2", "foo": "plainfoo"},
- {"id": 3, "data": "data3"},
- ])
- result = "INSERT INTO sometable (id, data, foo) VALUES "\
- "(%(id_0)s, %(data_0)s, foobar()), "\
- "(%(id_1)s, %(data_1)s, %(foo_1)s), "\
- "(%(id_2)s, %(data_2)s, foobar())"
-
- self.assert_compile(stmt, result,
- checkparams={'data_2': 'data3', 'id_0': 1, 'id_2': 3,
- 'foo_1': 'plainfoo', 'data_1': 'data2',
- 'id_1': 2, 'data_0': 'data1'},
- dialect=postgresql.dialect())
-
- def test_multirow_server_default_insert(self):
- metadata = MetaData()
- table = Table('sometable', metadata,
- Column('id', Integer, primary_key=True),
- Column('data', String),
- Column('foo', Integer, server_default=func.foobar()))
-
- stmt = table.insert().\
- values([
- {"id": 1, "data": "data1"},
- {"id": 2, "data": "data2", "foo": "plainfoo"},
- {"id": 3, "data": "data3"},
- ])
- result = "INSERT INTO sometable (id, data) VALUES "\
- "(%(id_0)s, %(data_0)s), "\
- "(%(id_1)s, %(data_1)s), "\
- "(%(id_2)s, %(data_2)s)"
-
- self.assert_compile(stmt, result,
- checkparams={'data_2': 'data3', 'id_0': 1, 'id_2': 3,
- 'data_1': 'data2',
- 'id_1': 2, 'data_0': 'data1'},
- dialect=postgresql.dialect())
-
- stmt = table.insert().\
- values([
- {"id": 1, "data": "data1", "foo": "plainfoo"},
- {"id": 2, "data": "data2"},
- {"id": 3, "data": "data3", "foo": "otherfoo"},
- ])
-
- # note the effect here is that the first set of params
- # takes effect for the rest of them, when one is absent
- result = "INSERT INTO sometable (id, data, foo) VALUES "\
- "(%(id_0)s, %(data_0)s, %(foo_0)s), "\
- "(%(id_1)s, %(data_1)s, %(foo_0)s), "\
- "(%(id_2)s, %(data_2)s, %(foo_2)s)"
-
- self.assert_compile(stmt, result,
- checkparams={'data_2': 'data3', 'id_0': 1, 'id_2': 3,
- 'data_1': 'data2',
- "foo_0": "plainfoo",
- "foo_2": "otherfoo",
- 'id_1': 2, 'data_0': 'data1'},
- dialect=postgresql.dialect())
-
- def test_update(self):
- self.assert_compile(
- update(table1, table1.c.myid == 7),
- "UPDATE mytable SET name=:name WHERE mytable.myid = :myid_1",
- params={table1.c.name: 'fred'})
- self.assert_compile(
- table1.update().where(table1.c.myid == 7).
- values({table1.c.myid: 5}),
- "UPDATE mytable SET myid=:myid WHERE mytable.myid = :myid_1",
- checkparams={'myid': 5, 'myid_1': 7})
- self.assert_compile(
- update(table1, table1.c.myid == 7),
- "UPDATE mytable SET name=:name WHERE mytable.myid = :myid_1",
- params={'name': 'fred'})
- self.assert_compile(
- update(table1, values={table1.c.name: table1.c.myid}),
- "UPDATE mytable SET name=mytable.myid")
- self.assert_compile(
- update(table1,
- whereclause=table1.c.name == bindparam('crit'),
- values={table1.c.name: 'hi'}),
- "UPDATE mytable SET name=:name WHERE mytable.name = :crit",
- params={'crit': 'notthere'},
- checkparams={'crit': 'notthere', 'name': 'hi'})
- self.assert_compile(
- update(table1, table1.c.myid == 12,
- values={table1.c.name: table1.c.myid}),
- "UPDATE mytable SET name=mytable.myid, description="
- ":description WHERE mytable.myid = :myid_1",
- params={'description': 'test'},
- checkparams={'description': 'test', 'myid_1': 12})
- self.assert_compile(
- update(table1, table1.c.myid == 12,
- values={table1.c.myid: 9}),
- "UPDATE mytable SET myid=:myid, description=:description "
- "WHERE mytable.myid = :myid_1",
- params={'myid_1': 12, 'myid': 9, 'description': 'test'})
- self.assert_compile(
- update(table1, table1.c.myid == 12),
- "UPDATE mytable SET myid=:myid WHERE mytable.myid = :myid_1",
- params={'myid': 18}, checkparams={'myid': 18, 'myid_1': 12})
- s = table1.update(table1.c.myid == 12, values={table1.c.name: 'lala'})
- c = s.compile(column_keys=['id', 'name'])
- self.assert_compile(
- update(table1, table1.c.myid == 12,
- values={table1.c.name: table1.c.myid}
- ).values({table1.c.name: table1.c.name + 'foo'}),
- "UPDATE mytable SET name=(mytable.name || :name_1), "
- "description=:description WHERE mytable.myid = :myid_1",
- params={'description': 'test'})
- eq_(str(s), str(c))
-
- self.assert_compile(update(table1,
- (table1.c.myid == func.hoho(4)) &
- (table1.c.name == literal('foo') +
- table1.c.name + literal('lala')),
- values={
- table1.c.name: table1.c.name + "lala",
- table1.c.myid: func.do_stuff(table1.c.myid, literal('hoho'))
- }), "UPDATE mytable SET myid=do_stuff(mytable.myid, :param_1), "
- "name=(mytable.name || :name_1) "
- "WHERE mytable.myid = hoho(:hoho_1) "
- "AND mytable.name = :param_2 || "
- "mytable.name || :param_3")
-
- def test_update_prefix(self):
- stmt = table1.update().prefix_with("A", "B", dialect="mysql").\
- prefix_with("C", "D")
- self.assert_compile(stmt,
- "UPDATE A B C D mytable SET myid=%s, name=%s, description=%s",
- dialect=mysql.dialect()
- )
- self.assert_compile(stmt,
- "UPDATE C D mytable SET myid=:myid, name=:name, "
- "description=:description")
-
- def test_aliased_update(self):
- talias1 = table1.alias('t1')
- self.assert_compile(
- update(talias1, talias1.c.myid == 7),
- "UPDATE mytable AS t1 SET name=:name WHERE t1.myid = :myid_1",
- params={table1.c.name: 'fred'})
- self.assert_compile(
- update(talias1, table1.c.myid == 7),
- "UPDATE mytable AS t1 SET name=:name FROM "
- "mytable WHERE mytable.myid = :myid_1",
- params={table1.c.name: 'fred'})
-
- def test_update_to_expression(self):
- """test update from an expression.
-
- this logic is triggered currently by a left side that doesn't
- have a key. The current supported use case is updating the index
- of a Postgresql ARRAY type.
-
- """
- expr = func.foo(table1.c.myid)
- assert not hasattr(expr, "key")
- self.assert_compile(
- table1.update().values({expr: 'bar'}),
- "UPDATE mytable SET foo(myid)=:param_1"
- )
def test_correlated_update(self):
# test against a straight text subquery
@@ -2880,51 +2532,6 @@ class CRUDTest(fixtures.TestBase, AssertsCompiledSQL):
"AND myothertable.othername = mytable_1.name",
dialect=mssql.dialect())
- def test_delete(self):
- self.assert_compile(
- delete(table1, table1.c.myid == 7),
- "DELETE FROM mytable WHERE mytable.myid = :myid_1")
- self.assert_compile(
- table1.delete().where(table1.c.myid == 7),
- "DELETE FROM mytable WHERE mytable.myid = :myid_1")
- self.assert_compile(
- table1.delete().where(table1.c.myid == 7).\
- where(table1.c.name == 'somename'),
- "DELETE FROM mytable WHERE mytable.myid = :myid_1 "
- "AND mytable.name = :name_1")
-
- def test_delete_prefix(self):
- stmt = table1.delete().prefix_with("A", "B", dialect="mysql").\
- prefix_with("C", "D")
- self.assert_compile(stmt,
- "DELETE A B C D FROM mytable",
- dialect=mysql.dialect()
- )
- self.assert_compile(stmt,
- "DELETE C D FROM mytable")
-
- def test_aliased_delete(self):
- talias1 = table1.alias('t1')
- self.assert_compile(
- delete(talias1).where(talias1.c.myid == 7),
- "DELETE FROM mytable AS t1 WHERE t1.myid = :myid_1")
-
- def test_correlated_delete(self):
- # test a non-correlated WHERE clause
- s = select([table2.c.othername], table2.c.otherid == 7)
- u = delete(table1, table1.c.name == s)
- self.assert_compile(u, "DELETE FROM mytable WHERE mytable.name = "
- "(SELECT myothertable.othername FROM myothertable "
- "WHERE myothertable.otherid = :otherid_1)")
-
- # test one that is actually correlated...
- s = select([table2.c.othername], table2.c.otherid == table1.c.myid)
- u = table1.delete(table1.c.name == s)
- self.assert_compile(u,
- "DELETE FROM mytable WHERE mytable.name = (SELECT "
- "myothertable.othername FROM myothertable WHERE "
- "myothertable.otherid = mytable.myid)")
-
def test_binds_that_match_columns(self):
"""test bind params named after column names
replace the normal SET/VALUES generation."""
@@ -3189,6 +2796,246 @@ class SchemaTest(fixtures.TestBase, AssertsCompiledSQL):
"(:rem_id, :datatype_id, :value)")
+class CorrelateTest(fixtures.TestBase, AssertsCompiledSQL):
+ __dialect__ = 'default'
+
+ def test_dont_overcorrelate(self):
+ self.assert_compile(select([table1], from_obj=[table1,
+ table1.select()]),
+ "SELECT mytable.myid, mytable.name, "
+ "mytable.description FROM mytable, (SELECT "
+ "mytable.myid AS myid, mytable.name AS "
+ "name, mytable.description AS description "
+ "FROM mytable)")
+
+ def _fixture(self):
+ t1 = table('t1', column('a'))
+ t2 = table('t2', column('a'))
+ return t1, t2, select([t1]).where(t1.c.a == t2.c.a)
+
+ def _assert_where_correlated(self, stmt):
+ self.assert_compile(
+ stmt,
+ "SELECT t2.a FROM t2 WHERE t2.a = "
+ "(SELECT t1.a FROM t1 WHERE t1.a = t2.a)")
+
+ def _assert_where_all_correlated(self, stmt):
+ self.assert_compile(
+ stmt,
+ "SELECT t1.a, t2.a FROM t1, t2 WHERE t2.a = "
+ "(SELECT t1.a WHERE t1.a = t2.a)")
+
+ def _assert_where_backwards_correlated(self, stmt):
+ self.assert_compile(
+ stmt,
+ "SELECT t2.a FROM t2 WHERE t2.a = "
+ "(SELECT t1.a FROM t2 WHERE t1.a = t2.a)")
+
+ def _assert_column_correlated(self, stmt):
+ self.assert_compile(stmt,
+ "SELECT t2.a, (SELECT t1.a FROM t1 WHERE t1.a = t2.a) "
+ "AS anon_1 FROM t2")
+
+ def _assert_column_all_correlated(self, stmt):
+ self.assert_compile(stmt,
+ "SELECT t1.a, t2.a, "
+ "(SELECT t1.a WHERE t1.a = t2.a) AS anon_1 FROM t1, t2")
+
+ def _assert_column_backwards_correlated(self, stmt):
+ self.assert_compile(stmt,
+ "SELECT t2.a, (SELECT t1.a FROM t2 WHERE t1.a = t2.a) "
+ "AS anon_1 FROM t2")
+
+ def _assert_having_correlated(self, stmt):
+ self.assert_compile(stmt,
+ "SELECT t2.a FROM t2 HAVING t2.a = "
+ "(SELECT t1.a FROM t1 WHERE t1.a = t2.a)")
+
+ def _assert_from_uncorrelated(self, stmt):
+ self.assert_compile(stmt,
+ "SELECT t2.a, anon_1.a FROM t2, "
+ "(SELECT t1.a AS a FROM t1, t2 WHERE t1.a = t2.a) AS anon_1")
+
+ def _assert_from_all_uncorrelated(self, stmt):
+ self.assert_compile(stmt,
+ "SELECT t1.a, t2.a, anon_1.a FROM t1, t2, "
+ "(SELECT t1.a AS a FROM t1, t2 WHERE t1.a = t2.a) AS anon_1")
+
+ def _assert_where_uncorrelated(self, stmt):
+ self.assert_compile(stmt,
+ "SELECT t2.a FROM t2 WHERE t2.a = "
+ "(SELECT t1.a FROM t1, t2 WHERE t1.a = t2.a)")
+
+ def _assert_column_uncorrelated(self, stmt):
+ self.assert_compile(stmt,
+ "SELECT t2.a, (SELECT t1.a FROM t1, t2 "
+ "WHERE t1.a = t2.a) AS anon_1 FROM t2")
+
+ def _assert_having_uncorrelated(self, stmt):
+ self.assert_compile(stmt,
+ "SELECT t2.a FROM t2 HAVING t2.a = "
+ "(SELECT t1.a FROM t1, t2 WHERE t1.a = t2.a)")
+
+ def _assert_where_single_full_correlated(self, stmt):
+ self.assert_compile(stmt,
+ "SELECT t1.a FROM t1 WHERE t1.a = (SELECT t1.a)")
+
+ def test_correlate_semiauto_where(self):
+ t1, t2, s1 = self._fixture()
+ self._assert_where_correlated(
+ select([t2]).where(t2.c.a == s1.correlate(t2)))
+
+ def test_correlate_semiauto_column(self):
+ t1, t2, s1 = self._fixture()
+ self._assert_column_correlated(
+ select([t2, s1.correlate(t2).as_scalar()]))
+
+ def test_correlate_semiauto_from(self):
+ t1, t2, s1 = self._fixture()
+ self._assert_from_uncorrelated(
+ select([t2, s1.correlate(t2).alias()]))
+
+ def test_correlate_semiauto_having(self):
+ t1, t2, s1 = self._fixture()
+ self._assert_having_correlated(
+ select([t2]).having(t2.c.a == s1.correlate(t2)))
+
+ def test_correlate_except_inclusion_where(self):
+ t1, t2, s1 = self._fixture()
+ self._assert_where_correlated(
+ select([t2]).where(t2.c.a == s1.correlate_except(t1)))
+
+ def test_correlate_except_exclusion_where(self):
+ t1, t2, s1 = self._fixture()
+ self._assert_where_backwards_correlated(
+ select([t2]).where(t2.c.a == s1.correlate_except(t2)))
+
+ def test_correlate_except_inclusion_column(self):
+ t1, t2, s1 = self._fixture()
+ self._assert_column_correlated(
+ select([t2, s1.correlate_except(t1).as_scalar()]))
+
+ def test_correlate_except_exclusion_column(self):
+ t1, t2, s1 = self._fixture()
+ self._assert_column_backwards_correlated(
+ select([t2, s1.correlate_except(t2).as_scalar()]))
+
+ def test_correlate_except_inclusion_from(self):
+ t1, t2, s1 = self._fixture()
+ self._assert_from_uncorrelated(
+ select([t2, s1.correlate_except(t1).alias()]))
+
+ def test_correlate_except_exclusion_from(self):
+ t1, t2, s1 = self._fixture()
+ self._assert_from_uncorrelated(
+ select([t2, s1.correlate_except(t2).alias()]))
+
+ def test_correlate_except_having(self):
+ t1, t2, s1 = self._fixture()
+ self._assert_having_correlated(
+ select([t2]).having(t2.c.a == s1.correlate_except(t1)))
+
+ def test_correlate_auto_where(self):
+ t1, t2, s1 = self._fixture()
+ self._assert_where_correlated(
+ select([t2]).where(t2.c.a == s1))
+
+ def test_correlate_auto_column(self):
+ t1, t2, s1 = self._fixture()
+ self._assert_column_correlated(
+ select([t2, s1.as_scalar()]))
+
+ def test_correlate_auto_from(self):
+ t1, t2, s1 = self._fixture()
+ self._assert_from_uncorrelated(
+ select([t2, s1.alias()]))
+
+ def test_correlate_auto_having(self):
+ t1, t2, s1 = self._fixture()
+ self._assert_having_correlated(
+ select([t2]).having(t2.c.a == s1))
+
+ def test_correlate_disabled_where(self):
+ t1, t2, s1 = self._fixture()
+ self._assert_where_uncorrelated(
+ select([t2]).where(t2.c.a == s1.correlate(None)))
+
+ def test_correlate_disabled_column(self):
+ t1, t2, s1 = self._fixture()
+ self._assert_column_uncorrelated(
+ select([t2, s1.correlate(None).as_scalar()]))
+
+ def test_correlate_disabled_from(self):
+ t1, t2, s1 = self._fixture()
+ self._assert_from_uncorrelated(
+ select([t2, s1.correlate(None).alias()]))
+
+ def test_correlate_disabled_having(self):
+ t1, t2, s1 = self._fixture()
+ self._assert_having_uncorrelated(
+ select([t2]).having(t2.c.a == s1.correlate(None)))
+
+ def test_correlate_all_where(self):
+ t1, t2, s1 = self._fixture()
+ self._assert_where_all_correlated(
+ select([t1, t2]).where(t2.c.a == s1.correlate(t1, t2)))
+
+ def test_correlate_all_column(self):
+ t1, t2, s1 = self._fixture()
+ self._assert_column_all_correlated(
+ select([t1, t2, s1.correlate(t1, t2).as_scalar()]))
+
+ def test_correlate_all_from(self):
+ t1, t2, s1 = self._fixture()
+ self._assert_from_all_uncorrelated(
+ select([t1, t2, s1.correlate(t1, t2).alias()]))
+
+ def test_correlate_where_all_unintentional(self):
+ t1, t2, s1 = self._fixture()
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "returned no FROM clauses due to auto-correlation",
+ select([t1, t2]).where(t2.c.a == s1).compile
+ )
+
+ def test_correlate_from_all_ok(self):
+ t1, t2, s1 = self._fixture()
+ self.assert_compile(
+ select([t1, t2, s1]),
+ "SELECT t1.a, t2.a, a FROM t1, t2, "
+ "(SELECT t1.a AS a FROM t1, t2 WHERE t1.a = t2.a)"
+ )
+
+ def test_correlate_auto_where_singlefrom(self):
+ t1, t2, s1 = self._fixture()
+ s = select([t1.c.a])
+ s2 = select([t1]).where(t1.c.a == s)
+ self.assert_compile(s2,
+ "SELECT t1.a FROM t1 WHERE t1.a = "
+ "(SELECT t1.a FROM t1)")
+
+ def test_correlate_semiauto_where_singlefrom(self):
+ t1, t2, s1 = self._fixture()
+
+ s = select([t1.c.a])
+
+ s2 = select([t1]).where(t1.c.a == s.correlate(t1))
+ self._assert_where_single_full_correlated(s2)
+
+ def test_correlate_except_semiauto_where_singlefrom(self):
+ t1, t2, s1 = self._fixture()
+
+ s = select([t1.c.a])
+
+ s2 = select([t1]).where(t1.c.a == s.correlate_except(t2))
+ self._assert_where_single_full_correlated(s2)
+
+ def test_correlate_alone_noeffect(self):
+ # new as of #2668
+ t1, t2, s1 = self._fixture()
+ self.assert_compile(s1.correlate(t1, t2),
+ "SELECT t1.a FROM t1, t2 WHERE t1.a = t2.a")
+
class CoercionTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = 'default'
@@ -3315,4 +3162,4 @@ class ResultMapTest(fixtures.TestBase):
)
is_(
comp.result_map['t1_a'][1][2], t1.c.a
- ) \ No newline at end of file
+ )
diff --git a/test/sql/test_constraints.py b/test/sql/test_constraints.py
index ab294e1eb..026095c3b 100644
--- a/test/sql/test_constraints.py
+++ b/test/sql/test_constraints.py
@@ -7,6 +7,7 @@ from sqlalchemy import testing
from sqlalchemy.testing import engines
from sqlalchemy.testing import eq_
from sqlalchemy.testing.assertsql import AllOf, RegexSQL, ExactSQL, CompiledSQL
+from sqlalchemy.sql import table, column
class ConstraintGenTest(fixtures.TestBase, AssertsExecutionResults):
__dialect__ = 'default'
@@ -753,6 +754,18 @@ class ConstraintAPITest(fixtures.TestBase):
c = Index('foo', t.c.a)
assert c in t.indexes
+ def test_auto_append_lowercase_table(self):
+ t = table('t', column('a'))
+ t2 = table('t2', column('a'))
+ for c in (
+ UniqueConstraint(t.c.a),
+ CheckConstraint(t.c.a > 5),
+ ForeignKeyConstraint([t.c.a], [t2.c.a]),
+ PrimaryKeyConstraint(t.c.a),
+ Index('foo', t.c.a)
+ ):
+ assert True
+
def test_tometadata_ok(self):
m = MetaData()
diff --git a/test/sql/test_delete.py b/test/sql/test_delete.py
new file mode 100644
index 000000000..b56731515
--- /dev/null
+++ b/test/sql/test_delete.py
@@ -0,0 +1,86 @@
+#! coding:utf-8
+
+from sqlalchemy import Column, Integer, String, Table, delete, select
+from sqlalchemy.dialects import mysql
+from sqlalchemy.testing import AssertsCompiledSQL, fixtures
+
+
+class _DeleteTestBase(object):
+ @classmethod
+ def define_tables(cls, metadata):
+ Table('mytable', metadata,
+ Column('myid', Integer),
+ Column('name', String(30)),
+ Column('description', String(50)))
+ Table('myothertable', metadata,
+ Column('otherid', Integer),
+ Column('othername', String(30)))
+
+
+class DeleteTest(_DeleteTestBase, fixtures.TablesTest, AssertsCompiledSQL):
+ __dialect__ = 'default'
+
+ def test_delete(self):
+ table1 = self.tables.mytable
+
+ self.assert_compile(
+ delete(table1, table1.c.myid == 7),
+ 'DELETE FROM mytable WHERE mytable.myid = :myid_1')
+
+ self.assert_compile(
+ table1.delete().where(table1.c.myid == 7),
+ 'DELETE FROM mytable WHERE mytable.myid = :myid_1')
+
+ self.assert_compile(
+ table1.delete().
+ where(table1.c.myid == 7).
+ where(table1.c.name == 'somename'),
+ 'DELETE FROM mytable '
+ 'WHERE mytable.myid = :myid_1 '
+ 'AND mytable.name = :name_1')
+
+ def test_prefix_with(self):
+ table1 = self.tables.mytable
+
+ stmt = table1.delete().\
+ prefix_with('A', 'B', dialect='mysql').\
+ prefix_with('C', 'D')
+
+ self.assert_compile(stmt,
+ 'DELETE C D FROM mytable')
+
+ self.assert_compile(stmt,
+ 'DELETE A B C D FROM mytable',
+ dialect=mysql.dialect())
+
+ def test_alias(self):
+ table1 = self.tables.mytable
+
+ talias1 = table1.alias('t1')
+ stmt = delete(talias1).where(talias1.c.myid == 7)
+
+ self.assert_compile(stmt,
+ 'DELETE FROM mytable AS t1 WHERE t1.myid = :myid_1')
+
+ def test_correlated(self):
+ table1, table2 = self.tables.mytable, self.tables.myothertable
+
+ # test a non-correlated WHERE clause
+ s = select([table2.c.othername], table2.c.otherid == 7)
+ self.assert_compile(delete(table1, table1.c.name == s),
+ 'DELETE FROM mytable '
+ 'WHERE mytable.name = ('
+ 'SELECT myothertable.othername '
+ 'FROM myothertable '
+ 'WHERE myothertable.otherid = :otherid_1'
+ ')')
+
+ # test one that is actually correlated...
+ s = select([table2.c.othername], table2.c.otherid == table1.c.myid)
+ self.assert_compile(table1.delete(table1.c.name == s),
+ 'DELETE FROM mytable '
+ 'WHERE mytable.name = ('
+ 'SELECT myothertable.othername '
+ 'FROM myothertable '
+ 'WHERE myothertable.otherid = mytable.myid'
+ ')')
diff --git a/test/sql/test_functions.py b/test/sql/test_functions.py
index ae8e28e24..b325b7763 100644
--- a/test/sql/test_functions.py
+++ b/test/sql/test_functions.py
@@ -8,7 +8,7 @@ from sqlalchemy.testing.engines import all_dialects
from sqlalchemy import types as sqltypes
from sqlalchemy.sql import functions
from sqlalchemy.sql.functions import GenericFunction
-from sqlalchemy.util.compat import decimal
+import decimal
from sqlalchemy import testing
from sqlalchemy.testing import fixtures, AssertsCompiledSQL, engines
from sqlalchemy.dialects import sqlite, postgresql, mysql, oracle
diff --git a/test/sql/test_generative.py b/test/sql/test_generative.py
index e868cbe88..8b2abef0e 100644
--- a/test/sql/test_generative.py
+++ b/test/sql/test_generative.py
@@ -590,13 +590,18 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL):
def test_correlated_select(self):
s = select(['*'], t1.c.col1 == t2.c.col1,
from_obj=[t1, t2]).correlate(t2)
+
class Vis(CloningVisitor):
def visit_select(self, select):
select.append_whereclause(t1.c.col2 == 7)
- self.assert_compile(Vis().traverse(s),
- "SELECT * FROM table1 WHERE table1.col1 = table2.col1 "
- "AND table1.col2 = :col2_1")
+ self.assert_compile(
+ select([t2]).where(t2.c.col1 == Vis().traverse(s)),
+ "SELECT table2.col1, table2.col2, table2.col3 "
+ "FROM table2 WHERE table2.col1 = "
+ "(SELECT * FROM table1 WHERE table1.col1 = table2.col1 "
+ "AND table1.col2 = :col2_1)"
+ )
def test_this_thing(self):
s = select([t1]).where(t1.c.col1 == 'foo').alias()
@@ -616,35 +621,49 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL):
'AS table1_1 WHERE table1_1.col1 = '
':col1_1) AS anon_1')
- def test_select_fromtwice(self):
+ def test_select_fromtwice_one(self):
t1a = t1.alias()
- s = select([1], t1.c.col1 == t1a.c.col1, from_obj=t1a).correlate(t1)
+ s = select([1], t1.c.col1 == t1a.c.col1, from_obj=t1a).correlate(t1a)
+ s = select([t1]).where(t1.c.col1 == s)
self.assert_compile(s,
- 'SELECT 1 FROM table1 AS table1_1 WHERE '
- 'table1.col1 = table1_1.col1')
-
+ "SELECT table1.col1, table1.col2, table1.col3 FROM table1 "
+ "WHERE table1.col1 = "
+ "(SELECT 1 FROM table1, table1 AS table1_1 "
+ "WHERE table1.col1 = table1_1.col1)"
+ )
s = CloningVisitor().traverse(s)
self.assert_compile(s,
- 'SELECT 1 FROM table1 AS table1_1 WHERE '
- 'table1.col1 = table1_1.col1')
+ "SELECT table1.col1, table1.col2, table1.col3 FROM table1 "
+ "WHERE table1.col1 = "
+ "(SELECT 1 FROM table1, table1 AS table1_1 "
+ "WHERE table1.col1 = table1_1.col1)")
+ def test_select_fromtwice_two(self):
s = select([t1]).where(t1.c.col1 == 'foo').alias()
s2 = select([1], t1.c.col1 == s.c.col1, from_obj=s).correlate(t1)
- self.assert_compile(s2,
- 'SELECT 1 FROM (SELECT table1.col1 AS '
- 'col1, table1.col2 AS col2, table1.col3 AS '
- 'col3 FROM table1 WHERE table1.col1 = '
- ':col1_1) AS anon_1 WHERE table1.col1 = '
- 'anon_1.col1')
- s2 = ReplacingCloningVisitor().traverse(s2)
- self.assert_compile(s2,
- 'SELECT 1 FROM (SELECT table1.col1 AS '
- 'col1, table1.col2 AS col2, table1.col3 AS '
- 'col3 FROM table1 WHERE table1.col1 = '
- ':col1_1) AS anon_1 WHERE table1.col1 = '
- 'anon_1.col1')
+ s3 = select([t1]).where(t1.c.col1 == s2)
+ self.assert_compile(s3,
+ "SELECT table1.col1, table1.col2, table1.col3 "
+ "FROM table1 WHERE table1.col1 = "
+ "(SELECT 1 FROM "
+ "(SELECT table1.col1 AS col1, table1.col2 AS col2, "
+ "table1.col3 AS col3 FROM table1 "
+ "WHERE table1.col1 = :col1_1) "
+ "AS anon_1 WHERE table1.col1 = anon_1.col1)"
+ )
+
+ s4 = ReplacingCloningVisitor().traverse(s3)
+ self.assert_compile(s4,
+ "SELECT table1.col1, table1.col2, table1.col3 "
+ "FROM table1 WHERE table1.col1 = "
+ "(SELECT 1 FROM "
+ "(SELECT table1.col1 AS col1, table1.col2 AS col2, "
+ "table1.col3 AS col3 FROM table1 "
+ "WHERE table1.col1 = :col1_1) "
+ "AS anon_1 WHERE table1.col1 = anon_1.col1)"
+ )
class ClauseAdapterTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = 'default'
@@ -763,67 +782,125 @@ class ClauseAdapterTest(fixtures.TestBase, AssertsCompiledSQL):
'FROM addresses WHERE users_1.id = '
'addresses.user_id')
- def test_table_to_alias(self):
-
+ def test_table_to_alias_1(self):
t1alias = t1.alias('t1alias')
vis = sql_util.ClauseAdapter(t1alias)
ff = vis.traverse(func.count(t1.c.col1).label('foo'))
assert list(_from_objects(ff)) == [t1alias]
+ def test_table_to_alias_2(self):
+ t1alias = t1.alias('t1alias')
+ vis = sql_util.ClauseAdapter(t1alias)
self.assert_compile(vis.traverse(select(['*'], from_obj=[t1])),
'SELECT * FROM table1 AS t1alias')
+
+ def test_table_to_alias_3(self):
+ t1alias = t1.alias('t1alias')
+ vis = sql_util.ClauseAdapter(t1alias)
self.assert_compile(select(['*'], t1.c.col1 == t2.c.col2),
'SELECT * FROM table1, table2 WHERE '
'table1.col1 = table2.col2')
+
+ def test_table_to_alias_4(self):
+ t1alias = t1.alias('t1alias')
+ vis = sql_util.ClauseAdapter(t1alias)
self.assert_compile(vis.traverse(select(['*'], t1.c.col1
== t2.c.col2)),
'SELECT * FROM table1 AS t1alias, table2 '
'WHERE t1alias.col1 = table2.col2')
+
+ def test_table_to_alias_5(self):
+ t1alias = t1.alias('t1alias')
+ vis = sql_util.ClauseAdapter(t1alias)
self.assert_compile(vis.traverse(select(['*'], t1.c.col1
== t2.c.col2, from_obj=[t1, t2])),
'SELECT * FROM table1 AS t1alias, table2 '
'WHERE t1alias.col1 = table2.col2')
- self.assert_compile(vis.traverse(select(['*'], t1.c.col1
- == t2.c.col2, from_obj=[t1,
- t2]).correlate(t1)),
- 'SELECT * FROM table2 WHERE t1alias.col1 = '
- 'table2.col2')
- self.assert_compile(vis.traverse(select(['*'], t1.c.col1
- == t2.c.col2, from_obj=[t1,
- t2]).correlate(t2)),
- 'SELECT * FROM table1 AS t1alias WHERE '
- 't1alias.col1 = table2.col2')
+
+ def test_table_to_alias_6(self):
+ t1alias = t1.alias('t1alias')
+ vis = sql_util.ClauseAdapter(t1alias)
+ self.assert_compile(
+ select([t1alias, t2]).where(t1alias.c.col1 ==
+ vis.traverse(select(['*'],
+ t1.c.col1 == t2.c.col2,
+ from_obj=[t1, t2]).correlate(t1))),
+ "SELECT t1alias.col1, t1alias.col2, t1alias.col3, "
+ "table2.col1, table2.col2, table2.col3 "
+ "FROM table1 AS t1alias, table2 WHERE t1alias.col1 = "
+ "(SELECT * FROM table2 WHERE t1alias.col1 = table2.col2)"
+ )
+
+ def test_table_to_alias_7(self):
+ t1alias = t1.alias('t1alias')
+ vis = sql_util.ClauseAdapter(t1alias)
+ self.assert_compile(
+ select([t1alias, t2]).where(t1alias.c.col1 ==
+ vis.traverse(select(['*'],
+ t1.c.col1 == t2.c.col2,
+ from_obj=[t1, t2]).correlate(t2))),
+ "SELECT t1alias.col1, t1alias.col2, t1alias.col3, "
+ "table2.col1, table2.col2, table2.col3 "
+ "FROM table1 AS t1alias, table2 "
+ "WHERE t1alias.col1 = "
+ "(SELECT * FROM table1 AS t1alias "
+ "WHERE t1alias.col1 = table2.col2)")
+
+ def test_table_to_alias_8(self):
+ t1alias = t1.alias('t1alias')
+ vis = sql_util.ClauseAdapter(t1alias)
self.assert_compile(vis.traverse(case([(t1.c.col1 == 5,
t1.c.col2)], else_=t1.c.col1)),
'CASE WHEN (t1alias.col1 = :col1_1) THEN '
't1alias.col2 ELSE t1alias.col1 END')
+
+ def test_table_to_alias_9(self):
+ t1alias = t1.alias('t1alias')
+ vis = sql_util.ClauseAdapter(t1alias)
self.assert_compile(vis.traverse(case([(5, t1.c.col2)],
value=t1.c.col1, else_=t1.c.col1)),
'CASE t1alias.col1 WHEN :param_1 THEN '
't1alias.col2 ELSE t1alias.col1 END')
+ def test_table_to_alias_10(self):
s = select(['*'], from_obj=[t1]).alias('foo')
self.assert_compile(s.select(),
'SELECT foo.* FROM (SELECT * FROM table1) '
'AS foo')
+
+ def test_table_to_alias_11(self):
+ s = select(['*'], from_obj=[t1]).alias('foo')
+ t1alias = t1.alias('t1alias')
+ vis = sql_util.ClauseAdapter(t1alias)
self.assert_compile(vis.traverse(s.select()),
'SELECT foo.* FROM (SELECT * FROM table1 '
'AS t1alias) AS foo')
+
+ def test_table_to_alias_12(self):
+ s = select(['*'], from_obj=[t1]).alias('foo')
self.assert_compile(s.select(),
'SELECT foo.* FROM (SELECT * FROM table1) '
'AS foo')
+
+ def test_table_to_alias_13(self):
+ t1alias = t1.alias('t1alias')
+ vis = sql_util.ClauseAdapter(t1alias)
ff = vis.traverse(func.count(t1.c.col1).label('foo'))
self.assert_compile(select([ff]),
'SELECT count(t1alias.col1) AS foo FROM '
'table1 AS t1alias')
assert list(_from_objects(ff)) == [t1alias]
+ #def test_table_to_alias_2(self):
# TODO: self.assert_compile(vis.traverse(select([func.count(t1.c
# .col1).l abel('foo')]), clone=True), "SELECT
# count(t1alias.col1) AS foo FROM table1 AS t1alias")
+ def test_table_to_alias_14(self):
+ t1alias = t1.alias('t1alias')
+ vis = sql_util.ClauseAdapter(t1alias)
t2alias = t2.alias('t2alias')
vis.chain(sql_util.ClauseAdapter(t2alias))
self.assert_compile(vis.traverse(select(['*'], t1.c.col1
@@ -831,28 +908,59 @@ class ClauseAdapterTest(fixtures.TestBase, AssertsCompiledSQL):
'SELECT * FROM table1 AS t1alias, table2 '
'AS t2alias WHERE t1alias.col1 = '
't2alias.col2')
+
+ def test_table_to_alias_15(self):
+ t1alias = t1.alias('t1alias')
+ vis = sql_util.ClauseAdapter(t1alias)
+ t2alias = t2.alias('t2alias')
+ vis.chain(sql_util.ClauseAdapter(t2alias))
self.assert_compile(vis.traverse(select(['*'], t1.c.col1
== t2.c.col2, from_obj=[t1, t2])),
'SELECT * FROM table1 AS t1alias, table2 '
'AS t2alias WHERE t1alias.col1 = '
't2alias.col2')
- self.assert_compile(vis.traverse(select(['*'], t1.c.col1
- == t2.c.col2, from_obj=[t1,
- t2]).correlate(t1)),
- 'SELECT * FROM table2 AS t2alias WHERE '
- 't1alias.col1 = t2alias.col2')
- self.assert_compile(vis.traverse(select(['*'], t1.c.col1
- == t2.c.col2, from_obj=[t1,
- t2]).correlate(t2)),
- 'SELECT * FROM table1 AS t1alias WHERE '
- 't1alias.col1 = t2alias.col2')
+
+ def test_table_to_alias_16(self):
+ t1alias = t1.alias('t1alias')
+ vis = sql_util.ClauseAdapter(t1alias)
+ t2alias = t2.alias('t2alias')
+ vis.chain(sql_util.ClauseAdapter(t2alias))
+ self.assert_compile(
+ select([t1alias, t2alias]).where(
+ t1alias.c.col1 ==
+ vis.traverse(select(['*'],
+ t1.c.col1 == t2.c.col2,
+ from_obj=[t1, t2]).correlate(t1))
+ ),
+ "SELECT t1alias.col1, t1alias.col2, t1alias.col3, "
+ "t2alias.col1, t2alias.col2, t2alias.col3 "
+ "FROM table1 AS t1alias, table2 AS t2alias "
+ "WHERE t1alias.col1 = "
+ "(SELECT * FROM table2 AS t2alias "
+ "WHERE t1alias.col1 = t2alias.col2)"
+ )
+
+ def test_table_to_alias_17(self):
+ t1alias = t1.alias('t1alias')
+ vis = sql_util.ClauseAdapter(t1alias)
+ t2alias = t2.alias('t2alias')
+ vis.chain(sql_util.ClauseAdapter(t2alias))
+ self.assert_compile(
+ t2alias.select().where(t2alias.c.col2 ==
+ vis.traverse(select(['*'],
+ t1.c.col1 == t2.c.col2,
+ from_obj=[t1, t2]).correlate(t2))),
+ 'SELECT t2alias.col1, t2alias.col2, t2alias.col3 '
+ 'FROM table2 AS t2alias WHERE t2alias.col2 = '
+ '(SELECT * FROM table1 AS t1alias WHERE '
+ 't1alias.col1 = t2alias.col2)')
def test_include_exclude(self):
m = MetaData()
- a=Table( 'a',m,
- Column( 'id', Integer, primary_key=True),
- Column( 'xxx_id', Integer,
- ForeignKey( 'a.id', name='adf',use_alter=True )
+ a = Table('a', m,
+ Column('id', Integer, primary_key=True),
+ Column('xxx_id', Integer,
+ ForeignKey('a.id', name='adf', use_alter=True)
)
)
@@ -1167,93 +1275,6 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
'SELECT table1.col1, table1.col2, '
'table1.col3 FROM table1')
- def test_correlation(self):
- s = select([t2], t1.c.col1 == t2.c.col1)
- self.assert_compile(s,
- 'SELECT table2.col1, table2.col2, '
- 'table2.col3 FROM table2, table1 WHERE '
- 'table1.col1 = table2.col1')
- s2 = select([t1], t1.c.col2 == s.c.col2)
- # dont correlate in a FROM entry
- self.assert_compile(s2,
- 'SELECT table1.col1, table1.col2, '
- 'table1.col3 FROM table1, (SELECT '
- 'table2.col1 AS col1, table2.col2 AS col2, '
- 'table2.col3 AS col3 FROM table2, table1 WHERE '
- 'table1.col1 = table2.col1) WHERE '
- 'table1.col2 = col2')
- s3 = s.correlate(None)
- self.assert_compile(select([t1], t1.c.col2 == s3.c.col2),
- 'SELECT table1.col1, table1.col2, '
- 'table1.col3 FROM table1, (SELECT '
- 'table2.col1 AS col1, table2.col2 AS col2, '
- 'table2.col3 AS col3 FROM table2, table1 '
- 'WHERE table1.col1 = table2.col1) WHERE '
- 'table1.col2 = col2')
- # dont correlate in a FROM entry
- self.assert_compile(select([t1], t1.c.col2 == s.c.col2),
- 'SELECT table1.col1, table1.col2, '
- 'table1.col3 FROM table1, (SELECT '
- 'table2.col1 AS col1, table2.col2 AS col2, '
- 'table2.col3 AS col3 FROM table2, table1 WHERE '
- 'table1.col1 = table2.col1) WHERE '
- 'table1.col2 = col2')
-
- # but correlate in a WHERE entry
- s_w = select([t2.c.col1]).where(t1.c.col1 == t2.c.col1)
- self.assert_compile(select([t1], t1.c.col2 == s_w),
- 'SELECT table1.col1, table1.col2, table1.col3 '
- 'FROM table1 WHERE table1.col2 = '
- '(SELECT table2.col1 FROM table2 '
- 'WHERE table1.col1 = table2.col1)'
- )
-
-
- s4 = s3.correlate(t1)
- self.assert_compile(select([t1], t1.c.col2 == s4.c.col2),
- 'SELECT table1.col1, table1.col2, '
- 'table1.col3 FROM table1, (SELECT '
- 'table2.col1 AS col1, table2.col2 AS col2, '
- 'table2.col3 AS col3 FROM table2 WHERE '
- 'table1.col1 = table2.col1) WHERE '
- 'table1.col2 = col2')
-
- self.assert_compile(select([t1], t1.c.col2 == s3.c.col2),
- 'SELECT table1.col1, table1.col2, '
- 'table1.col3 FROM table1, (SELECT '
- 'table2.col1 AS col1, table2.col2 AS col2, '
- 'table2.col3 AS col3 FROM table2, table1 '
- 'WHERE table1.col1 = table2.col1) WHERE '
- 'table1.col2 = col2')
-
- self.assert_compile(t1.select().where(t1.c.col1
- == 5).order_by(t1.c.col3),
- 'SELECT table1.col1, table1.col2, '
- 'table1.col3 FROM table1 WHERE table1.col1 '
- '= :col1_1 ORDER BY table1.col3')
-
- # dont correlate in FROM
- self.assert_compile(t1.select().select_from(select([t2],
- t2.c.col1
- == t1.c.col1)).order_by(t1.c.col3),
- 'SELECT table1.col1, table1.col2, '
- 'table1.col3 FROM table1, (SELECT '
- 'table2.col1 AS col1, table2.col2 AS col2, '
- 'table2.col3 AS col3 FROM table2, table1 WHERE '
- 'table2.col1 = table1.col1) ORDER BY '
- 'table1.col3')
-
- # still works if you actually add that table to correlate()
- s = select([t2], t2.c.col1 == t1.c.col1)
- s = s.correlate(t1).order_by(t2.c.col3)
-
- self.assert_compile(t1.select().select_from(s).order_by(t1.c.col3),
- 'SELECT table1.col1, table1.col2, '
- 'table1.col3 FROM table1, (SELECT '
- 'table2.col1 AS col1, table2.col2 AS col2, '
- 'table2.col3 AS col3 FROM table2 WHERE '
- 'table2.col1 = table1.col1 ORDER BY '
- 'table2.col3) ORDER BY table1.col3')
def test_prefixes(self):
s = t1.select()
diff --git a/test/sql/test_insert.py b/test/sql/test_insert.py
new file mode 100644
index 000000000..cd040538f
--- /dev/null
+++ b/test/sql/test_insert.py
@@ -0,0 +1,312 @@
+#! coding:utf-8
+
+from sqlalchemy import Column, Integer, MetaData, String, Table,\
+ bindparam, exc, func, insert
+from sqlalchemy.dialects import mysql, postgresql
+from sqlalchemy.engine import default
+from sqlalchemy.testing import AssertsCompiledSQL,\
+ assert_raises_message, fixtures
+
+
+class _InsertTestBase(object):
+ @classmethod
+ def define_tables(cls, metadata):
+ Table('mytable', metadata,
+ Column('myid', Integer),
+ Column('name', String(30)),
+ Column('description', String(30)))
+ Table('myothertable', metadata,
+ Column('otherid', Integer),
+ Column('othername', String(30)))
+
+
+class InsertTest(_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL):
+ __dialect__ = 'default'
+
+ def test_generic_insert_bind_params_all_columns(self):
+ table1 = self.tables.mytable
+
+ self.assert_compile(insert(table1),
+ 'INSERT INTO mytable (myid, name, description) '
+ 'VALUES (:myid, :name, :description)')
+
+ def test_insert_with_values_dict(self):
+ table1 = self.tables.mytable
+
+ checkparams = {
+ 'myid': 3,
+ 'name': 'jack'
+ }
+
+ self.assert_compile(insert(table1, dict(myid=3, name='jack')),
+ 'INSERT INTO mytable (myid, name) VALUES (:myid, :name)',
+ checkparams=checkparams)
+
+ def test_insert_with_values_tuple(self):
+ table1 = self.tables.mytable
+
+ checkparams = {
+ 'myid': 3,
+ 'name': 'jack',
+ 'description': 'mydescription'
+ }
+
+ self.assert_compile(insert(table1, (3, 'jack', 'mydescription')),
+ 'INSERT INTO mytable (myid, name, description) '
+ 'VALUES (:myid, :name, :description)',
+ checkparams=checkparams)
+
+ def test_insert_with_values_func(self):
+ table1 = self.tables.mytable
+
+ self.assert_compile(insert(table1, values=dict(myid=func.lala())),
+ 'INSERT INTO mytable (myid) VALUES (lala())')
+
+ def test_insert_with_user_supplied_bind_params(self):
+ table1 = self.tables.mytable
+
+ values = {
+ table1.c.myid: bindparam('userid'),
+ table1.c.name: bindparam('username')
+ }
+
+ self.assert_compile(insert(table1, values),
+ 'INSERT INTO mytable (myid, name) VALUES (:userid, :username)')
+
+ def test_insert_values(self):
+ table1 = self.tables.mytable
+
+ values1 = {table1.c.myid: bindparam('userid')}
+ values2 = {table1.c.name: bindparam('username')}
+
+ self.assert_compile(insert(table1, values=values1).values(values2),
+ 'INSERT INTO mytable (myid, name) VALUES (:userid, :username)')
+
+ def test_prefix_with(self):
+ table1 = self.tables.mytable
+
+ stmt = table1.insert().\
+ prefix_with('A', 'B', dialect='mysql').\
+ prefix_with('C', 'D')
+
+ self.assert_compile(stmt,
+ 'INSERT C D INTO mytable (myid, name, description) '
+ 'VALUES (:myid, :name, :description)')
+
+ self.assert_compile(stmt,
+ 'INSERT A B C D INTO mytable (myid, name, description) '
+ 'VALUES (%s, %s, %s)', dialect=mysql.dialect())
+
+ def test_inline_default(self):
+ metadata = MetaData()
+ table = Table('sometable', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('foo', Integer, default=func.foobar()))
+
+ self.assert_compile(table.insert(values={}, inline=True),
+ 'INSERT INTO sometable (foo) VALUES (foobar())')
+
+ self.assert_compile(table.insert(inline=True),
+ 'INSERT INTO sometable (foo) VALUES (foobar())', params={})
+
+ def test_insert_returning_not_in_default(self):
+ table1 = self.tables.mytable
+
+ stmt = table1.insert().returning(table1.c.myid)
+ assert_raises_message(
+ exc.CompileError,
+ "RETURNING is not supported by this dialect's statement compiler.",
+ stmt.compile,
+ dialect=default.DefaultDialect()
+ )
+
+class EmptyTest(_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL):
+ __dialect__ = 'default'
+
+ def test_empty_insert_default(self):
+ table1 = self.tables.mytable
+
+ stmt = table1.insert().values({}) # hide from 2to3
+ self.assert_compile(stmt, 'INSERT INTO mytable () VALUES ()')
+
+ def test_supports_empty_insert_true(self):
+ table1 = self.tables.mytable
+
+ dialect = default.DefaultDialect()
+ dialect.supports_empty_insert = dialect.supports_default_values = True
+
+ stmt = table1.insert().values({}) # hide from 2to3
+ self.assert_compile(stmt,
+ 'INSERT INTO mytable DEFAULT VALUES',
+ dialect=dialect)
+
+ def test_supports_empty_insert_false(self):
+ table1 = self.tables.mytable
+
+ dialect = default.DefaultDialect()
+ dialect.supports_empty_insert = dialect.supports_default_values = False
+
+ stmt = table1.insert().values({}) # hide from 2to3
+ assert_raises_message(exc.CompileError,
+ "The 'default' dialect with current database version "
+ "settings does not support empty inserts.",
+ stmt.compile, dialect=dialect)
+
+
+class MultirowTest(_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL):
+ __dialect__ = 'default'
+
+ def test_not_supported(self):
+ table1 = self.tables.mytable
+
+ dialect = default.DefaultDialect()
+ stmt = table1.insert().values([{'myid': 1}, {'myid': 2}])
+ assert_raises_message(
+ exc.CompileError,
+ "The 'default' dialect with current database version settings "
+ "does not support in-place multirow inserts.",
+ stmt.compile, dialect=dialect)
+
+ def test_named(self):
+ table1 = self.tables.mytable
+
+ values = [
+ {'myid': 1, 'name': 'a', 'description': 'b'},
+ {'myid': 2, 'name': 'c', 'description': 'd'},
+ {'myid': 3, 'name': 'e', 'description': 'f'}
+ ]
+
+ checkparams = {
+ 'myid_0': 1,
+ 'myid_1': 2,
+ 'myid_2': 3,
+ 'name_0': 'a',
+ 'name_1': 'c',
+ 'name_2': 'e',
+ 'description_0': 'b',
+ 'description_1': 'd',
+ 'description_2': 'f',
+ }
+
+ dialect = default.DefaultDialect()
+ dialect.supports_multivalues_insert = True
+
+ self.assert_compile(table1.insert().values(values),
+ 'INSERT INTO mytable (myid, name, description) VALUES '
+ '(:myid_0, :name_0, :description_0), '
+ '(:myid_1, :name_1, :description_1), '
+ '(:myid_2, :name_2, :description_2)',
+ checkparams=checkparams, dialect=dialect)
+
+ def test_positional(self):
+ table1 = self.tables.mytable
+
+ values = [
+ {'myid': 1, 'name': 'a', 'description': 'b'},
+ {'myid': 2, 'name': 'c', 'description': 'd'},
+ {'myid': 3, 'name': 'e', 'description': 'f'}
+ ]
+
+ checkpositional = (1, 'a', 'b', 2, 'c', 'd', 3, 'e', 'f')
+
+ dialect = default.DefaultDialect()
+ dialect.supports_multivalues_insert = True
+ dialect.paramstyle = 'format'
+ dialect.positional = True
+
+ self.assert_compile(table1.insert().values(values),
+ 'INSERT INTO mytable (myid, name, description) VALUES '
+ '(%s, %s, %s), (%s, %s, %s), (%s, %s, %s)',
+ checkpositional=checkpositional, dialect=dialect)
+
+ def test_inline_default(self):
+ metadata = MetaData()
+ table = Table('sometable', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', String),
+ Column('foo', Integer, default=func.foobar()))
+
+ values = [
+ {'id': 1, 'data': 'data1'},
+ {'id': 2, 'data': 'data2', 'foo': 'plainfoo'},
+ {'id': 3, 'data': 'data3'},
+ ]
+
+ checkparams = {
+ 'id_0': 1,
+ 'id_1': 2,
+ 'id_2': 3,
+ 'data_0': 'data1',
+ 'data_1': 'data2',
+ 'data_2': 'data3',
+ 'foo_1': 'plainfoo',
+ }
+
+ self.assert_compile(table.insert().values(values),
+ 'INSERT INTO sometable (id, data, foo) VALUES '
+ '(%(id_0)s, %(data_0)s, foobar()), '
+ '(%(id_1)s, %(data_1)s, %(foo_1)s), '
+ '(%(id_2)s, %(data_2)s, foobar())',
+ checkparams=checkparams, dialect=postgresql.dialect())
+
+ def test_server_default(self):
+ metadata = MetaData()
+ table = Table('sometable', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', String),
+ Column('foo', Integer, server_default=func.foobar()))
+
+ values = [
+ {'id': 1, 'data': 'data1'},
+ {'id': 2, 'data': 'data2', 'foo': 'plainfoo'},
+ {'id': 3, 'data': 'data3'},
+ ]
+
+ checkparams = {
+ 'id_0': 1,
+ 'id_1': 2,
+ 'id_2': 3,
+ 'data_0': 'data1',
+ 'data_1': 'data2',
+ 'data_2': 'data3',
+ }
+
+ self.assert_compile(table.insert().values(values),
+ 'INSERT INTO sometable (id, data) VALUES '
+ '(%(id_0)s, %(data_0)s), '
+ '(%(id_1)s, %(data_1)s), '
+ '(%(id_2)s, %(data_2)s)',
+ checkparams=checkparams, dialect=postgresql.dialect())
+
+ def test_server_default_absent_value(self):
+ metadata = MetaData()
+ table = Table('sometable', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', String),
+ Column('foo', Integer, server_default=func.foobar()))
+
+ values = [
+ {'id': 1, 'data': 'data1', 'foo': 'plainfoo'},
+ {'id': 2, 'data': 'data2'},
+ {'id': 3, 'data': 'data3', 'foo': 'otherfoo'},
+ ]
+
+ checkparams = {
+ 'id_0': 1,
+ 'id_1': 2,
+ 'id_2': 3,
+ 'data_0': 'data1',
+ 'data_1': 'data2',
+ 'data_2': 'data3',
+ 'foo_0': 'plainfoo',
+ 'foo_2': 'otherfoo',
+ }
+
+ # note the effect here is that the first set of params
+ # takes effect for the rest of them, when one is absent
+ self.assert_compile(table.insert().values(values),
+ 'INSERT INTO sometable (id, data, foo) VALUES '
+ '(%(id_0)s, %(data_0)s, %(foo_0)s), '
+ '(%(id_1)s, %(data_1)s, %(foo_0)s), '
+ '(%(id_2)s, %(data_2)s, %(foo_2)s)',
+ checkparams=checkparams, dialect=postgresql.dialect())
diff --git a/test/sql/test_labels.py b/test/sql/test_labels.py
index d7cb8db4a..fd45d303f 100644
--- a/test/sql/test_labels.py
+++ b/test/sql/test_labels.py
@@ -1,19 +1,15 @@
-
-from sqlalchemy import exc as exceptions
-from sqlalchemy import testing
-from sqlalchemy.testing import engines
-from sqlalchemy import select, MetaData, Integer, or_
+from sqlalchemy import exc as exceptions, select, MetaData, Integer, or_
from sqlalchemy.engine import default
from sqlalchemy.sql import table, column
-from sqlalchemy.testing import assert_raises, eq_
-from sqlalchemy.testing import fixtures, AssertsCompiledSQL
-from sqlalchemy.testing.engines import testing_engine
+from sqlalchemy.testing import AssertsCompiledSQL, assert_raises, engines,\
+ fixtures
from sqlalchemy.testing.schema import Table, Column
IDENT_LENGTH = 29
class MaxIdentTest(fixtures.TestBase, AssertsCompiledSQL):
+ __dialect__ = 'DefaultDialect'
table1 = table('some_large_named_table',
column('this_is_the_primarykey_column'),
@@ -25,9 +21,6 @@ class MaxIdentTest(fixtures.TestBase, AssertsCompiledSQL):
column('this_is_the_data_column')
)
- __dialect__ = 'DefaultDialect'
-
-
def _length_fixture(self, length=IDENT_LENGTH, positional=False):
dialect = default.DefaultDialect()
dialect.max_identifier_length = length
@@ -60,7 +53,7 @@ class MaxIdentTest(fixtures.TestBase, AssertsCompiledSQL):
ta = table2.alias()
on = table1.c.this_is_the_data_column == ta.c.this_is_the_data_column
self.assert_compile(
- select([table1, ta]).select_from(table1.join(ta, on)).\
+ select([table1, ta]).select_from(table1.join(ta, on)).
where(ta.c.this_is_the_data_column == 'data3'),
'SELECT '
'some_large_named_table.this_is_the_primarykey_column, '
@@ -87,16 +80,9 @@ class MaxIdentTest(fixtures.TestBase, AssertsCompiledSQL):
t = Table('this_name_is_too_long_for_what_were_doing_in_this_test',
m, Column('foo', Integer))
eng = self._engine_fixture()
- for meth in (
- t.create,
- t.drop,
- m.create_all,
- m.drop_all
- ):
- assert_raises(
- exceptions.IdentifierError,
- meth, eng
- )
+ methods = (t.create, t.drop, m.create_all, m.drop_all)
+ for meth in methods:
+ assert_raises(exceptions.IdentifierError, meth, eng)
def _assert_labeled_table1_select(self, s):
table1 = self.table1
@@ -263,7 +249,9 @@ class MaxIdentTest(fixtures.TestBase, AssertsCompiledSQL):
dialect=self._length_fixture(positional=True)
)
+
class LabelLengthTest(fixtures.TestBase, AssertsCompiledSQL):
+ __dialect__ = 'DefaultDialect'
table1 = table('some_large_named_table',
column('this_is_the_primarykey_column'),
@@ -275,8 +263,6 @@ class LabelLengthTest(fixtures.TestBase, AssertsCompiledSQL):
column('this_is_the_data_column')
)
- __dialect__ = 'DefaultDialect'
-
def test_adjustable_1(self):
table1 = self.table1
q = table1.select(
@@ -404,27 +390,27 @@ class LabelLengthTest(fixtures.TestBase, AssertsCompiledSQL):
'AS _1',
dialect=compile_dialect)
-
def test_adjustable_result_schema_column_1(self):
table1 = self.table1
+
q = table1.select(
table1.c.this_is_the_primarykey_column == 4).apply_labels().\
alias('foo')
- dialect = default.DefaultDialect(label_length=10)
+ dialect = default.DefaultDialect(label_length=10)
compiled = q.compile(dialect=dialect)
+
assert set(compiled.result_map['some_2'][1]).issuperset([
- table1.c.this_is_the_data_column,
- 'some_large_named_table_this_is_the_data_column',
- 'some_2'
+ table1.c.this_is_the_data_column,
+ 'some_large_named_table_this_is_the_data_column',
+ 'some_2'
+ ])
- ])
assert set(compiled.result_map['some_1'][1]).issuperset([
- table1.c.this_is_the_primarykey_column,
- 'some_large_named_table_this_is_the_primarykey_column',
- 'some_1'
-
- ])
+ table1.c.this_is_the_primarykey_column,
+ 'some_large_named_table_this_is_the_primarykey_column',
+ 'some_1'
+ ])
def test_adjustable_result_schema_column_2(self):
table1 = self.table1
@@ -434,20 +420,17 @@ class LabelLengthTest(fixtures.TestBase, AssertsCompiledSQL):
x = select([q])
dialect = default.DefaultDialect(label_length=10)
-
compiled = x.compile(dialect=dialect)
+
assert set(compiled.result_map['this_2'][1]).issuperset([
- q.corresponding_column(table1.c.this_is_the_data_column),
- 'this_is_the_data_column',
- 'this_2'
+ q.corresponding_column(table1.c.this_is_the_data_column),
+ 'this_is_the_data_column',
+ 'this_2'])
- ])
assert set(compiled.result_map['this_1'][1]).issuperset([
- q.corresponding_column(table1.c.this_is_the_primarykey_column),
- 'this_is_the_primarykey_column',
- 'this_1'
-
- ])
+ q.corresponding_column(table1.c.this_is_the_primarykey_column),
+ 'this_is_the_primarykey_column',
+ 'this_1'])
def test_table_plus_column_exceeds_length(self):
"""test that the truncation only occurs when tablename + colname are
@@ -490,7 +473,6 @@ class LabelLengthTest(fixtures.TestBase, AssertsCompiledSQL):
'other_thirty_characters_table_.thirty_characters_table_id',
dialect=compile_dialect)
-
def test_colnames_longer_than_labels_lowercase(self):
t1 = table('a', column('abcde'))
self._test_colnames_longer_than_labels(t1)
@@ -507,30 +489,18 @@ class LabelLengthTest(fixtures.TestBase, AssertsCompiledSQL):
# 'abcde' is longer than 4, but rendered as itself
# needs to have all characters
s = select([a1])
- self.assert_compile(
- select([a1]),
- "SELECT asdf.abcde FROM a AS asdf",
- dialect=dialect
- )
+ self.assert_compile(select([a1]),
+ 'SELECT asdf.abcde FROM a AS asdf',
+ dialect=dialect)
compiled = s.compile(dialect=dialect)
assert set(compiled.result_map['abcde'][1]).issuperset([
- 'abcde',
- a1.c.abcde,
- 'abcde'
- ])
+ 'abcde', a1.c.abcde, 'abcde'])
# column still there, but short label
s = select([a1]).apply_labels()
- self.assert_compile(
- s,
- "SELECT asdf.abcde AS _1 FROM a AS asdf",
- dialect=dialect
- )
+ self.assert_compile(s,
+ 'SELECT asdf.abcde AS _1 FROM a AS asdf',
+ dialect=dialect)
compiled = s.compile(dialect=dialect)
assert set(compiled.result_map['_1'][1]).issuperset([
- 'asdf_abcde',
- a1.c.abcde,
- '_1'
- ])
-
-
+ 'asdf_abcde', a1.c.abcde, '_1'])
diff --git a/test/sql/test_metadata.py b/test/sql/test_metadata.py
index 1b8068f22..db2eaa4fa 100644
--- a/test/sql/test_metadata.py
+++ b/test/sql/test_metadata.py
@@ -54,7 +54,7 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
Column(Integer(), ForeignKey('bat.blah'), doc="this is a col"),
Column('bar', Integer(), ForeignKey('bat.blah'), primary_key=True,
key='bar'),
- Column('bar', Integer(), info={'foo':'bar'}),
+ Column('bar', Integer(), info={'foo': 'bar'}),
]:
c2 = col.copy()
for attr in ('name', 'type', 'nullable',
@@ -148,21 +148,21 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
def test_dupe_tables(self):
metadata = self.metadata
Table('table1', metadata,
- Column('col1', Integer, primary_key=True),
- Column('col2', String(20)))
+ Column('col1', Integer, primary_key=True),
+ Column('col2', String(20)))
metadata.create_all()
Table('table1', metadata, autoload=True)
def go():
Table('table1', metadata,
- Column('col1', Integer, primary_key=True),
- Column('col2', String(20)))
+ Column('col1', Integer, primary_key=True),
+ Column('col2', String(20)))
assert_raises_message(
tsa.exc.InvalidRequestError,
- "Table 'table1' is already defined for this "\
- "MetaData instance. Specify 'extend_existing=True' "\
- "to redefine options and columns on an existing "\
- "Table object.",
+ "Table 'table1' is already defined for this "
+ "MetaData instance. Specify 'extend_existing=True' "
+ "to redefine options and columns on an existing "
+ "Table object.",
go
)
@@ -544,23 +544,23 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
for i, (name, metadata, schema, quote_schema,
exp_schema, exp_quote_schema) in enumerate([
- ('t1', m1, None, None, 'sch1', None),
- ('t2', m1, 'sch2', None, 'sch2', None),
- ('t3', m1, 'sch2', True, 'sch2', True),
- ('t4', m1, 'sch1', None, 'sch1', None),
- ('t1', m2, None, None, 'sch1', True),
- ('t2', m2, 'sch2', None, 'sch2', None),
- ('t3', m2, 'sch2', True, 'sch2', True),
- ('t4', m2, 'sch1', None, 'sch1', None),
- ('t1', m3, None, None, 'sch1', False),
- ('t2', m3, 'sch2', None, 'sch2', None),
- ('t3', m3, 'sch2', True, 'sch2', True),
- ('t4', m3, 'sch1', None, 'sch1', None),
- ('t1', m4, None, None, None, None),
- ('t2', m4, 'sch2', None, 'sch2', None),
- ('t3', m4, 'sch2', True, 'sch2', True),
- ('t4', m4, 'sch1', None, 'sch1', None),
- ]):
+ ('t1', m1, None, None, 'sch1', None),
+ ('t2', m1, 'sch2', None, 'sch2', None),
+ ('t3', m1, 'sch2', True, 'sch2', True),
+ ('t4', m1, 'sch1', None, 'sch1', None),
+ ('t1', m2, None, None, 'sch1', True),
+ ('t2', m2, 'sch2', None, 'sch2', None),
+ ('t3', m2, 'sch2', True, 'sch2', True),
+ ('t4', m2, 'sch1', None, 'sch1', None),
+ ('t1', m3, None, None, 'sch1', False),
+ ('t2', m3, 'sch2', None, 'sch2', None),
+ ('t3', m3, 'sch2', True, 'sch2', True),
+ ('t4', m3, 'sch1', None, 'sch1', None),
+ ('t1', m4, None, None, None, None),
+ ('t2', m4, 'sch2', None, 'sch2', None),
+ ('t3', m4, 'sch2', True, 'sch2', True),
+ ('t4', m4, 'sch1', None, 'sch1', None),
+ ]):
kw = {}
if schema is not None:
kw['schema'] = schema
@@ -568,10 +568,12 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
kw['quote_schema'] = quote_schema
t = Table(name, metadata, **kw)
eq_(t.schema, exp_schema, "test %d, table schema" % i)
- eq_(t.quote_schema, exp_quote_schema, "test %d, table quote_schema" % i)
+ eq_(t.quote_schema, exp_quote_schema,
+ "test %d, table quote_schema" % i)
seq = Sequence(name, metadata=metadata, **kw)
eq_(seq.schema, exp_schema, "test %d, seq schema" % i)
- eq_(seq.quote_schema, exp_quote_schema, "test %d, seq quote_schema" % i)
+ eq_(seq.quote_schema, exp_quote_schema,
+ "test %d, seq quote_schema" % i)
def test_manual_dependencies(self):
meta = MetaData()
@@ -696,8 +698,8 @@ class TableTest(fixtures.TestBase, AssertsCompiledSQL):
Column("col1", Integer),
prefixes=["VIRTUAL"])
self.assert_compile(
- schema.CreateTable(table2),
- "CREATE VIRTUAL TABLE temporary_table_2 (col1 INTEGER)"
+ schema.CreateTable(table2),
+ "CREATE VIRTUAL TABLE temporary_table_2 (col1 INTEGER)"
)
def test_table_info(self):
@@ -940,7 +942,7 @@ class UseExistingTest(fixtures.TablesTest):
Unicode), autoload=True)
assert_raises_message(
exc.InvalidRequestError,
- "Table 'users' is already defined for this "\
+ "Table 'users' is already defined for this "\
"MetaData instance.",
go
)
@@ -1551,7 +1553,8 @@ class CatchAllEventsTest(fixtures.TestBase):
def test_all_events(self):
canary = []
def before_attach(obj, parent):
- canary.append("%s->%s" % (obj.__class__.__name__, parent.__class__.__name__))
+ canary.append("%s->%s" % (obj.__class__.__name__,
+ parent.__class__.__name__))
def after_attach(obj, parent):
canary.append("%s->%s" % (obj.__class__.__name__, parent))
@@ -1586,7 +1589,8 @@ class CatchAllEventsTest(fixtures.TestBase):
def evt(target):
def before_attach(obj, parent):
- canary.append("%s->%s" % (target.__name__, parent.__class__.__name__))
+ canary.append("%s->%s" % (target.__name__,
+ parent.__class__.__name__))
def after_attach(obj, parent):
canary.append("%s->%s" % (target.__name__, parent))
@@ -1594,7 +1598,8 @@ class CatchAllEventsTest(fixtures.TestBase):
event.listen(target, "after_parent_attach", after_attach)
for target in [
- schema.ForeignKeyConstraint, schema.PrimaryKeyConstraint, schema.UniqueConstraint,
+ schema.ForeignKeyConstraint, schema.PrimaryKeyConstraint,
+ schema.UniqueConstraint,
schema.CheckConstraint
]:
evt(target)
@@ -1615,11 +1620,12 @@ class CatchAllEventsTest(fixtures.TestBase):
eq_(
canary,
[
- 'PrimaryKeyConstraint->Table', 'PrimaryKeyConstraint->t1',
- 'ForeignKeyConstraint->Table', 'ForeignKeyConstraint->t1',
- 'UniqueConstraint->Table', 'UniqueConstraint->t1',
- 'PrimaryKeyConstraint->Table', 'PrimaryKeyConstraint->t2',
- 'CheckConstraint->Table', 'CheckConstraint->t2',
- 'UniqueConstraint->Table', 'UniqueConstraint->t2'
+ 'PrimaryKeyConstraint->Table', 'PrimaryKeyConstraint->t1',
+ 'ForeignKeyConstraint->Table', 'ForeignKeyConstraint->t1',
+ 'UniqueConstraint->Table', 'UniqueConstraint->t1',
+ 'PrimaryKeyConstraint->Table', 'PrimaryKeyConstraint->t2',
+ 'CheckConstraint->Table', 'CheckConstraint->t2',
+ 'UniqueConstraint->Table', 'UniqueConstraint->t2'
]
)
+
diff --git a/test/sql/test_query.py b/test/sql/test_query.py
index b5f50aeea..293e629c8 100644
--- a/test/sql/test_query.py
+++ b/test/sql/test_query.py
@@ -190,10 +190,27 @@ class QueryTest(fixtures.TestBase):
try:
table.create(bind=engine, checkfirst=True)
i = insert_values(engine, table, values)
- assert i == assertvalues, "tablename: %s %r %r" % (table.name, repr(i), repr(assertvalues))
+ assert i == assertvalues, "tablename: %s %r %r" % \
+ (table.name, repr(i), repr(assertvalues))
finally:
table.drop(bind=engine)
+ @testing.only_on('sqlite+pysqlite')
+ @testing.provide_metadata
+ def test_lastrowid_zero(self):
+ from sqlalchemy.dialects import sqlite
+ eng = engines.testing_engine()
+ class ExcCtx(sqlite.base.SQLiteExecutionContext):
+ def get_lastrowid(self):
+ return 0
+ eng.dialect.execution_ctx_cls = ExcCtx
+ t = Table('t', MetaData(), Column('x', Integer, primary_key=True),
+ Column('y', Integer))
+ t.create(eng)
+ r = eng.execute(t.insert().values(y=5))
+ eq_(r.inserted_primary_key, [0])
+
+
@testing.fails_on('sqlite', "sqlite autoincremnt doesn't work with composite pks")
def test_misordered_lastrow(self):
related = Table('related', metadata,
@@ -1011,6 +1028,22 @@ class QueryTest(fixtures.TestBase):
lambda: row[u2.c.user_id]
)
+ def test_ambiguous_column_contains(self):
+ # ticket 2702. in 0.7 we'd get True, False.
+ # in 0.8, both columns are present so it's True;
+ # but when they're fetched you'll get the ambiguous error.
+ users.insert().execute(user_id=1, user_name='john')
+ result = select([
+ users.c.user_id,
+ addresses.c.user_id]).\
+ select_from(users.outerjoin(addresses)).execute()
+ row = result.first()
+
+ eq_(
+ set([users.c.user_id in row, addresses.c.user_id in row]),
+ set([True])
+ )
+
def test_ambiguous_column_by_col_plus_label(self):
users.insert().execute(user_id=1, user_name='john')
result = select([users.c.user_id,
diff --git a/test/sql/test_returning.py b/test/sql/test_returning.py
index a182444e9..6a42b0625 100644
--- a/test/sql/test_returning.py
+++ b/test/sql/test_returning.py
@@ -88,26 +88,6 @@ class ReturningTest(fixtures.TestBase, AssertsExecutionResults):
eq_(result.fetchall(), [(1,)])
- @testing.fails_on('postgresql', 'undefined behavior')
- @testing.fails_on('oracle+cx_oracle', 'undefined behavior')
- @testing.crashes('mssql+mxodbc', 'Raises an error')
- def test_insert_returning_execmany(self):
-
- # return value is documented as failing with psycopg2/executemany
- result2 = table.insert().returning(table).execute(
- [{'persons': 2, 'full': False}, {'persons': 3, 'full': True}])
-
- if testing.against('mssql+zxjdbc'):
- # jtds apparently returns only the first row
- eq_(result2.fetchall(), [(2, 2, False, None)])
- elif testing.against('firebird', 'mssql', 'oracle'):
- # Multiple inserts only return the last row
- eq_(result2.fetchall(), [(3, 3, True, None)])
- else:
- # nobody does this as far as we know (pg8000?)
- eq_(result2.fetchall(), [(2, 2, False, None), (3, 3, True, None)])
-
-
@testing.requires.multivalues_inserts
def test_multirow_returning(self):
ins = table.insert().returning(table.c.id, table.c.persons).values(
diff --git a/test/sql/test_selectable.py b/test/sql/test_selectable.py
index 30052a806..e881298a7 100644
--- a/test/sql/test_selectable.py
+++ b/test/sql/test_selectable.py
@@ -1587,3 +1587,156 @@ class AnnotationsTest(fixtures.TestBase):
comp2 = c2.comparator
assert (c2 == 5).left._annotations == {"foo": "bar", "bat": "hoho"}
+
+class WithLabelsTest(fixtures.TestBase):
+ def _assert_labels_warning(self, s):
+ assert_raises_message(
+ exc.SAWarning,
+ "replaced by another column with the same key",
+ lambda: s.c
+ )
+
+ def _assert_result_keys(self, s, keys):
+ compiled = s.compile()
+ eq_(set(compiled.result_map), set(keys))
+
+ def _assert_subq_result_keys(self, s, keys):
+ compiled = s.select().compile()
+ eq_(set(compiled.result_map), set(keys))
+
+ def _names_overlap(self):
+ m = MetaData()
+ t1 = Table('t1', m, Column('x', Integer))
+ t2 = Table('t2', m, Column('x', Integer))
+ return select([t1, t2])
+
+ def test_names_overlap_nolabel(self):
+ sel = self._names_overlap()
+ self._assert_labels_warning(sel)
+ self._assert_result_keys(sel, ['x'])
+
+ def test_names_overlap_label(self):
+ sel = self._names_overlap().apply_labels()
+ eq_(
+ sel.c.keys(),
+ ['t1_x', 't2_x']
+ )
+ self._assert_result_keys(sel, ['t1_x', 't2_x'])
+
+ def _names_overlap_keys_dont(self):
+ m = MetaData()
+ t1 = Table('t1', m, Column('x', Integer, key='a'))
+ t2 = Table('t2', m, Column('x', Integer, key='b'))
+ return select([t1, t2])
+
+ def test_names_overlap_keys_dont_nolabel(self):
+ sel = self._names_overlap_keys_dont()
+ eq_(
+ sel.c.keys(),
+ ['a', 'b']
+ )
+ self._assert_result_keys(sel, ['x'])
+
+ def test_names_overlap_keys_dont_label(self):
+ sel = self._names_overlap_keys_dont().apply_labels()
+ eq_(
+ sel.c.keys(),
+ ['t1_a', 't2_b']
+ )
+ self._assert_result_keys(sel, ['t1_x', 't2_x'])
+
+ def _labels_overlap(self):
+ m = MetaData()
+ t1 = Table('t', m, Column('x_id', Integer))
+ t2 = Table('t_x', m, Column('id', Integer))
+ return select([t1, t2])
+
+ def test_labels_overlap_nolabel(self):
+ sel = self._labels_overlap()
+ eq_(
+ sel.c.keys(),
+ ['x_id', 'id']
+ )
+ self._assert_result_keys(sel, ['x_id', 'id'])
+
+ def test_labels_overlap_label(self):
+ sel = self._labels_overlap().apply_labels()
+ t2 = sel.froms[1]
+ eq_(
+ sel.c.keys(),
+ ['t_x_id', t2.c.id.anon_label]
+ )
+ self._assert_result_keys(sel, ['t_x_id', 'id_1'])
+ self._assert_subq_result_keys(sel, ['t_x_id', 'id_1'])
+
+ def _labels_overlap_keylabels_dont(self):
+ m = MetaData()
+ t1 = Table('t', m, Column('x_id', Integer, key='a'))
+ t2 = Table('t_x', m, Column('id', Integer, key='b'))
+ return select([t1, t2])
+
+ def test_labels_overlap_keylabels_dont_nolabel(self):
+ sel = self._labels_overlap_keylabels_dont()
+ eq_(sel.c.keys(), ['a', 'b'])
+ self._assert_result_keys(sel, ['x_id', 'id'])
+
+ def test_labels_overlap_keylabels_dont_label(self):
+ sel = self._labels_overlap_keylabels_dont().apply_labels()
+ eq_(sel.c.keys(), ['t_a', 't_x_b'])
+ self._assert_result_keys(sel, ['t_x_id', 'id_1'])
+
+ def _keylabels_overlap_labels_dont(self):
+ m = MetaData()
+ t1 = Table('t', m, Column('a', Integer, key='x_id'))
+ t2 = Table('t_x', m, Column('b', Integer, key='id'))
+ return select([t1, t2])
+
+ def test_keylabels_overlap_labels_dont_nolabel(self):
+ sel = self._keylabels_overlap_labels_dont()
+ eq_(sel.c.keys(), ['x_id', 'id'])
+ self._assert_result_keys(sel, ['a', 'b'])
+
+ def test_keylabels_overlap_labels_dont_label(self):
+ sel = self._keylabels_overlap_labels_dont().apply_labels()
+ t2 = sel.froms[1]
+ eq_(sel.c.keys(), ['t_x_id', t2.c.id.anon_label])
+ self._assert_result_keys(sel, ['t_a', 't_x_b'])
+ self._assert_subq_result_keys(sel, ['t_a', 't_x_b'])
+
+ def _keylabels_overlap_labels_overlap(self):
+ m = MetaData()
+ t1 = Table('t', m, Column('x_id', Integer, key='x_a'))
+ t2 = Table('t_x', m, Column('id', Integer, key='a'))
+ return select([t1, t2])
+
+ def test_keylabels_overlap_labels_overlap_nolabel(self):
+ sel = self._keylabels_overlap_labels_overlap()
+ eq_(sel.c.keys(), ['x_a', 'a'])
+ self._assert_result_keys(sel, ['x_id', 'id'])
+ self._assert_subq_result_keys(sel, ['x_id', 'id'])
+
+ def test_keylabels_overlap_labels_overlap_label(self):
+ sel = self._keylabels_overlap_labels_overlap().apply_labels()
+ t2 = sel.froms[1]
+ eq_(sel.c.keys(), ['t_x_a', t2.c.a.anon_label])
+ self._assert_result_keys(sel, ['t_x_id', 'id_1'])
+ self._assert_subq_result_keys(sel, ['t_x_id', 'id_1'])
+
+ def _keys_overlap_names_dont(self):
+ m = MetaData()
+ t1 = Table('t1', m, Column('a', Integer, key='x'))
+ t2 = Table('t2', m, Column('b', Integer, key='x'))
+ return select([t1, t2])
+
+ def test_keys_overlap_names_dont_nolabel(self):
+ sel = self._keys_overlap_names_dont()
+ self._assert_labels_warning(sel)
+ self._assert_result_keys(sel, ['a', 'b'])
+
+ def test_keys_overlap_names_dont_label(self):
+ sel = self._keys_overlap_names_dont().apply_labels()
+ eq_(
+ sel.c.keys(),
+ ['t1_x', 't2_x']
+ )
+ self._assert_result_keys(sel, ['t1_a', 't2_b'])
diff --git a/test/sql/test_types.py b/test/sql/test_types.py
index 3c981e539..fac22a205 100644
--- a/test/sql/test_types.py
+++ b/test/sql/test_types.py
@@ -15,7 +15,6 @@ from sqlalchemy import testing
from sqlalchemy.testing import AssertsCompiledSQL, AssertsExecutionResults, \
engines, pickleable
from sqlalchemy.testing.util import picklers
-from sqlalchemy.util.compat import decimal
from sqlalchemy.testing.util import round_decimal
from sqlalchemy.testing import fixtures
diff --git a/test/sql/test_update.py b/test/sql/test_update.py
index b46489cd2..a8df86cd2 100644
--- a/test/sql/test_update.py
+++ b/test/sql/test_update.py
@@ -1,55 +1,53 @@
-from sqlalchemy.testing import eq_, assert_raises_message, assert_raises, AssertsCompiledSQL
-import datetime
from sqlalchemy import *
-from sqlalchemy import exc, sql, util
-from sqlalchemy.engine import default, base
from sqlalchemy import testing
-from sqlalchemy.testing import fixtures
-from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.dialects import mysql
+from sqlalchemy.testing import AssertsCompiledSQL, eq_, fixtures
+from sqlalchemy.testing.schema import Table, Column
+
class _UpdateFromTestBase(object):
@classmethod
def define_tables(cls, metadata):
+ Table('mytable', metadata,
+ Column('myid', Integer),
+ Column('name', String(30)),
+ Column('description', String(50)))
+ Table('myothertable', metadata,
+ Column('otherid', Integer),
+ Column('othername', String(30)))
Table('users', metadata,
Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(30), nullable=False),
- )
-
+ test_needs_autoincrement=True),
+ Column('name', String(30), nullable=False))
Table('addresses', metadata,
Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column('user_id', None, ForeignKey('users.id')),
Column('name', String(30), nullable=False),
- Column('email_address', String(50), nullable=False),
- )
-
- Table("dingalings", metadata,
+ Column('email_address', String(50), nullable=False))
+ Table('dingalings', metadata,
Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column('address_id', None, ForeignKey('addresses.id')),
- Column('data', String(30)),
- )
+ Column('data', String(30)))
@classmethod
def fixtures(cls):
return dict(
- users = (
+ users=(
('id', 'name'),
(7, 'jack'),
(8, 'ed'),
(9, 'fred'),
(10, 'chuck')
),
-
addresses = (
('id', 'user_id', 'name', 'email_address'),
- (1, 7, 'x', "jack@bean.com"),
- (2, 8, 'x', "ed@wood.com"),
- (3, 8, 'x', "ed@bettyboop.com"),
- (4, 8, 'x', "ed@lala.com"),
- (5, 9, 'x', "fred@fred.com")
+ (1, 7, 'x', 'jack@bean.com'),
+ (2, 8, 'x', 'ed@wood.com'),
+ (3, 8, 'x', 'ed@bettyboop.com'),
+ (4, 8, 'x', 'ed@lala.com'),
+ (5, 9, 'x', 'fred@fred.com')
),
dingalings = (
('id', 'address_id', 'data'),
@@ -59,288 +57,462 @@ class _UpdateFromTestBase(object):
)
-class UpdateFromCompileTest(_UpdateFromTestBase, fixtures.TablesTest, AssertsCompiledSQL):
+class UpdateTest(_UpdateFromTestBase, fixtures.TablesTest, AssertsCompiledSQL):
+ __dialect__ = 'default'
+
+ def test_update_1(self):
+ table1 = self.tables.mytable
+
+ self.assert_compile(
+ update(table1, table1.c.myid == 7),
+ 'UPDATE mytable SET name=:name WHERE mytable.myid = :myid_1',
+ params={table1.c.name: 'fred'})
+
+ def test_update_2(self):
+ table1 = self.tables.mytable
+
+ self.assert_compile(
+ table1.update().
+ where(table1.c.myid == 7).
+ values({table1.c.myid: 5}),
+ 'UPDATE mytable SET myid=:myid WHERE mytable.myid = :myid_1',
+ checkparams={'myid': 5, 'myid_1': 7})
+
+ def test_update_3(self):
+ table1 = self.tables.mytable
+
+ self.assert_compile(
+ update(table1, table1.c.myid == 7),
+ 'UPDATE mytable SET name=:name WHERE mytable.myid = :myid_1',
+ params={'name': 'fred'})
+
+ def test_update_4(self):
+ table1 = self.tables.mytable
+
+ self.assert_compile(
+ update(table1, values={table1.c.name: table1.c.myid}),
+ 'UPDATE mytable SET name=mytable.myid')
+
+ def test_update_5(self):
+ table1 = self.tables.mytable
+
+ self.assert_compile(
+ update(table1,
+ whereclause=table1.c.name == bindparam('crit'),
+ values={table1.c.name: 'hi'}),
+ 'UPDATE mytable SET name=:name WHERE mytable.name = :crit',
+ params={'crit': 'notthere'},
+ checkparams={'crit': 'notthere', 'name': 'hi'})
+
+ def test_update_6(self):
+ table1 = self.tables.mytable
+
+ self.assert_compile(
+ update(table1,
+ table1.c.myid == 12,
+ values={table1.c.name: table1.c.myid}),
+ 'UPDATE mytable '
+ 'SET name=mytable.myid, description=:description '
+ 'WHERE mytable.myid = :myid_1',
+ params={'description': 'test'},
+ checkparams={'description': 'test', 'myid_1': 12})
+
+ def test_update_7(self):
+ table1 = self.tables.mytable
+
+ self.assert_compile(
+ update(table1, table1.c.myid == 12, values={table1.c.myid: 9}),
+ 'UPDATE mytable '
+ 'SET myid=:myid, description=:description '
+ 'WHERE mytable.myid = :myid_1',
+ params={'myid_1': 12, 'myid': 9, 'description': 'test'})
+
+ def test_update_8(self):
+ table1 = self.tables.mytable
+
+ self.assert_compile(
+ update(table1, table1.c.myid == 12),
+ 'UPDATE mytable SET myid=:myid WHERE mytable.myid = :myid_1',
+ params={'myid': 18}, checkparams={'myid': 18, 'myid_1': 12})
+
+ def test_update_9(self):
+ table1 = self.tables.mytable
+
+ s = table1.update(table1.c.myid == 12, values={table1.c.name: 'lala'})
+ c = s.compile(column_keys=['id', 'name'])
+ eq_(str(s), str(c))
+
+ def test_update_10(self):
+ table1 = self.tables.mytable
+
+ v1 = {table1.c.name: table1.c.myid}
+ v2 = {table1.c.name: table1.c.name + 'foo'}
+ self.assert_compile(
+ update(table1, table1.c.myid == 12, values=v1).values(v2),
+ 'UPDATE mytable '
+ 'SET '
+ 'name=(mytable.name || :name_1), '
+ 'description=:description '
+ 'WHERE mytable.myid = :myid_1',
+ params={'description': 'test'})
+
+ def test_update_11(self):
+ table1 = self.tables.mytable
+
+ values = {
+ table1.c.name: table1.c.name + 'lala',
+ table1.c.myid: func.do_stuff(table1.c.myid, literal('hoho'))
+ }
+ self.assert_compile(update(table1,
+ (table1.c.myid == func.hoho(4)) &
+ (table1.c.name == literal('foo') +
+ table1.c.name + literal('lala')),
+ values=values),
+ 'UPDATE mytable '
+ 'SET '
+ 'myid=do_stuff(mytable.myid, :param_1), '
+ 'name=(mytable.name || :name_1) '
+ 'WHERE '
+ 'mytable.myid = hoho(:hoho_1) AND '
+ 'mytable.name = :param_2 || mytable.name || :param_3')
+
+ def test_prefix_with(self):
+ table1 = self.tables.mytable
+
+ stmt = table1.update().\
+ prefix_with('A', 'B', dialect='mysql').\
+ prefix_with('C', 'D')
+
+ self.assert_compile(stmt,
+ 'UPDATE C D mytable SET myid=:myid, name=:name, '
+ 'description=:description')
+
+ self.assert_compile(stmt,
+ 'UPDATE A B C D mytable SET myid=%s, name=%s, description=%s',
+ dialect=mysql.dialect())
+
+ def test_alias(self):
+ table1 = self.tables.mytable
+ talias1 = table1.alias('t1')
+
+ self.assert_compile(update(talias1, talias1.c.myid == 7),
+ 'UPDATE mytable AS t1 '
+ 'SET name=:name '
+ 'WHERE t1.myid = :myid_1',
+ params={table1.c.name: 'fred'})
+
+ self.assert_compile(update(talias1, table1.c.myid == 7),
+ 'UPDATE mytable AS t1 '
+ 'SET name=:name '
+ 'FROM mytable '
+ 'WHERE mytable.myid = :myid_1',
+ params={table1.c.name: 'fred'})
+
+ def test_update_to_expression(self):
+ """test update from an expression.
+
+ this logic is triggered currently by a left side that doesn't
+ have a key. The current supported use case is updating the index
+ of a Postgresql ARRAY type.
+
+ """
+ table1 = self.tables.mytable
+ expr = func.foo(table1.c.myid)
+ assert not hasattr(expr, 'key')
+ self.assert_compile(table1.update().values({expr: 'bar'}),
+ 'UPDATE mytable SET foo(myid)=:param_1')
+
+
+class UpdateFromCompileTest(_UpdateFromTestBase, fixtures.TablesTest,
+ AssertsCompiledSQL):
__dialect__ = 'default'
run_create_tables = run_inserts = run_deletes = None
def test_render_table(self):
users, addresses = self.tables.users, self.tables.addresses
+
self.assert_compile(
- users.update().\
- values(name='newname').\
- where(users.c.id==addresses.c.user_id).\
- where(addresses.c.email_address=='e1'),
- "UPDATE users SET name=:name FROM addresses "
- "WHERE users.id = addresses.user_id AND "
- "addresses.email_address = :email_address_1",
- checkparams={u'email_address_1': 'e1', 'name': 'newname'}
- )
+ users.update().
+ values(name='newname').
+ where(users.c.id == addresses.c.user_id).
+ where(addresses.c.email_address == 'e1'),
+ 'UPDATE users '
+ 'SET name=:name FROM addresses '
+ 'WHERE '
+ 'users.id = addresses.user_id AND '
+ 'addresses.email_address = :email_address_1',
+ checkparams={u'email_address_1': 'e1', 'name': 'newname'})
def test_render_multi_table(self):
- users, addresses, dingalings = \
- self.tables.users, \
- self.tables.addresses, \
- self.tables.dingalings
+ users = self.tables.users
+ addresses = self.tables.addresses
+ dingalings = self.tables.dingalings
+
+ checkparams = {
+ u'email_address_1': 'e1',
+ u'id_1': 2,
+ 'name': 'newname'
+ }
+
self.assert_compile(
- users.update().\
- values(name='newname').\
- where(users.c.id==addresses.c.user_id).\
- where(addresses.c.email_address=='e1').\
- where(addresses.c.id==dingalings.c.address_id).\
- where(dingalings.c.id==2),
- "UPDATE users SET name=:name FROM addresses, "
- "dingalings WHERE users.id = addresses.user_id "
- "AND addresses.email_address = :email_address_1 "
- "AND addresses.id = dingalings.address_id AND "
- "dingalings.id = :id_1",
- checkparams={u'email_address_1': 'e1', u'id_1': 2,
- 'name': 'newname'}
- )
+ users.update().
+ values(name='newname').
+ where(users.c.id == addresses.c.user_id).
+ where(addresses.c.email_address == 'e1').
+ where(addresses.c.id == dingalings.c.address_id).
+ where(dingalings.c.id == 2),
+ 'UPDATE users '
+ 'SET name=:name '
+ 'FROM addresses, dingalings '
+ 'WHERE '
+ 'users.id = addresses.user_id AND '
+ 'addresses.email_address = :email_address_1 AND '
+ 'addresses.id = dingalings.address_id AND '
+ 'dingalings.id = :id_1',
+ checkparams=checkparams)
def test_render_table_mysql(self):
users, addresses = self.tables.users, self.tables.addresses
+
self.assert_compile(
- users.update().\
- values(name='newname').\
- where(users.c.id==addresses.c.user_id).\
- where(addresses.c.email_address=='e1'),
- "UPDATE users, addresses SET users.name=%s "
- "WHERE users.id = addresses.user_id AND "
- "addresses.email_address = %s",
+ users.update().
+ values(name='newname').
+ where(users.c.id == addresses.c.user_id).
+ where(addresses.c.email_address == 'e1'),
+ 'UPDATE users, addresses '
+ 'SET users.name=%s '
+ 'WHERE '
+ 'users.id = addresses.user_id AND '
+ 'addresses.email_address = %s',
checkparams={u'email_address_1': 'e1', 'name': 'newname'},
- dialect=mysql.dialect()
- )
+ dialect=mysql.dialect())
def test_render_subquery(self):
users, addresses = self.tables.users, self.tables.addresses
- subq = select([addresses.c.id,
- addresses.c.user_id,
- addresses.c.email_address]).\
- where(addresses.c.id==7).alias()
+
+ checkparams = {
+ u'email_address_1': 'e1',
+ u'id_1': 7,
+ 'name': 'newname'
+ }
+
+ cols = [
+ addresses.c.id,
+ addresses.c.user_id,
+ addresses.c.email_address
+ ]
+
+ subq = select(cols).where(addresses.c.id == 7).alias()
self.assert_compile(
- users.update().\
- values(name='newname').\
- where(users.c.id==subq.c.user_id).\
- where(subq.c.email_address=='e1'),
- "UPDATE users SET name=:name FROM "
- "(SELECT addresses.id AS id, addresses.user_id "
- "AS user_id, addresses.email_address AS "
- "email_address FROM addresses WHERE addresses.id = "
- ":id_1) AS anon_1 WHERE users.id = anon_1.user_id "
- "AND anon_1.email_address = :email_address_1",
- checkparams={u'email_address_1': 'e1',
- u'id_1': 7, 'name': 'newname'}
- )
+ users.update().
+ values(name='newname').
+ where(users.c.id == subq.c.user_id).
+ where(subq.c.email_address == 'e1'),
+ 'UPDATE users '
+ 'SET name=:name FROM ('
+ 'SELECT '
+ 'addresses.id AS id, '
+ 'addresses.user_id AS user_id, '
+ 'addresses.email_address AS email_address '
+ 'FROM addresses '
+ 'WHERE addresses.id = :id_1'
+ ') AS anon_1 '
+ 'WHERE users.id = anon_1.user_id '
+ 'AND anon_1.email_address = :email_address_1',
+ checkparams=checkparams)
+
class UpdateFromRoundTripTest(_UpdateFromTestBase, fixtures.TablesTest):
@testing.requires.update_from
def test_exec_two_table(self):
users, addresses = self.tables.users, self.tables.addresses
+
testing.db.execute(
- addresses.update().\
- values(email_address=users.c.name).\
- where(users.c.id==addresses.c.user_id).\
- where(users.c.name=='ed')
- )
- eq_(
- testing.db.execute(
- addresses.select().\
- order_by(addresses.c.id)).fetchall(),
- [
- (1, 7, 'x', "jack@bean.com"),
- (2, 8, 'x', "ed"),
- (3, 8, 'x', "ed"),
- (4, 8, 'x', "ed"),
- (5, 9, 'x', "fred@fred.com")
- ]
- )
+ addresses.update().
+ values(email_address=users.c.name).
+ where(users.c.id == addresses.c.user_id).
+ where(users.c.name == 'ed'))
+
+ expected = [
+ (1, 7, 'x', 'jack@bean.com'),
+ (2, 8, 'x', 'ed'),
+ (3, 8, 'x', 'ed'),
+ (4, 8, 'x', 'ed'),
+ (5, 9, 'x', 'fred@fred.com')]
+ self._assert_addresses(addresses, expected)
@testing.requires.update_from
def test_exec_two_table_plus_alias(self):
users, addresses = self.tables.users, self.tables.addresses
- a1 = addresses.alias()
+ a1 = addresses.alias()
testing.db.execute(
- addresses.update().\
- values(email_address=users.c.name).\
- where(users.c.id==a1.c.user_id).\
- where(users.c.name=='ed').\
- where(a1.c.id==addresses.c.id)
- )
- eq_(
- testing.db.execute(
- addresses.select().\
- order_by(addresses.c.id)).fetchall(),
- [
- (1, 7, 'x', "jack@bean.com"),
- (2, 8, 'x', "ed"),
- (3, 8, 'x', "ed"),
- (4, 8, 'x', "ed"),
- (5, 9, 'x', "fred@fred.com")
- ]
+ addresses.update().
+ values(email_address=users.c.name).
+ where(users.c.id == a1.c.user_id).
+ where(users.c.name == 'ed').
+ where(a1.c.id == addresses.c.id)
)
+ expected = [
+ (1, 7, 'x', 'jack@bean.com'),
+ (2, 8, 'x', 'ed'),
+ (3, 8, 'x', 'ed'),
+ (4, 8, 'x', 'ed'),
+ (5, 9, 'x', 'fred@fred.com')]
+ self._assert_addresses(addresses, expected)
+
@testing.requires.update_from
def test_exec_three_table(self):
- users, addresses, dingalings = \
- self.tables.users, \
- self.tables.addresses, \
- self.tables.dingalings
+ users = self.tables.users
+ addresses = self.tables.addresses
+ dingalings = self.tables.dingalings
+
testing.db.execute(
- addresses.update().\
- values(email_address=users.c.name).\
- where(users.c.id==addresses.c.user_id).\
- where(users.c.name=='ed').
- where(addresses.c.id==dingalings.c.address_id).\
- where(dingalings.c.id==1),
- )
- eq_(
- testing.db.execute(
- addresses.select().order_by(addresses.c.id)
- ).fetchall(),
- [
- (1, 7, 'x', "jack@bean.com"),
- (2, 8, 'x', "ed"),
- (3, 8, 'x', "ed@bettyboop.com"),
- (4, 8, 'x', "ed@lala.com"),
- (5, 9, 'x', "fred@fred.com")
- ]
- )
+ addresses.update().
+ values(email_address=users.c.name).
+ where(users.c.id == addresses.c.user_id).
+ where(users.c.name == 'ed').
+ where(addresses.c.id == dingalings.c.address_id).
+ where(dingalings.c.id == 1))
+
+ expected = [
+ (1, 7, 'x', 'jack@bean.com'),
+ (2, 8, 'x', 'ed'),
+ (3, 8, 'x', 'ed@bettyboop.com'),
+ (4, 8, 'x', 'ed@lala.com'),
+ (5, 9, 'x', 'fred@fred.com')]
+ self._assert_addresses(addresses, expected)
@testing.only_on('mysql', 'Multi table update')
def test_exec_multitable(self):
users, addresses = self.tables.users, self.tables.addresses
+
+ values = {
+ addresses.c.email_address: users.c.name,
+ users.c.name: 'ed2'
+ }
+
testing.db.execute(
- addresses.update().\
- values({
- addresses.c.email_address:users.c.name,
- users.c.name:'ed2'
- }).\
- where(users.c.id==addresses.c.user_id).\
- where(users.c.name=='ed')
- )
- eq_(
- testing.db.execute(
- addresses.select().order_by(addresses.c.id)).fetchall(),
- [
- (1, 7, 'x', "jack@bean.com"),
- (2, 8, 'x', "ed"),
- (3, 8, 'x', "ed"),
- (4, 8, 'x', "ed"),
- (5, 9, 'x', "fred@fred.com")
- ]
- )
- eq_(
- testing.db.execute(
- users.select().order_by(users.c.id)).fetchall(),
- [
- (7, 'jack'),
- (8, 'ed2'),
- (9, 'fred'),
- (10, 'chuck')
- ]
- )
+ addresses.update().
+ values(values).
+ where(users.c.id == addresses.c.user_id).
+ where(users.c.name == 'ed'))
+
+ expected = [
+ (1, 7, 'x', 'jack@bean.com'),
+ (2, 8, 'x', 'ed'),
+ (3, 8, 'x', 'ed'),
+ (4, 8, 'x', 'ed'),
+ (5, 9, 'x', 'fred@fred.com')]
+ self._assert_addresses(addresses, expected)
+
+ expected = [
+ (7, 'jack'),
+ (8, 'ed2'),
+ (9, 'fred'),
+ (10, 'chuck')]
+ self._assert_users(users, expected)
-class UpdateFromMultiTableUpdateDefaultsTest(_UpdateFromTestBase, fixtures.TablesTest):
+ def _assert_addresses(self, addresses, expected):
+ stmt = addresses.select().order_by(addresses.c.id)
+ eq_(testing.db.execute(stmt).fetchall(), expected)
+
+ def _assert_users(self, users, expected):
+ stmt = users.select().order_by(users.c.id)
+ eq_(testing.db.execute(stmt).fetchall(), expected)
+
+
+class UpdateFromMultiTableUpdateDefaultsTest(_UpdateFromTestBase,
+ fixtures.TablesTest):
@classmethod
def define_tables(cls, metadata):
Table('users', metadata,
Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column('name', String(30), nullable=False),
- Column('some_update', String(30), onupdate="im the update")
- )
+ Column('some_update', String(30), onupdate='im the update'))
Table('addresses', metadata,
Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column('user_id', None, ForeignKey('users.id')),
- Column('email_address', String(50), nullable=False),
- )
+ Column('email_address', String(50), nullable=False))
@classmethod
def fixtures(cls):
return dict(
- users = (
+ users=(
('id', 'name', 'some_update'),
(8, 'ed', 'value'),
(9, 'fred', 'value'),
),
-
- addresses = (
+ addresses=(
('id', 'user_id', 'email_address'),
- (2, 8, "ed@wood.com"),
- (3, 8, "ed@bettyboop.com"),
- (4, 9, "fred@fred.com")
+ (2, 8, 'ed@wood.com'),
+ (3, 8, 'ed@bettyboop.com'),
+ (4, 9, 'fred@fred.com')
),
)
@testing.only_on('mysql', 'Multi table update')
def test_defaults_second_table(self):
users, addresses = self.tables.users, self.tables.addresses
+
+ values = {
+ addresses.c.email_address: users.c.name,
+ users.c.name: 'ed2'
+ }
+
ret = testing.db.execute(
- addresses.update().\
- values({
- addresses.c.email_address:users.c.name,
- users.c.name:'ed2'
- }).\
- where(users.c.id==addresses.c.user_id).\
- where(users.c.name=='ed')
- )
- eq_(
- set(ret.prefetch_cols()),
- set([users.c.some_update])
- )
- eq_(
- testing.db.execute(
- addresses.select().order_by(addresses.c.id)).fetchall(),
- [
- (2, 8, "ed"),
- (3, 8, "ed"),
- (4, 9, "fred@fred.com")
- ]
- )
- eq_(
- testing.db.execute(
- users.select().order_by(users.c.id)).fetchall(),
- [
- (8, 'ed2', 'im the update'),
- (9, 'fred', 'value'),
- ]
- )
+ addresses.update().
+ values(values).
+ where(users.c.id == addresses.c.user_id).
+ where(users.c.name == 'ed'))
+
+ eq_(set(ret.prefetch_cols()), set([users.c.some_update]))
+
+ expected = [
+ (2, 8, 'ed'),
+ (3, 8, 'ed'),
+ (4, 9, 'fred@fred.com')]
+ self._assert_addresses(addresses, expected)
+
+ expected = [
+ (8, 'ed2', 'im the update'),
+ (9, 'fred', 'value')]
+ self._assert_users(users, expected)
@testing.only_on('mysql', 'Multi table update')
def test_no_defaults_second_table(self):
users, addresses = self.tables.users, self.tables.addresses
+
ret = testing.db.execute(
- addresses.update().\
- values({
- 'email_address':users.c.name,
- }).\
- where(users.c.id==addresses.c.user_id).\
- where(users.c.name=='ed')
- )
- eq_(
- ret.prefetch_cols(),[]
- )
- eq_(
- testing.db.execute(
- addresses.select().order_by(addresses.c.id)).fetchall(),
- [
- (2, 8, "ed"),
- (3, 8, "ed"),
- (4, 9, "fred@fred.com")
- ]
- )
- # users table not actually updated,
- # so no onupdate
- eq_(
- testing.db.execute(
- users.select().order_by(users.c.id)).fetchall(),
- [
- (8, 'ed', 'value'),
- (9, 'fred', 'value'),
- ]
- )
+ addresses.update().
+ values({'email_address': users.c.name}).
+ where(users.c.id == addresses.c.user_id).
+ where(users.c.name == 'ed'))
+
+ eq_(ret.prefetch_cols(), [])
+
+ expected = [
+ (2, 8, 'ed'),
+ (3, 8, 'ed'),
+ (4, 9, 'fred@fred.com')]
+ self._assert_addresses(addresses, expected)
+
+ # users table not actually updated, so no onupdate
+ expected = [
+ (8, 'ed', 'value'),
+ (9, 'fred', 'value')]
+ self._assert_users(users, expected)
+
+ def _assert_addresses(self, addresses, expected):
+ stmt = addresses.select().order_by(addresses.c.id)
+ eq_(testing.db.execute(stmt).fetchall(), expected)
+
+ def _assert_users(self, users, expected):
+ stmt = users.select().order_by(users.c.id)
+ eq_(testing.db.execute(stmt).fetchall(), expected)