author     Mike Bayer <mike_mp@zzzcomputing.com>  2010-11-06 11:49:45 -0400
committer  Mike Bayer <mike_mp@zzzcomputing.com>  2010-11-06 11:49:45 -0400
commit     4e2c0f10cd164511b9c6377b72a8c0527e4eb716 (patch)
tree       830319060dc68ec7de0eaa04eaf8ba8e7948d535
parent     d9dc05adb689bc4eab2227a96af0d874696cc63d (diff)
parent     30bc42403754110df1fdec3037c7700cc4f26b70 (diff)
download   sqlalchemy-4e2c0f10cd164511b9c6377b72a8c0527e4eb716.tar.gz
- merge tip
-rwxr-xr-x  .hgignore                                              3
-rw-r--r--  .hgtags                                                1
-rw-r--r--  CHANGES                                              134
-rw-r--r--  doc/build/core/connections.rst                         5
-rw-r--r--  doc/build/core/engines.rst                            10
-rw-r--r--  doc/build/core/expression_api.rst                      2
-rw-r--r--  doc/build/orm/session.rst                             98
-rw-r--r--  examples/association/basic_association.py             10
-rw-r--r--  examples/association/proxied_association.py           10
-rw-r--r--  examples/custom_attributes/custom_management.py       60
-rw-r--r--  examples/sharding/attribute_shard.py                   4
-rw-r--r--  examples/vertical/dictlike-polymorphic.py             11
-rw-r--r--  examples/vertical/dictlike.py                         11
-rw-r--r--  lib/sqlalchemy/__init__.py                             3
-rw-r--r--  lib/sqlalchemy/dialects/mssql/base.py                 79
-rw-r--r--  lib/sqlalchemy/dialects/mssql/information_schema.py    2
-rw-r--r--  lib/sqlalchemy/dialects/mssql/pymssql.py               6
-rw-r--r--  lib/sqlalchemy/dialects/mysql/base.py                  4
-rw-r--r--  lib/sqlalchemy/dialects/mysql/zxjdbc.py                2
-rw-r--r--  lib/sqlalchemy/dialects/oracle/base.py                 8
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/base.py            70
-rw-r--r--  lib/sqlalchemy/engine/__init__.py                      7
-rw-r--r--  lib/sqlalchemy/engine/default.py                       1
-rwxr-xr-x  lib/sqlalchemy/ext/declarative.py                     89
-rw-r--r--  lib/sqlalchemy/orm/__init__.py                        50
-rw-r--r--  lib/sqlalchemy/orm/dynamic.py                          2
-rw-r--r--  lib/sqlalchemy/orm/interfaces.py                      34
-rw-r--r--  lib/sqlalchemy/orm/mapper.py                          23
-rw-r--r--  lib/sqlalchemy/orm/properties.py                       2
-rw-r--r--  lib/sqlalchemy/orm/query.py                           65
-rw-r--r--  lib/sqlalchemy/orm/session.py                         66
-rw-r--r--  lib/sqlalchemy/orm/state.py                           10
-rw-r--r--  lib/sqlalchemy/orm/strategies.py                      62
-rw-r--r--  lib/sqlalchemy/schema.py                              12
-rw-r--r--  lib/sqlalchemy/sql/__init__.py                         1
-rw-r--r--  lib/sqlalchemy/sql/compiler.py                         8
-rw-r--r--  lib/sqlalchemy/sql/expression.py                      52
-rw-r--r--  lib/sqlalchemy/test/__init__.py                        3
-rw-r--r--  lib/sqlalchemy/test/engines.py                         2
-rw-r--r--  lib/sqlalchemy/test/profiling.py                       2
-rw-r--r--  lib/sqlalchemy/test/testing.py                         7
-rw-r--r--  lib/sqlalchemy/types.py                               66
-rw-r--r--  lib/sqlalchemy/util.py                                14
-rw-r--r--  lib/sqlalchemy_nose/__init__.py                        0
-rw-r--r--  lib/sqlalchemy_nose/config.py (renamed from lib/sqlalchemy/test/config.py)  0
-rw-r--r--  lib/sqlalchemy_nose/noseplugin.py (renamed from lib/sqlalchemy/test/noseplugin.py)  10
-rw-r--r--  setup.py                                              12
-rwxr-xr-x  sqla_nose.py                                          13
-rw-r--r--  test/__init__.py                                       1
-rw-r--r--  test/dialect/test_mssql.py                           119
-rw-r--r--  test/dialect/test_mysql.py                            17
-rw-r--r--  test/dialect/test_oracle.py                            7
-rw-r--r--  test/dialect/test_postgresql.py                       69
-rw-r--r--  test/engine/test_parseconnect.py                      16
-rw-r--r--  test/engine/test_reflection.py                        40
-rw-r--r--  test/ext/test_declarative.py                          46
-rw-r--r--  test/orm/test_eager_relations.py                     112
-rw-r--r--  test/orm/test_immediate_load.py                       49
-rw-r--r--  test/orm/test_query.py                               295
-rw-r--r--  test/orm/test_unitofwork.py                           19
-rw-r--r--  test/sql/test_compiler.py                            396
-rw-r--r--  test/sql/test_types.py                               138
62 files changed, 1967 insertions, 503 deletions
diff --git a/.hgignore b/.hgignore
index ef7855fae..a4a0ce2bc 100755
--- a/.hgignore
+++ b/.hgignore
@@ -4,4 +4,7 @@ syntax:regexp
.pyc$
.orig$
.egg-info
+.*,cover
+\.coverage
+\.DS_Store
test.cfg
diff --git a/.hgtags b/.hgtags
index 85b12076c..4989d4f72 100644
--- a/.hgtags
+++ b/.hgtags
@@ -75,3 +75,4 @@ bb99158e5821d56344df21ed46128c2ea79bf2bd rel_0_6_1
30b7ef7a9a9c24abdc86bbde332ad3e6213bff2d rel_0_6_2
1db7766705b70f5326b614699b7c06d46168d19d rel_0_6_3
2db46b7f51c1e64f06d9c31c49ff6e15be98e9ca rel_0_6_4
+ff12fe7cac947ce9e28be4e8b4473df2c0adaa04 rel_0_6_5
diff --git a/CHANGES b/CHANGES
index ab5e240bf..65715de54 100644
--- a/CHANGES
+++ b/CHANGES
@@ -3,9 +3,50 @@
=======
CHANGES
=======
+0.6.6
+=====
+- orm
+ - Fixed bug whereby a non-"mutable" attribute modified event
+ which occurred on an object that was clean except for
+ preceding mutable attribute changes would fail to strongly
+ reference itself in the identity map. This would cause the
+ object to be garbage collected, losing track of any changes
+ that weren't previously saved in the "mutable changes"
+ dictionary.
+
+ - "innerjoin" flag doesn't take effect along the chain
+ of joinedload() joins if a previous join in that chain
+ is an outer join, thus allowing primary rows without
+ a referenced child row to be correctly returned
+ in results. [ticket:1954]
+
+- mysql
+ - Fixed error handling for Jython + zxjdbc, such that
+ has_table() property works again. Regression from
+ 0.6.3 (we don't have a Jython buildbot, sorry)
+ [ticket:1960]
+
0.6.5
=====
- orm
+ - Added a new "lazyload" option "immediateload".
+ Issues the usual "lazy" load operation automatically
+ as the object is populated. The use case
+ here is when loading objects to be placed in
+ an offline cache, or otherwise used after
+ the session isn't available, and straight 'select'
+ loading, not 'joined' or 'subquery', is desired.
+ [ticket:1914]
+
+ - New Query methods: query.label(name), query.as_scalar(),
+ return the query's statement as a scalar subquery
+ with/without label [ticket:1920];
+ query.with_entities(*ent), replaces the SELECT list of
+ the query with new entities.
+ Roughly equivalent to a generative form of query.values()
+ which accepts mapped entities as well as column
+ expressions.
+
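A minimal sketch of the new methods (``User`` and ``Address`` are assumed mappings here, ``session`` an active Session)::

    from sqlalchemy import func

    # label() renders the Query as a named scalar subquery
    address_count = session.query(func.count(Address.id)).\
        filter(Address.user_id == User.id).\
        label('address_count')

    # each result row is (User instance, count of addresses)
    for user, count in session.query(User, address_count):
        print user, count
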
- Fixed recursion bug which could occur when moving
an object from one reference to another, with
backrefs involved, where the initiating parent
@@ -16,6 +57,10 @@ CHANGES
passed an empty list to "include_properties" on
mapper() [ticket:1918]
+ - Fixed labeling bug in Query whereby the NamedTuple
+ would mis-apply labels if any of the column
+ expressions were un-labeled.
+
- Patched a case where query.join() would adapt the
right side to the right side of the left's join
inappropriately [ticket:1925]
@@ -118,6 +163,33 @@ CHANGES
[ticket:1932]
- sql
+ - Fixed bug in TypeDecorator whereby the dialect-specific
+ type was getting pulled in to generate the DDL for a
+ given type, which didn't always return the correct result.
+
+ - TypeDecorator can now have a fully constructed type
+ specified as its "impl", in addition to a type class.
+
+ - TypeDecorator will now place itself as the resulting
+ type for a binary expression where the type coercion
+ rules would normally return its impl type - previously,
+ a copy of the impl type would be returned which would
+ have the TypeDecorator embedded into it as the "dialect"
+ impl, this was probably an unintentional way of achieving
+ the desired effect.
+
+ - TypeDecorator.load_dialect_impl() returns "self.impl" by
+ default, i.e. not the dialect implementation type of
+ "self.impl". This is to support compilation correctly.
+ Behavior can be user-overridden in exactly the same way
+ as before to the same effect.
+
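A sketch of the newly allowed constructed-type ``impl`` (the ``LowerCaseString`` name is illustrative)::

    from sqlalchemy import types

    class LowerCaseString(types.TypeDecorator):
        # a fully constructed type, not just a type class,
        # may now serve as "impl"
        impl = types.String(50)

        def process_bind_param(self, value, dialect):
            if value is not None:
                value = value.lower()
            return value
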
+ - Added type_coerce(expr, type_) expression element.
+ Treats the given expression as the given type when evaluating
+ expressions and processing result rows, but does not
+ affect the generation of SQL, other than an anonymous
+ label.
+
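A sketch of ``type_coerce`` in use; ``mytable`` is a hypothetical Table with a string column holding date values::

    from sqlalchemy import select, type_coerce, Date

    # the emitted SQL is unchanged apart from an anonymous label,
    # but result rows are processed as Date values
    stmt = select([type_coerce(mytable.c.date_string, Date)])
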
- Table.tometadata() now copies Index objects associated
with the Table as well.
@@ -158,7 +230,11 @@ CHANGES
inheritance scheme where the attribute name is
different than that of the column. [ticket:1930],
[ticket:1931].
-
+
+ - A mixin can now specify a column that overrides
+ a column of the same name associated with a superclass.
+ Thanks to Oystein Haaland.
+
- engine
- Fixed a regression in 0.6.4 whereby the change that
@@ -171,12 +247,66 @@ CHANGES
- the logging message emitted by the engine when
a connection is first used is now "BEGIN (implicit)"
to emphasize that DBAPI has no explicit begin().
+
+ - added "views=True" option to metadata.reflect(),
+ will add the list of available views to those
+ being reflected. [ticket:1936]
+
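E.g., a minimal sketch (``engine`` is assumed)::

    from sqlalchemy import MetaData

    meta = MetaData()
    # reflect tables plus any views visible to the connection
    meta.reflect(bind=engine, views=True)
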
+ - engine_from_config() now accepts 'debug' for
+ 'echo', 'echo_pool', 'force' for 'convert_unicode',
+ boolean values for 'use_native_unicode'.
+ [ticket:1899]
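A sketch of a configuration dict exercising the newly accepted values::

    from sqlalchemy import engine_from_config

    config = {
        'sqlalchemy.url': 'postgresql://scott:tiger@localhost/test',
        'sqlalchemy.echo': 'debug',             # 'debug' now accepted
        'sqlalchemy.convert_unicode': 'force',  # 'force' now accepted
        'sqlalchemy.use_native_unicode': 'false',
    }
    engine = engine_from_config(config, prefix='sqlalchemy.')
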
+- postgresql
+ - Added "as_tuple" flag to ARRAY type, returns results
+ as tuples instead of lists to allow hashing.
+
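A sketch, assuming a PostgreSQL table with an integer array column::

    from sqlalchemy import Table, Column, Integer, MetaData
    from sqlalchemy.dialects.postgresql import ARRAY

    metadata = MetaData()
    data = Table('data', metadata,
        Column('id', Integer, primary_key=True),
        # tuple results are hashable; as_tuple=True
        # requires mutable=False
        Column('scores', ARRAY(Integer, mutable=False, as_tuple=True))
    )
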
+ - Fixed bug which prevented "domain" built from a
+ custom type such as "enum" from being reflected.
+ [ticket:1933]
+
+- mysql
+ - Fixed bug involving reflection of CURRENT_TIMESTAMP
+ default used with ON UPDATE clause, thanks to
+ Taavi Burns [ticket:1940]
+
+- oracle
+ - The implicit_returning argument to create_engine()
+ is now honored regardless of detected version of
+ Oracle. Previously, the flag would be forced
+ to False if server version info was < 10.
+ [ticket:1878]
+
+- mssql
+ - Fixed reflection bug which did not properly handle
+ reflection of unknown types. [ticket:1946]
+
+ - Fixed bug where aliasing of tables with "schema" would
+ fail to compile properly. [ticket:1943]
+
+ - Rewrote the reflection of indexes to use sys.
+ catalogs, so that column names of any configuration
+ (spaces, embedded commas, etc.) can be reflected.
+ Note that reflection of indexes requires SQL
+ Server 2005 or greater. [ticket:1770]
+
+ - mssql+pymssql dialect now honors the "port" portion
+ of the URL instead of discarding it. [ticket:1952]
+
- informix
- *Major* cleanup / modernization of the Informix
dialect for 0.6, courtesy Florian Apolloner.
[ticket:1906]
-
+
+- tests
+ - the NoseSQLAlchemyPlugin has been moved to a
+ new package "sqlalchemy_nose" which installs
+ along with "sqlalchemy". This is so that the "nosetests"
+ script works as always but also allows the
+ --with-coverage option to turn on coverage before
+ SQLAlchemy modules are imported, allowing coverage
+ to work correctly.
+
- misc
- CircularDependencyError now has .cycles and .edges
members, which are the set of elements involved in
diff --git a/doc/build/core/connections.rst b/doc/build/core/connections.rst
index 91e1d698b..7591e73e2 100644
--- a/doc/build/core/connections.rst
+++ b/doc/build/core/connections.rst
@@ -144,6 +144,11 @@ guaranteed to ``rollback()`` or ``commit()``::
trans.rollback()
raise
+.. _connections_nested_transactions:
+
+Nesting of Transaction Blocks
+------------------------------
+
The :class:`~sqlalchemy.engine.base.Transaction` object also handles "nested"
behavior by keeping track of the outermost begin/commit pair. In this example,
two functions both issue a transaction on a Connection, but only the outermost
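A sketch of that pattern (function and table names illustrative)::

    # method_a begins the outermost transaction and commits it
    def method_a(connection):
        trans = connection.begin()      # begins a real transaction
        try:
            method_b(connection)
            trans.commit()              # actually commits here
        except:
            trans.rollback()
            raise

    # method_b "begins" a transaction too, but only participates
    # in the one already opened by method_a
    def method_b(connection):
        trans = connection.begin()
        try:
            connection.execute("insert into mytable (name) values ('bat')")
            trans.commit()              # no-op until the outermost commit
        except:
            trans.rollback()            # rolls back the whole transaction
            raise
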
diff --git a/doc/build/core/engines.rst b/doc/build/core/engines.rst
index 576bace36..3295976fc 100644
--- a/doc/build/core/engines.rst
+++ b/doc/build/core/engines.rst
@@ -21,6 +21,7 @@ of the database.
Creating an engine is just a matter of issuing a single call,
:func:`.create_engine()`::
+ from sqlalchemy import create_engine
engine = create_engine('postgresql://scott:tiger@localhost:5432/mydatabase')
The above engine invokes the ``postgresql`` dialect and a connection pool
@@ -76,9 +77,9 @@ pyodbc_ ``mssql+pyodbc``\* yes development
pymssql_ ``mssql+pymssql`` yes development no yes yes
**MySQL**
`MySQL Connector/J`_ ``mysql+zxjdbc`` no no yes yes yes
-`MySQL Connector/Python`_ ``mysql+mysqlconnector`` yes partial no yes yes
+`MySQL Connector/Python`_ ``mysql+mysqlconnector`` yes yes no yes yes
mysql-python_ ``mysql+mysqldb``\* yes development no yes yes
-OurSQL_ ``mysql+oursql`` yes partial no yes yes
+OurSQL_ ``mysql+oursql`` yes yes no yes yes
**Oracle**
cx_oracle_ ``oracle+cx_oracle``\* yes development no yes yes
`Oracle JDBC Driver`_ ``oracle+zxjdbc`` no no yes yes yes
@@ -93,9 +94,10 @@ sqlite3_ ``sqlite+pysqlite``\* yes yes
**Sybase ASE**
mxodbc_ ``sybase+mxodbc`` development development no yes yes
pyodbc_ ``sybase+pyodbc``\* partial development no unknown unknown
-python-sybase_ ``sybase+pysybase`` partial development no yes yes
+python-sybase_ ``sybase+pysybase`` yes [1]_ development no yes yes
========================= =========================== =========== =========== =========== ================= ============
+.. [1] The Sybase dialect currently lacks the ability to reflect tables.
.. _psycopg2: http://www.initd.org/
.. _pg8000: http://pybrary.net/pg8000/
.. _pypostgresql: http://python.projects.postgresql.org/
@@ -120,7 +122,7 @@ python-sybase_ ``sybase+pysybase`` partial development
.. _sapdb: http://www.sapdb.org/sapdbapi.html
.. _python-sybase: http://python-sybase.sourceforge.net/
-Further detail on dialects is available at :ref:`sqlalchemy.dialects_toplevel`
+Further detail on dialects is available at :ref:`dialect_toplevel`
as well as additional notes on the wiki at `Database Notes
<http://www.sqlalchemy.org/trac/wiki/DatabaseNotes>`_
diff --git a/doc/build/core/expression_api.rst b/doc/build/core/expression_api.rst
index c39701a59..1b4a35f84 100644
--- a/doc/build/core/expression_api.rst
+++ b/doc/build/core/expression_api.rst
@@ -115,6 +115,8 @@ The expression package uses functions to construct SQL expressions. The return
.. autofunction:: tuple_
+.. autofunction:: type_coerce
+
.. autofunction:: union
.. autofunction:: union_all
diff --git a/doc/build/orm/session.rst b/doc/build/orm/session.rst
index 16ca7aff0..061f16f7e 100644
--- a/doc/build/orm/session.rst
+++ b/doc/build/orm/session.rst
@@ -996,6 +996,8 @@ statement::
item1.foo = 'bar'
item2.bar = 'foo'
+.. _session_begin_nested:
+
Using SAVEPOINT
---------------
@@ -1028,6 +1030,102 @@ session is expired, thus causing all subsequent attribute/instance access to
reference the full state of the :class:`~sqlalchemy.orm.session.Session` right
before :func:`~sqlalchemy.orm.session.Session.begin_nested` was called.
+.. _session_subtransactions:
+
+Using Subtransactions
+---------------------
+
+A subtransaction, as offered by the ``subtransactions=True`` flag of :meth:`.Session.begin`,
+is a non-transactional, delimiting construct that
+allows nesting of calls to :meth:`~.Session.begin` and :meth:`~.Session.commit`.
+Its purpose is to allow the construction of code that can function within a transaction
+both independently of any external code that starts a transaction,
+as well as within a block that has already demarcated a transaction. By "non-transactional", we
+mean that no actual transactional dialogue with the database is generated by this flag beyond that of
+a single call to :meth:`~.Session.begin`, regardless of how many times the method
+is called within a transaction.
+
+The subtransaction feature is in fact intrinsic to any call to :meth:`~.Session.flush`, which uses
+it internally to ensure that the series of flush steps are enclosed within a transaction,
+regardless of the setting of ``autocommit`` or the presence of an existing transactional context.
+However, explicit usage of the ``subtransactions=True`` flag is generally only useful with an
+application that uses the
+:class:`.Session` in "autocommit=True" mode, and calls :meth:`~.Session.begin` explicitly
+in order to demarcate transactions. For this reason the subtransaction feature is not
+commonly used in an explicit way, except for apps that integrate SQLAlchemy-level transaction control with
+the transaction control of another library or subsystem. For true, general purpose "nested"
+transactions, where a rollback affects only a portion of the work which has proceeded,
+savepoints should be used, documented in :ref:`session_begin_nested`.
+
+The feature is the ORM equivalent to the pattern described at :ref:`connections_nested_transactions`,
+where any number of functions can call :meth:`.Connection.begin` and :meth:`.Transaction.commit`
+as though they are the initiator of the transaction, but in fact may be participating
+in an already ongoing transaction.
+
+As is the case with the non-ORM :class:`.Transaction` object,
+calling :meth:`.Session.rollback` rolls back the **entire**
+transaction, which was initiated by the first call to
+:meth:`.Session.begin` (whether this call was explicit by the
+end user, or implicit in an ``autocommit=False`` scenario).
+However, the :class:`.Session` still considers itself to be in a
+"partially rolled back" state until :meth:`.Session.rollback` is
+called explicitly for each call that was made to
+:meth:`.Session.begin`, where "partially rolled back" means that
+no further SQL operations can proceed until each level
+of the transaction has been accounted for, unless the :meth:`~.Session.close` method
+is called which cancels all transactional markers. For a full exposition on
+the rationale for this,
+please see "`But why isn't the one automatic call to ROLLBACK
+enough ? Why must I ROLLBACK again?
+<http://www.sqlalchemy.org/trac/wiki/FAQ#ButwhyisnttheoneautomaticcalltoROLLBACKenoughWhymustIROLLBACKagain>`_".
+The general theme is that if subtransactions are used as intended, that is, as a means to nest multiple
+begin/commit pairs, the appropriate rollback calls naturally occur in any case, and allow the session's
+nesting of transactional pairs to function in a simple and predictable way
+without the need to guess as to what level is active.
+
+An example of ``subtransactions=True`` is nearly identical to
+that of the non-ORM technique. The nesting of transactions, as
+well as the natural presence of "rollback" for all transactions
+should an exception occur, is illustrated::
+
+ # method_a starts a transaction and calls method_b
+ def method_a(session):
+ session.begin(subtransactions=True) # open a transaction. If there was
+ # no previous call to begin(), this will
+ # begin a real transaction (meaning, a
+ # DBAPI connection is procured, which as
+ # per the DBAPI specification is in a transactional
+ # state ready to be committed or rolled back)
+ try:
+ method_b(session)
+ session.commit() # transaction is committed here
+ except:
+ session.rollback() # rolls back the transaction
+ raise
+
+ # method_b also starts a transaction
+ def method_b(session):
+ session.begin(subtransactions=True) # open a transaction - this
+ # runs in the context of method_a()'s
+ # transaction
+ try:
+ session.add(SomeObject('bat', 'lala'))
+ session.commit() # transaction is not committed yet
+ except:
+ session.rollback() # rolls back the transaction, in this case
+ # the one that was initiated in method_a().
+ raise
+
+ # create a Session and call method_a
+ session = Session(autocommit=True)
+ method_a(session)
+ session.close()
+
+Since the :meth:`.Session.flush` method uses a subtransaction, a failed flush
+will always issue a rollback which then affects the state of the outermost transaction (unless a SAVEPOINT
+is in use). This forces the need to issue :meth:`~.Session.rollback` for the full operation
+before subsequent SQL operations can proceed.
+
Enabling Two-Phase Commit
-------------------------
diff --git a/examples/association/basic_association.py b/examples/association/basic_association.py
index 9c280d7ea..d3d764167 100644
--- a/examples/association/basic_association.py
+++ b/examples/association/basic_association.py
@@ -14,7 +14,7 @@ from datetime import datetime
from sqlalchemy import (create_engine, MetaData, Table, Column, Integer,
String, DateTime, Numeric, ForeignKey, and_)
-from sqlalchemy.orm import mapper, relationship, create_session
+from sqlalchemy.orm import mapper, relationship, Session
# Uncomment these to watch database activity.
#import logging
@@ -70,14 +70,14 @@ mapper(OrderItem, orderitems, properties={
'item': relationship(Item, lazy='joined')
})
-session = create_session()
+session = Session()
# create our catalog
session.add(Item('SA T-Shirt', 10.99))
session.add(Item('SA Mug', 6.50))
session.add(Item('SA Hat', 8.99))
session.add(Item('MySQL Crowbar', 16.99))
-session.flush()
+session.commit()
# function to return items from the DB
def item(name):
@@ -91,9 +91,7 @@ order.order_items.append(OrderItem(item('SA Mug')))
order.order_items.append(OrderItem(item('MySQL Crowbar'), 10.99))
order.order_items.append(OrderItem(item('SA Hat')))
session.add(order)
-session.flush()
-
-session.expunge_all()
+session.commit()
# query the order, print items
order = session.query(Order).filter_by(customer_name='john smith').one()
diff --git a/examples/association/proxied_association.py b/examples/association/proxied_association.py
index ac258121e..fa41f21c3 100644
--- a/examples/association/proxied_association.py
+++ b/examples/association/proxied_association.py
@@ -4,7 +4,7 @@ the usage of the associationproxy extension."""
from datetime import datetime
from sqlalchemy import (create_engine, MetaData, Table, Column, Integer,
String, DateTime, Float, ForeignKey, and_)
-from sqlalchemy.orm import mapper, relationship, create_session
+from sqlalchemy.orm import mapper, relationship, Session
from sqlalchemy.ext.associationproxy import AssociationProxy
engine = create_engine('sqlite://')
@@ -55,14 +55,14 @@ mapper(OrderItem, orderitems, properties={
'item':relationship(Item, lazy='joined')
})
-session = create_session()
+session = Session()
# create our catalog
session.add_all([Item('SA T-Shirt', 10.99),
Item('SA Mug', 6.50),
Item('SA Hat', 8.99),
Item('MySQL Crowbar', 16.99)])
-session.flush()
+session.commit()
# function to return items
def item(name):
@@ -81,9 +81,7 @@ order.items.append(item('SA Mug'))
order.items.append(item('SA Hat'))
session.add(order)
-session.flush()
-
-session.expunge_all()
+session.commit()
# query the order, print items
order = session.query(Order).filter_by(customer_name='john smith').one()
diff --git a/examples/custom_attributes/custom_management.py b/examples/custom_attributes/custom_management.py
index 03d38c26a..2e2689140 100644
--- a/examples/custom_attributes/custom_management.py
+++ b/examples/custom_attributes/custom_management.py
@@ -1,20 +1,24 @@
-"""this example illustrates how to replace SQLAlchemy's class descriptors with a user-defined system.
+"""this example illustrates how to replace SQLAlchemy's class descriptors with
+a user-defined system.
-This sort of thing is appropriate for integration with frameworks that redefine class behaviors
-in their own way, such that SQLA's default instrumentation is not compatible.
+This sort of thing is appropriate for integration with frameworks that
+redefine class behaviors in their own way, such that SQLA's default
+instrumentation is not compatible.
-The example illustrates redefinition of instrumentation at the class level as well as the collection
-level, and redefines the storage of the class to store state within "instance._goofy_dict" instead
-of "instance.__dict__". Note that the default collection implementations can be used
-with a custom attribute system as well.
+The example illustrates redefinition of instrumentation at the class level as
+well as the collection level, and redefines the storage of the class to store
+state within "instance._goofy_dict" instead of "instance.__dict__". Note that
+the default collection implementations can be used with a custom attribute
+system as well.
"""
-from sqlalchemy import (create_engine, MetaData, Table, Column, Integer, Text,
- ForeignKey)
-from sqlalchemy.orm import (mapper, relationship, create_session,
- InstrumentationManager)
+from sqlalchemy import create_engine, MetaData, Table, Column, Integer, Text,\
+ ForeignKey
+from sqlalchemy.orm import mapper, relationship, Session,\
+ InstrumentationManager
-from sqlalchemy.orm.attributes import set_attribute, get_attribute, del_attribute
+from sqlalchemy.orm.attributes import set_attribute, get_attribute, \
+ del_attribute
from sqlalchemy.orm.instrumentation import is_instrumented
from sqlalchemy.orm.collections import collection_adapter
@@ -119,17 +123,20 @@ class MyCollectionAdapter(object):
def fire_append_event(self, item, initiator=None):
if initiator is not False and item is not None:
- self.state.get_impl(self.key).fire_append_event(self.state, self.state.dict, item,
- initiator)
+ self.state.get_impl(self.key).\
+ fire_append_event(self.state, self.state.dict, item,
+ initiator)
def fire_remove_event(self, item, initiator=None):
if initiator is not False and item is not None:
- self.state.get_impl(self.key).fire_remove_event(self.state, self.state.dict, item,
- initiator)
+ self.state.get_impl(self.key).\
+ fire_remove_event(self.state, self.state.dict, item,
+ initiator)
def fire_pre_remove_event(self, initiator=None):
- self.state.get_impl(self.key).fire_pre_remove_event(self.state, self.state.dict,
- initiator)
+ self.state.get_impl(self.key).\
+ fire_pre_remove_event(self.state, self.state.dict,
+ initiator)
class MyCollection(object):
def __init__(self):
@@ -151,8 +158,13 @@ class MyCollection(object):
if __name__ == '__main__':
meta = MetaData(create_engine('sqlite://'))
- table1 = Table('table1', meta, Column('id', Integer, primary_key=True), Column('name', Text))
- table2 = Table('table2', meta, Column('id', Integer, primary_key=True), Column('name', Text), Column('t1id', Integer, ForeignKey('table1.id')))
+ table1 = Table('table1', meta,
+ Column('id', Integer, primary_key=True),
+ Column('name', Text))
+ table2 = Table('table2', meta,
+ Column('id', Integer, primary_key=True),
+ Column('name', Text),
+ Column('t1id', Integer, ForeignKey('table1.id')))
meta.create_all()
class A(MyClass):
@@ -173,11 +185,10 @@ if __name__ == '__main__':
assert a1.bs[0].name == 'b1'
assert isinstance(a1.bs, MyCollection)
- sess = create_session()
+ sess = Session()
sess.add(a1)
- sess.flush()
- sess.expunge_all()
+ sess.commit()
a1 = sess.query(A).get(a1.id)
@@ -187,8 +198,7 @@ if __name__ == '__main__':
a1.bs.remove(a1.bs[0])
- sess.flush()
- sess.expunge_all()
+ sess.commit()
a1 = sess.query(A).get(a1.id)
assert len(a1.bs) == 1
diff --git a/examples/sharding/attribute_shard.py b/examples/sharding/attribute_shard.py
index 1a39f5de3..fd1fa50ae 100644
--- a/examples/sharding/attribute_shard.py
+++ b/examples/sharding/attribute_shard.py
@@ -248,9 +248,7 @@ quito.reports.append(Report(85))
sess = create_session()
for c in [tokyo, newyork, toronto, london, dublin, brasilia, quito]:
sess.add(c)
-sess.flush()
-
-sess.expunge_all()
+sess.commit()
t = sess.query(WeatherLocation).get(tokyo.id)
assert t.city == tokyo.city
diff --git a/examples/vertical/dictlike-polymorphic.py b/examples/vertical/dictlike-polymorphic.py
index e4046b3ad..1acb6aee5 100644
--- a/examples/vertical/dictlike-polymorphic.py
+++ b/examples/vertical/dictlike-polymorphic.py
@@ -132,7 +132,7 @@ if __name__ == '__main__':
from sqlalchemy import (MetaData, Table, Column, Integer, Unicode,
ForeignKey, UnicodeText, and_, not_, or_, String, Boolean, cast, text,
null, case)
- from sqlalchemy.orm import mapper, relationship, create_session
+ from sqlalchemy.orm import mapper, relationship, Session
from sqlalchemy.orm.collections import attribute_mapped_collection
metadata = MetaData()
@@ -191,7 +191,7 @@ if __name__ == '__main__':
metadata.bind = 'sqlite:///'
metadata.create_all()
- session = create_session()
+ session = Session()
stoat = Animal(u'stoat')
stoat[u'color'] = u'red'
@@ -199,8 +199,7 @@ if __name__ == '__main__':
stoat[u'weasel-like'] = True
session.add(stoat)
- session.flush()
- session.expunge_all()
+ session.commit()
critter = session.query(Animal).filter(Animal.name == u'stoat').one()
print critter[u'color']
@@ -210,7 +209,7 @@ if __name__ == '__main__':
critter[u'cuteness'] = u'very cute'
metadata.bind.echo = True
- session.flush()
+ session.commit()
metadata.bind.echo = False
marten = Animal(u'marten')
@@ -225,7 +224,7 @@ if __name__ == '__main__':
shrew[u'poisonous'] = True
session.add(shrew)
- session.flush()
+ session.commit()
q = (session.query(Animal).
filter(Animal.facts.any(
diff --git a/examples/vertical/dictlike.py b/examples/vertical/dictlike.py
index ce76b3140..e288d70ba 100644
--- a/examples/vertical/dictlike.py
+++ b/examples/vertical/dictlike.py
@@ -125,7 +125,7 @@ class VerticalPropertyDictMixin(object):
if __name__ == '__main__':
from sqlalchemy import (MetaData, Table, Column, Integer, Unicode,
ForeignKey, UnicodeText, and_, not_)
- from sqlalchemy.orm import mapper, relationship, create_session
+ from sqlalchemy.orm import mapper, relationship, Session
from sqlalchemy.orm.collections import attribute_mapped_collection
metadata = MetaData()
@@ -175,7 +175,7 @@ if __name__ == '__main__':
metadata.bind = 'sqlite:///'
metadata.create_all()
- session = create_session()
+ session = Session()
stoat = Animal(u'stoat')
stoat[u'color'] = u'reddish'
@@ -186,8 +186,7 @@ if __name__ == '__main__':
print stoat.facts[u'color']
session.add(stoat)
- session.flush()
- session.expunge_all()
+ session.commit()
critter = session.query(Animal).filter(Animal.name == u'stoat').one()
print critter[u'color']
@@ -197,7 +196,7 @@ if __name__ == '__main__':
print 'changing cuteness:'
metadata.bind.echo = True
- session.flush()
+ session.commit()
metadata.bind.echo = False
marten = Animal(u'marten')
@@ -214,7 +213,7 @@ if __name__ == '__main__':
loris[u'cuteness'] = u'fairly'
loris[u'poisonous-part'] = u'elbows'
session.add(loris)
- session.flush()
+ session.commit()
q = (session.query(Animal).
filter(Animal.facts.any(
diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py
index cb4e8e10b..5eea53ac6 100644
--- a/lib/sqlalchemy/__init__.py
+++ b/lib/sqlalchemy/__init__.py
@@ -43,6 +43,7 @@ from sqlalchemy.sql import (
subquery,
text,
tuple_,
+ type_coerce,
union,
union_all,
update,
@@ -114,6 +115,6 @@ from sqlalchemy.engine import create_engine, engine_from_config
__all__ = sorted(name for name, obj in locals().items()
if not (name.startswith('_') or inspect.ismodule(obj)))
-__version__ = '0.6.5'
+__version__ = '0.6.6'
del inspect, sys
diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py
index 95a5bf4c4..5c3b72647 100644
--- a/lib/sqlalchemy/dialects/mssql/base.py
+++ b/lib/sqlalchemy/dialects/mssql/base.py
@@ -114,6 +114,8 @@ Known Issues
------------
* No support for more than one ``IDENTITY`` column per table
+* reflection of indexes does not work with versions older than
+ SQL Server 2005
"""
import datetime, decimal, inspect, operator, sys, re
@@ -755,20 +757,20 @@ class MSSQLCompiler(compiler.SQLCompiler):
return None
def visit_table(self, table, mssql_aliased=False, **kwargs):
- if mssql_aliased:
+ if mssql_aliased is table:
return super(MSSQLCompiler, self).visit_table(table, **kwargs)
# alias schema-qualified tables
alias = self._schema_aliased_table(table)
if alias is not None:
- return self.process(alias, mssql_aliased=True, **kwargs)
+ return self.process(alias, mssql_aliased=table, **kwargs)
else:
return super(MSSQLCompiler, self).visit_table(table, **kwargs)
def visit_alias(self, alias, **kwargs):
# translate for schema-qualified table aliases
self.tablealiases[alias.original] = alias
- kwargs['mssql_aliased'] = True
+ kwargs['mssql_aliased'] = alias.original
return super(MSSQLCompiler, self).visit_alias(alias, **kwargs)
def visit_extract(self, extract, **kw):
@@ -1124,26 +1126,55 @@ class MSDialect(default.DefaultDialect):
view_names = [r[0] for r in connection.execute(s)]
return view_names
- # The cursor reports it is closed after executing the sp.
@reflection.cache
def get_indexes(self, connection, tablename, schema=None, **kw):
+ # using system catalogs, don't support index reflection
+ # below MS 2005
+ if self.server_version_info < MS_2005_VERSION:
+ return []
+
current_schema = schema or self.default_schema_name
- col_finder = re.compile("(\w+)")
full_tname = "%s.%s" % (current_schema, tablename)
- indexes = []
- s = sql.text("exec sp_helpindex '%s'" % full_tname)
- rp = connection.execute(s)
- if rp.closed:
- # did not work for this setup.
- return []
+
+ rp = connection.execute(
+ sql.text("select ind.index_id, ind.is_unique, ind.name "
+ "from sys.indexes as ind join sys.tables as tab on "
+ "ind.object_id=tab.object_id "
+ "join sys.schemas as sch on sch.schema_id=tab.schema_id "
+ "where tab.name = :tabname "
+ "and sch.name=:schname "
+ "and ind.is_primary_key=0",
+ bindparams=[
+ sql.bindparam('tabname', tablename, sqltypes.Unicode),
+ sql.bindparam('schname', current_schema, sqltypes.Unicode)
+ ]
+ )
+ )
+ indexes = {}
for row in rp:
- if 'primary key' not in row['index_description']:
- indexes.append({
- 'name' : row['index_name'],
- 'column_names' : col_finder.findall(row['index_keys']),
- 'unique': 'unique' in row['index_description']
- })
- return indexes
+ indexes[row['index_id']] = {
+ 'name':row['name'],
+ 'unique':row['is_unique'] == 1,
+ 'column_names':[]
+ }
+ rp = connection.execute(
+ sql.text("select ind_col.index_id, col.name from sys.columns as col "
+ "join sys.index_columns as ind_col on "
+ "ind_col.column_id=col.column_id "
+ "join sys.tables as tab on tab.object_id=col.object_id "
+ "join sys.schemas as sch on sch.schema_id=tab.schema_id "
+ "where tab.name=:tabname "
+ "and sch.name=:schname",
+ bindparams=[
+ sql.bindparam('tabname', tablename, sqltypes.Unicode),
+ sql.bindparam('schname', current_schema, sqltypes.Unicode)
+ ]),
+ )
+ for row in rp:
+ if row['index_id'] in indexes:
+ indexes[row['index_id']]['column_names'].append(row['name'])
+
+ return indexes.values()
@reflection.cache
def get_view_definition(self, connection, viewname, schema=None, **kw):
@@ -1207,13 +1238,13 @@ class MSDialect(default.DefaultDialect):
"Did not recognize type '%s' of column '%s'" %
(type, name))
coltype = sqltypes.NULLTYPE
+ else:
+ if issubclass(coltype, sqltypes.Numeric) and \
+ coltype is not MSReal:
+ kwargs['scale'] = numericscale
+ kwargs['precision'] = numericprec
- if issubclass(coltype, sqltypes.Numeric) and \
- coltype is not MSReal:
- kwargs['scale'] = numericscale
- kwargs['precision'] = numericprec
-
- coltype = coltype(**kwargs)
+ coltype = coltype(**kwargs)
cdict = {
'name' : name,
'type' : coltype,
diff --git a/lib/sqlalchemy/dialects/mssql/information_schema.py b/lib/sqlalchemy/dialects/mssql/information_schema.py
index cd1606dbf..4dd6436cd 100644
--- a/lib/sqlalchemy/dialects/mssql/information_schema.py
+++ b/lib/sqlalchemy/dialects/mssql/information_schema.py
@@ -1,3 +1,5 @@
+# TODO: should be using the sys. catalog with SQL Server, not information schema
+
from sqlalchemy import Table, MetaData, Column, ForeignKey
from sqlalchemy.types import String, Unicode, Integer, TypeDecorator
diff --git a/lib/sqlalchemy/dialects/mssql/pymssql.py b/lib/sqlalchemy/dialects/mssql/pymssql.py
index b6728c6b0..c5f471942 100644
--- a/lib/sqlalchemy/dialects/mssql/pymssql.py
+++ b/lib/sqlalchemy/dialects/mssql/pymssql.py
@@ -85,7 +85,9 @@ class MSDialect_pymssql(MSDialect):
def create_connect_args(self, url):
opts = url.translate_connect_args(username='user')
opts.update(url.query)
- opts.pop('port', None)
+ port = opts.pop('port', None)
+ if port and 'host' in opts:
+ opts['host'] = "%s:%s" % (opts['host'], port)
return [[], opts]
def is_disconnect(self, e):
@@ -99,4 +101,4 @@ class MSDialect_pymssql(MSDialect):
else:
return False
-dialect = MSDialect_pymssql \ No newline at end of file
+dialect = MSDialect_pymssql
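With this change, a URL such as the following (host and database hypothetical) now reaches the server on the given port::

    from sqlalchemy import create_engine

    # the ":1433" portion is folded into pymssql's "host" argument
    engine = create_engine("mssql+pymssql://scott:tiger@myhost:1433/mydb")
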
diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py
index a2d3748f3..660d201d1 100644
--- a/lib/sqlalchemy/dialects/mysql/base.py
+++ b/lib/sqlalchemy/dialects/mysql/base.py
@@ -2371,8 +2371,8 @@ class MySQLTableDefinitionParser(object):
r'(?: +COLLATE +(?P<collate>[\w_]+))?'
r'(?: +(?P<notnull>NOT NULL))?'
r'(?: +DEFAULT +(?P<default>'
- r'(?:NULL|\x27(?:\x27\x27|[^\x27])*\x27|\w+)'
- r'(?:ON UPDATE \w+)?'
+ r'(?:NULL|\x27(?:\x27\x27|[^\x27])*\x27|\w+'
+ r'(?: +ON UPDATE \w+)?)'
r'))?'
r'(?: +(?P<autoincr>AUTO_INCREMENT))?'
r'(?: +COMMENT +(P<comment>(?:\x27\x27|[^\x27])+))?'
diff --git a/lib/sqlalchemy/dialects/mysql/zxjdbc.py b/lib/sqlalchemy/dialects/mysql/zxjdbc.py
index 06d3e6616..0c0c39b67 100644
--- a/lib/sqlalchemy/dialects/mysql/zxjdbc.py
+++ b/lib/sqlalchemy/dialects/mysql/zxjdbc.py
@@ -92,7 +92,7 @@ class MySQLDialect_zxjdbc(ZxJDBCConnector, MySQLDialect):
def _extract_error_code(self, exception):
# e.g.: DBAPIError: (Error) Table 'test.u2' doesn't exist
# [SQLCode: 1146], [SQLState: 42S02] 'DESCRIBE `u2`' ()
- m = re.compile(r"\[SQLCode\: (\d+)\]").search(str(exception.orig.args))
+ m = re.compile(r"\[SQLCode\: (\d+)\]").search(str(exception.args))
c = m.group(1)
if c:
return int(c)
diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py
index 0aa348953..256972696 100644
--- a/lib/sqlalchemy/dialects/oracle/base.py
+++ b/lib/sqlalchemy/dialects/oracle/base.py
@@ -640,9 +640,11 @@ class OracleDialect(default.DefaultDialect):
def initialize(self, connection):
super(OracleDialect, self).initialize(connection)
- self.implicit_returning = self.server_version_info > (10, ) and \
- self.__dict__.get('implicit_returning', True)
-
+ self.implicit_returning = self.__dict__.get(
+ 'implicit_returning',
+ self.server_version_info > (10, )
+ )
+
if self._is_oracle_8:
self.colspecs = self.colspecs.copy()
self.colspecs.pop(sqltypes.Interval)
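An explicitly passed flag therefore wins regardless of the detected server version, e.g. (DSN hypothetical)::

    from sqlalchemy import create_engine

    # honored even when the Oracle version is detected as < 10
    engine = create_engine("oracle://scott:tiger@mydsn",
                           implicit_returning=False)
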
diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py
index 89769b8c0..0d103cb0d 100644
--- a/lib/sqlalchemy/dialects/postgresql/base.py
+++ b/lib/sqlalchemy/dialects/postgresql/base.py
@@ -171,7 +171,7 @@ class ARRAY(sqltypes.MutableType, sqltypes.Concatenable, sqltypes.TypeEngine):
"""
__visit_name__ = 'ARRAY'
- def __init__(self, item_type, mutable=True):
+ def __init__(self, item_type, mutable=True, as_tuple=False):
"""Construct an ARRAY.
E.g.::
@@ -186,9 +186,14 @@ class ARRAY(sqltypes.MutableType, sqltypes.Concatenable, sqltypes.TypeEngine):
``ARRAY(ARRAY(Integer))`` or such. The type mapping figures out on
the fly
- :param mutable: Defaults to True: specify whether lists passed to this
+ :param mutable=True: Specify whether lists passed to this
class should be considered mutable. If so, generic copy operations
(typically used by the ORM) will shallow-copy values.
+
+ :param as_tuple=False: Specify whether return results should be converted
+ to tuples from lists. DBAPIs such as psycopg2 return lists by default.
+ When tuples are returned, the results are hashable. This flag can only
+ be set to ``True`` when ``mutable`` is set to ``False``. (new in 0.6.5)
"""
if isinstance(item_type, ARRAY):
@@ -198,7 +203,12 @@ class ARRAY(sqltypes.MutableType, sqltypes.Concatenable, sqltypes.TypeEngine):
item_type = item_type()
self.item_type = item_type
self.mutable = mutable
-
+ if mutable and as_tuple:
+ raise exc.ArgumentError(
+ "mutable must be set to False if as_tuple is True."
+ )
+ self.as_tuple = as_tuple
+
def copy_value(self, value):
if value is None:
return None
@@ -224,7 +234,8 @@ class ARRAY(sqltypes.MutableType, sqltypes.Concatenable, sqltypes.TypeEngine):
def adapt(self, impltype):
return impltype(
self.item_type,
- mutable=self.mutable
+ mutable=self.mutable,
+ as_tuple=self.as_tuple
)
def bind_processor(self, dialect):
@@ -252,19 +263,28 @@ class ARRAY(sqltypes.MutableType, sqltypes.Concatenable, sqltypes.TypeEngine):
if item_proc:
def convert_item(item):
if isinstance(item, list):
- return [convert_item(child) for child in item]
+ r = [convert_item(child) for child in item]
+ if self.as_tuple:
+ r = tuple(r)
+ return r
else:
return item_proc(item)
else:
def convert_item(item):
if isinstance(item, list):
- return [convert_item(child) for child in item]
+ r = [convert_item(child) for child in item]
+ if self.as_tuple:
+ r = tuple(r)
+ return r
else:
return item
def process(value):
if value is None:
return value
- return [convert_item(item) for item in value]
+ r = [convert_item(item) for item in value]
+ if self.as_tuple:
+ r = tuple(r)
+ return r
return process
PGArray = ARRAY
@@ -1033,28 +1053,32 @@ class PGDialect(default.DefaultDialect):
else:
args = ()
- if attype in self.ischema_names:
- coltype = self.ischema_names[attype]
- elif attype in enums:
- enum = enums[attype]
- coltype = ENUM
- if "." in attype:
- kwargs['schema'], kwargs['name'] = attype.split('.')
- else:
- kwargs['name'] = attype
- args = tuple(enum['labels'])
- elif attype in domains:
- domain = domains[attype]
- if domain['attype'] in self.ischema_names:
+ while True:
+ if attype in self.ischema_names:
+ coltype = self.ischema_names[attype]
+ break
+ elif attype in enums:
+ enum = enums[attype]
+ coltype = ENUM
+ if "." in attype:
+ kwargs['schema'], kwargs['name'] = attype.split('.')
+ else:
+ kwargs['name'] = attype
+ args = tuple(enum['labels'])
+ break
+ elif attype in domains:
+ domain = domains[attype]
+ attype = domain['attype']
# A table can't override whether the domain is nullable.
nullable = domain['nullable']
if domain['default'] and not default:
# It can, however, override the default
# value, but can't set it to null.
default = domain['default']
- coltype = self.ischema_names[domain['attype']]
- else:
- coltype = None
+ continue
+ else:
+ coltype = None
+ break
if coltype:
coltype = coltype(*args, **kwargs)
diff --git a/lib/sqlalchemy/engine/__init__.py b/lib/sqlalchemy/engine/__init__.py
index 43d3dd038..36b86cabf 100644
--- a/lib/sqlalchemy/engine/__init__.py
+++ b/lib/sqlalchemy/engine/__init__.py
@@ -278,14 +278,15 @@ def _coerce_config(configuration, prefix):
for key in configuration
if key.startswith(prefix))
for option, type_ in (
- ('convert_unicode', bool),
+ ('convert_unicode', util.bool_or_str('force')),
('pool_timeout', int),
- ('echo', bool),
- ('echo_pool', bool),
+ ('echo', util.bool_or_str('debug')),
+ ('echo_pool', util.bool_or_str('debug')),
('pool_recycle', int),
('pool_size', int),
('max_overflow', int),
('pool_threadlocal', bool),
+ ('use_native_unicode', bool),
):
util.coerce_kw_type(options, option, type_)
return options
diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py
index 390094c7d..13755d49a 100644
--- a/lib/sqlalchemy/engine/default.py
+++ b/lib/sqlalchemy/engine/default.py
@@ -565,7 +565,6 @@ class DefaultExecutionContext(base.ExecutionContext):
in all cases.
"""
-
return self.cursor.lastrowid
def handle_dbapi_exception(self, e):
diff --git a/lib/sqlalchemy/ext/declarative.py b/lib/sqlalchemy/ext/declarative.py
index fabd9aaf9..dd2df63d3 100755
--- a/lib/sqlalchemy/ext/declarative.py
+++ b/lib/sqlalchemy/ext/declarative.py
@@ -589,7 +589,8 @@ keys, as a :class:`ForeignKey` itself contains references to columns
which can't be properly recreated at this level. For columns that
have foreign keys, as well as for the variety of mapper-level constructs
that require destination-explicit context, the
-:func:`~.declared_attr` decorator is provided so that
+:func:`~.declared_attr` decorator (renamed from ``sqlalchemy.util.classproperty`` in 0.6.5)
+is provided so that
patterns common to many classes can be defined as callables::
from sqlalchemy.ext.declarative import declared_attr
@@ -820,6 +821,81 @@ from multiple collections::
id = Column(Integer, primary_key=True)
+Defining Indexes in Mixins
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If you need to define a multi-column index that applies to all tables
+that make use of a particular mixin, you will need to do this in a
+metaclass as shown in the following example::
+
+ from sqlalchemy import Index
+ from sqlalchemy.ext.declarative import DeclarativeMeta
+
+ class MyMixinMeta(DeclarativeMeta):
+
+ def __init__(cls,*args,**kw):
+ if getattr(cls,'_decl_class_registry',None) is None:
+ return
+ super(MyMixinMeta,cls).__init__(*args,**kw)
+ # Index creation done here
+ Index('test',cls.a,cls.b)
+
+ class MyMixin(object):
+ __metaclass__=MyMixinMeta
+ a = Column(Integer)
+ b = Column(Integer)
+
+ class MyModel(Base,MyMixin):
+ __tablename__ = 'atable'
+ c = Column(Integer,primary_key=True)
+
+Using multiple Mixins that require Metaclasses
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If you end up in a situation where you need to use multiple mixins and
+more than one of them uses a metaclass to, for example, create a
+multi-column index, then you will need to create a metaclass that
+correctly combines the actions of the other metaclasses. For example::
+
+ class MyMeta1(DeclarativeMeta):
+
+ def __init__(cls,*args,**kw):
+ if getattr(cls,'_decl_class_registry',None) is None:
+ return
+ super(MyMeta1,cls).__init__(*args,**kw)
+ Index('ab',cls.a,cls.b)
+
+ class MyMixin1(object):
+ __metaclass__=MyMeta1
+ a = Column(Integer)
+ b = Column(Integer)
+
+ class MyMeta2(DeclarativeMeta):
+
+ def __init__(cls,*args,**kw):
+ if getattr(cls,'_decl_class_registry',None) is None:
+ return
+ super(MyMeta2,cls).__init__(*args,**kw)
+ Index('cd',cls.c,cls.d)
+
+ class MyMixin2(object):
+ __metaclass__=MyMeta2
+ c = Column(Integer)
+ d = Column(Integer)
+
+ class CombinedMeta(MyMeta1,MyMeta2):
+ # This is needed to successfully combine
+ # two mixins which both have metaclasses
+ pass
+
+ class MyModel(Base,MyMixin1,MyMixin2):
+ __tablename__ = 'awooooga'
+ __metaclass__ = CombinedMeta
+ z = Column(Integer,primary_key=True)
+
+For this reason, if a mixin requires a custom metaclass, this should
+be mentioned in any documentation of that mixin to avoid confusion
+later down the line.
+
Class Constructor
=================
@@ -939,7 +1015,7 @@ def _as_declarative(cls, classname, dict_):
if name not in dict_ and not (
'__table__' in dict_ and
(obj.name or name) in dict_['__table__'].c
- ):
+ ) and name not in potential_columns:
potential_columns[name] = \
column_copies[obj] = \
obj.copy()
@@ -971,6 +1047,13 @@ def _as_declarative(cls, classname, dict_):
for k, v in mapper_args.iteritems():
mapper_args[k] = column_copies.get(v,v)
+
+ if classname in cls._decl_class_registry:
+ util.warn("The classname %r is already in the registry of this"
+ " declarative base, mapped to %r" % (
+ classname,
+ cls._decl_class_registry[classname]
+ ))
cls._decl_class_registry[classname] = cls
our_stuff = util.OrderedDict()
@@ -1288,7 +1371,7 @@ class declared_attr(property):
a mapped property or special declarative member name.
.. note:: @declared_attr is available as
- sqlalchemy.util.classproperty for SQLAlchemy versions
+ ``sqlalchemy.util.classproperty`` for SQLAlchemy versions
0.6.2, 0.6.3, 0.6.4.
@declared_attr turns the attribute into a scalar-like
diff --git a/lib/sqlalchemy/orm/__init__.py b/lib/sqlalchemy/orm/__init__.py
index 8b32d1a27..18031e15f 100644
--- a/lib/sqlalchemy/orm/__init__.py
+++ b/lib/sqlalchemy/orm/__init__.py
@@ -84,6 +84,7 @@ __all__ = (
'eagerload',
'eagerload_all',
'extension',
+ 'immediateload',
'join',
'joinedload',
'joinedload_all',
@@ -335,7 +336,12 @@ def relationship(argument, secondary=None, **kwargs):
``select``. Values include:
* ``select`` - items should be loaded lazily when the property is first
- accessed, using a separate SELECT statement.
+ accessed, using a separate SELECT statement, or identity map
+ fetch for simple many-to-one references.
+
+ * ``immediate`` - items should be loaded as the parents are loaded,
+ using a separate SELECT statement, or identity map fetch for
+ simple many-to-one references. (new as of 0.6.5)
* ``joined`` - items should be loaded "eagerly" in the same query as
that of the parent, using a JOIN or LEFT OUTER JOIN. Whether
@@ -957,11 +963,24 @@ def compile_mappers():
m.compile()
def clear_mappers():
- """Remove all mappers that have been created thus far.
-
- The mapped classes will return to their initial "unmapped" state and can
- be re-mapped with new mappers.
-
+ """Remove all mappers from all classes.
+
+ This function removes all instrumentation from classes and disposes
+ of their associated mappers. Once called, the classes are unmapped
+ and can be later re-mapped with new mappers.
+
+ :func:`.clear_mappers` is *not* for normal use, as there is literally no
+ valid usage for it outside of very specific testing scenarios. Normally,
+ mappers are permanent structural components of user-defined classes, and
+ are never discarded independently of their class. If a mapped class itself
+ is garbage collected, its mapper is automatically disposed of as well. As
+ such, :func:`.clear_mappers` is only for usage in test suites that re-use
+ the same classes with different mappings, which is itself an extremely rare
+ use case - the only such use case is in fact SQLAlchemy's own test suite,
+ and possibly the test suites of other ORM extension libraries which
+ intend to test various combinations of mapper construction upon a fixed
+ set of classes.
+
"""
mapperlib._COMPILE_MUTEX.acquire()
try:
@@ -1122,7 +1141,7 @@ def subqueryload_all(*keys):
query.options(subqueryload_all(User.orders, Order.items,
Item.keywords))
- See also: :func:`joinedload_all`, :func:`lazyload`
+ See also: :func:`joinedload_all`, :func:`lazyload`, :func:`immediateload`
"""
return strategies.EagerLazyOption(keys, lazy="subquery", chained=True)
@@ -1134,7 +1153,7 @@ def lazyload(*keys):
Used with :meth:`~sqlalchemy.orm.query.Query.options`.
- See also: :func:`eagerload`, :func:`subqueryload`
+ See also: :func:`eagerload`, :func:`subqueryload`, :func:`immediateload`
"""
return strategies.EagerLazyOption(keys, lazy=True)
@@ -1145,11 +1164,24 @@ def noload(*keys):
Used with :meth:`~sqlalchemy.orm.query.Query.options`.
- See also: :func:`lazyload`, :func:`eagerload`, :func:`subqueryload`
+ See also: :func:`lazyload`, :func:`eagerload`, :func:`subqueryload`, :func:`immediateload`
"""
return strategies.EagerLazyOption(keys, lazy=None)
+def immediateload(*keys):
+ """Return a ``MapperOption`` that will convert the property of the given
+ name into an immediate load.
+
+ Used with :meth:`~sqlalchemy.orm.query.Query.options`.
+
+ See also: :func:`lazyload`, :func:`eagerload`, :func:`subqueryload`
+
+ New as of version 0.6.5.
+
+ """
+ return strategies.EagerLazyOption(keys, lazy='immediate')
+
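Usage mirrors the other loader options; a sketch assuming a mapped ``User.addresses`` relationship::

    from sqlalchemy.orm import immediateload

    # each User's "addresses" collection is loaded with its own
    # SELECT as the User rows are fetched
    users = session.query(User).\
        options(immediateload(User.addresses)).all()
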
def contains_alias(alias):
"""Return a ``MapperOption`` that will indicate to the query that
the main table has been aliased.
diff --git a/lib/sqlalchemy/orm/dynamic.py b/lib/sqlalchemy/orm/dynamic.py
index 2b0883cfb..caa057717 100644
--- a/lib/sqlalchemy/orm/dynamic.py
+++ b/lib/sqlalchemy/orm/dynamic.py
@@ -36,7 +36,7 @@ class DynaLoader(strategies.AbstractRelationshipLoader):
)
def create_row_processor(self, selectcontext, path, mapper, row, adapter):
- return (None, None)
+ return None, None, None
log.class_logger(DynaLoader)
diff --git a/lib/sqlalchemy/orm/interfaces.py b/lib/sqlalchemy/orm/interfaces.py
index f4933b8ca..6d30abf91 100644
--- a/lib/sqlalchemy/orm/interfaces.py
+++ b/lib/sqlalchemy/orm/interfaces.py
@@ -78,38 +78,8 @@ class MapperProperty(object):
pass
def create_row_processor(self, selectcontext, path, mapper, row, adapter):
- """Return a 2-tuple consiting of two row processing functions and
- an instance post-processing function.
-
- Input arguments are the query.SelectionContext and the *first*
- applicable row of a result set obtained within
- query.Query.instances(), called only the first time a particular
- mapper's populate_instance() method is invoked for the overall result.
-
- The settings contained within the SelectionContext as well as the
- columns present in the row (which will be the same columns present in
- all rows) are used to determine the presence and behavior of the
- returned callables. The callables will then be used to process all
- rows and instances.
-
- Callables are of the following form::
-
- def new_execute(state, dict_, row, isnew):
- # process incoming instance state and given row.
- # the instance is
- # "new" and was just created upon receipt of this row.
- "isnew" indicates if the instance was newly created as a
- result of reading this row
-
- def existing_execute(state, dict_, row):
- # process incoming instance state and given row. the
- # instance is
- # "existing" and was created based on a previous row.
-
- return (new_execute, existing_execute)
-
- Either of the three tuples can be ``None`` in which case no function
- is called.
+ """Return a 3-tuple consisting of three row processing functions.
+
"""
raise NotImplementedError()
diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py
index a40021663..6ab6208fd 100644
--- a/lib/sqlalchemy/orm/mapper.py
+++ b/lib/sqlalchemy/orm/mapper.py
@@ -2135,10 +2135,11 @@ class Mapper(object):
state.load_path = load_path
if not new_populators:
- new_populators[:], existing_populators[:] = \
- self._populators(context, path, row,
- adapter)
-
+ self._populators(context, path, row, adapter,
+ new_populators,
+ existing_populators
+ )
+
if isnew:
populators = new_populators
else:
@@ -2309,20 +2310,24 @@ class Mapper(object):
return instance
return _instance
- def _populators(self, context, path, row, adapter):
+ def _populators(self, context, path, row, adapter,
+ new_populators, existing_populators):
"""Produce a collection of attribute level row processor callables."""
- new_populators, existing_populators = [], []
+ delayed_populators = []
for prop in self._props.itervalues():
- newpop, existingpop = prop.create_row_processor(
+ newpop, existingpop, delayedpop = prop.create_row_processor(
context, path,
self, row, adapter)
if newpop:
new_populators.append((prop.key, newpop))
if existingpop:
existing_populators.append((prop.key, existingpop))
- return new_populators, existing_populators
-
+ if delayedpop:
+ delayed_populators.append((prop.key, delayedpop))
+ if delayed_populators:
+ new_populators.extend(delayed_populators)
+
def _configure_subclass_mapper(self, context, path, adapter):
"""Produce a mapper level row processor callable factory for mappers
inheriting this one."""
diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py
index 4efd2acc9..0cbbf630d 100644
--- a/lib/sqlalchemy/orm/properties.py
+++ b/lib/sqlalchemy/orm/properties.py
@@ -255,7 +255,7 @@ class DescriptorProperty(MapperProperty):
pass
def create_row_processor(self, selectcontext, path, mapper, row, adapter):
- return (None, None)
+ return None, None, None
def merge(self, session, source_state, source_dict,
dest_state, dest_dict, load, _recursive):
diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py
index 605f391aa..468bcc19d 100644
--- a/lib/sqlalchemy/orm/query.py
+++ b/lib/sqlalchemy/orm/query.py
@@ -425,8 +425,8 @@ class Query(object):
return stmt._annotate({'_halt_adapt': True})
def subquery(self):
- """return the full SELECT statement represented by this Query,
- embedded within an Alias.
+ """return the full SELECT statement represented by this :class:`.Query`,
+ embedded within an :class:`.Alias`.
Eager JOIN generation within the query is disabled.
@@ -436,7 +436,33 @@ class Query(object):
"""
return self.enable_eagerloads(False).statement.alias()
+
+ def label(self, name):
+ """Return the full SELECT statement represented by this :class:`.Query`, converted
+ to a scalar subquery with a label of the given name.
+
+ Analogous to :meth:`sqlalchemy.sql._SelectBaseMixin.label`.
+
+ New in 0.6.5.
+
+ """
+
+ return self.enable_eagerloads(False).statement.label(name)
+
+ def as_scalar(self):
+ """Return the full SELECT statement represented by this :class:`.Query`, converted
+ to a scalar subquery.
+
+ Analogous to :meth:`sqlalchemy.sql._SelectBaseMixin.as_scalar`.
+
+ New in 0.6.5.
+
+ """
+
+ return self.enable_eagerloads(False).statement.as_scalar()
+
+
def __clause_element__(self):
return self.enable_eagerloads(False).with_labels().statement
@@ -758,7 +784,36 @@ class Query(object):
# end Py2K
except StopIteration:
return None
+
+ @_generative()
+ def with_entities(self, *entities):
+ """Return a new :class:`.Query` replacing the SELECT list with the given
+ entities.
+
+ e.g.::
+
+ # Users, filtered on some arbitrary criterion
+ # and then ordered by related email address
+ q = session.query(User).\\
+ join(User.address).\\
+ filter(User.name.like('%ed%')).\\
+ order_by(Address.email)
+ # given *only* User.id==5, Address.email, and 'q', what
+ # would the *next* User in the result be ?
+ subq = q.with_entities(Address.email).\\
+ order_by(None).\\
+ filter(User.id==5).\\
+ subquery()
+ q = q.join((subq, subq.c.email < Address.email)).\\
+ limit(1)
+
+ New in 0.6.5.
+
+ """
+ self._set_entities(entities)
+
+
@_generative()
def add_columns(self, *column):
"""Add one or more column expressions to the list
@@ -1721,10 +1776,8 @@ class Query(object):
query_entity.row_processor(self, context, custom_rows)
for query_entity in self._entities
])
-
- if not single_entity:
- labels = [l for l in labels if l]
-
+
+
while True:
context.progress = {}
context.partials = {}
diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py
index 3444c12ac..5e1c7ba09 100644
--- a/lib/sqlalchemy/orm/session.py
+++ b/lib/sqlalchemy/orm/session.py
@@ -532,20 +532,13 @@ class Session(object):
transaction or nested transaction, an error is raised, unless
``subtransactions=True`` or ``nested=True`` is specified.
- The ``subtransactions=True`` flag indicates that this ``begin()`` can
- create a subtransaction if a transaction is already in progress. A
- subtransaction is a non-transactional, delimiting construct that
- allows matching begin()/commit() pairs to be nested together, with
- only the outermost begin/commit pair actually affecting transactional
- state. When a rollback is issued, the subtransaction will directly
- roll back the innermost real transaction, however each subtransaction
- still must be explicitly rolled back to maintain proper stacking of
- subtransactions.
-
- If no transaction is in progress, then a real transaction is begun.
-
+ The ``subtransactions=True`` flag indicates that this :meth:`~.Session.begin`
+ can create a subtransaction if a transaction is already in progress.
+ For documentation on subtransactions, please see :ref:`session_subtransactions`.
+
The ``nested`` flag begins a SAVEPOINT transaction and is equivalent
- to calling ``begin_nested()``.
+ to calling :meth:`~.Session.begin_nested`. For documentation on SAVEPOINT
+ transactions, please see :ref:`session_begin_nested`.
"""
if self.transaction is not None:
@@ -567,10 +560,8 @@ class Session(object):
The target database(s) must support SQL SAVEPOINTs or a
SQLAlchemy-supported vendor implementation of the idea.
- The nested transaction is a real transation, unlike a "subtransaction"
- which corresponds to multiple ``begin()`` calls. The next
- ``rollback()`` or ``commit()`` call will operate upon this nested
- transaction.
+ For documentation on SAVEPOINT
+ transactions, please see :ref:`session_begin_nested`.
"""
return self.begin(nested=True)
@@ -593,9 +584,16 @@ class Session(object):
def commit(self):
"""Flush pending changes and commit the current transaction.
-
+
If no transaction is in progress, this method raises an
InvalidRequestError.
+
+ By default, the :class:`.Session` also expires all database
+ loaded state on all ORM-managed attributes after transaction commit.
+ This is so that subsequent operations load the most recent
+ data from the database. This behavior can be disabled using
+ the ``expire_on_commit=False`` option to :func:`.sessionmaker` or
+ the :class:`.Session` constructor.
If a subtransaction is in effect (which occurs when begin() is called
multiple times), the subtransaction will be closed, and the next call
@@ -1490,22 +1488,42 @@ class Session(object):
ext.after_flush_postexec(self, flush_context)
def is_modified(self, instance, include_collections=True, passive=False):
- """Return True if instance has modified attributes.
+ """Return ``True`` if instance has modified attributes.
This method retrieves a history instance for each instrumented
attribute on the instance and performs a comparison of the current
- value to its previously committed value. Note that instances present
- in the 'dirty' collection may result in a value of ``False`` when
- tested with this method.
+ value to its previously committed value.
- `include_collections` indicates if multivalued collections should be
+ ``include_collections`` indicates if multivalued collections should be
included in the operation. Setting this to False is a way to detect
only local-column based properties (i.e. scalar columns or many-to-one
foreign keys) that would result in an UPDATE for this instance upon
flush.
- The `passive` flag indicates if unloaded attributes and collections
+ The ``passive`` flag indicates if unloaded attributes and collections
should not be loaded in the course of performing this test.
+
+ A few caveats to this method apply:
+
+ * Instances present in the 'dirty' collection may result in a value
+ of ``False`` when tested with this method. This is because while
+ the object may have received attribute set events, there may be
+ no net changes to its state.
+ * Scalar attributes may not have recorded the "previously" set
+ value when a new value was applied, if the attribute was not loaded,
+ or was expired, at the time the new value was received - in these
+ cases, the attribute is assumed to have a change, even if there is
+ ultimately no net change against its database value. SQLAlchemy in
+ most cases does not need the "old" value when a set event occurs, so
+ it skips the expense of a SQL call if the old value isn't present,
+ on the assumption that an UPDATE of the scalar value is usually
+ needed, and that in the few cases where it isn't, this is on average
+ less expensive than issuing a defensive SELECT.
+
+ The "old" value is fetched unconditionally only if the attribute
+ container has the "active_history" flag set to ``True``. This flag
+ is set typically for primary key attributes and scalar references
+ that are not a simple many-to-one.
"""
try:
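
Illustrating the first caveat above, a sketch assuming a mapped User class with a persisted row: a set event with no net change leaves the object in the dirty collection while is_modified() still returns False::

    u1 = session.query(User).first()
    u1.name = u1.name                   # set event fires, no net change
    assert u1 in session.dirty
    assert not session.is_modified(u1)
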
diff --git a/lib/sqlalchemy/orm/state.py b/lib/sqlalchemy/orm/state.py
index dc8a07c17..bea4ee500 100644
--- a/lib/sqlalchemy/orm/state.py
+++ b/lib/sqlalchemy/orm/state.py
@@ -343,8 +343,14 @@ class InstanceState(object):
previous = attr.copy(previous)
self.committed_state[attr.key] = previous
-
- if not self.modified:
+
+
+ # the "or not self.modified" is defensive at
+ # this point. The assertion below is expected
+ # to be True:
+ # assert self._strong_obj is None or self.modified
+
+ if self._strong_obj is None or not self.modified:
instance_dict = self._instance_dict()
if instance_dict:
instance_dict._modified.add(self)
diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py
index 1696d1456..1de2dc3ee 100644
--- a/lib/sqlalchemy/orm/strategies.py
+++ b/lib/sqlalchemy/orm/strategies.py
@@ -89,7 +89,7 @@ class UninstrumentedColumnLoader(LoaderStrategy):
column_collection.append(c)
def create_row_processor(self, selectcontext, path, mapper, row, adapter):
- return None, None
+ return None, None, None
class ColumnLoader(LoaderStrategy):
"""Strategize the loading of a plain column-based MapperProperty."""
@@ -128,11 +128,11 @@ class ColumnLoader(LoaderStrategy):
if col is not None and col in row:
def new_execute(state, dict_, row):
dict_[key] = row[col]
- return new_execute, None
+ return new_execute, None, None
else:
def new_execute(state, dict_, row):
state.expire_attribute_pre_commit(dict_, key)
- return new_execute, None
+ return new_execute, None, None
log.class_logger(ColumnLoader)
@@ -185,7 +185,7 @@ class CompositeColumnLoader(ColumnLoader):
def new_execute(state, dict_, row):
dict_[key] = composite_class(*[row[c] for c in columns])
- return new_execute, None
+ return new_execute, None, None
log.class_logger(CompositeColumnLoader)
@@ -212,7 +212,7 @@ class DeferredColumnLoader(LoaderStrategy):
# fire off on next access.
state.reset(dict_, key)
- return new_execute, None
+ return new_execute, None, None
def init(self):
if hasattr(self.parent_property, 'composite_class'):
@@ -349,7 +349,7 @@ class NoLoader(AbstractRelationshipLoader):
def create_row_processor(self, selectcontext, path, mapper, row, adapter):
def new_execute(state, dict_, row):
state.initialize(self.key)
- return new_execute, None
+ return new_execute, None, None
log.class_logger(NoLoader)
@@ -510,7 +510,7 @@ class LazyLoader(AbstractRelationshipLoader):
# any existing state.
state.reset(dict_, key)
- return new_execute, None
+ return new_execute, None, None
@classmethod
def _create_lazy_clause(cls, prop, reverse_direction=False):
@@ -684,6 +684,23 @@ class LoadLazyAttribute(object):
else:
return None
+class ImmediateLoader(AbstractRelationshipLoader):
+ def init_class_attribute(self, mapper):
+ self.parent_property.\
+ _get_strategy(LazyLoader).\
+ init_class_attribute(mapper)
+
+ def setup_query(self, context, entity,
+ path, adapter, column_collection=None,
+ parentmapper=None, **kwargs):
+ pass
+
+ def create_row_processor(self, context, path, mapper, row, adapter):
+ def execute(state, dict_, row):
+ state.get_impl(self.key).get(state, dict_)
+
+ return None, None, execute
+
class SubqueryLoader(AbstractRelationshipLoader):
def init(self):
super(SubqueryLoader, self).init()
@@ -860,7 +877,7 @@ class SubqueryLoader(AbstractRelationshipLoader):
path = interfaces._reduce_path(path)
if ('subquery', path) not in context.attributes:
- return None, None
+ return None, None, None
local_cols, remote_cols = self._local_remote_columns(self.parent_property)
@@ -904,7 +921,7 @@ class SubqueryLoader(AbstractRelationshipLoader):
state.get_impl(self.key).\
set_committed_value(state, dict_, scalar)
- return execute, None
+ return execute, None, None
log.class_logger(SubqueryLoader)
@@ -922,6 +939,7 @@ class EagerLoader(AbstractRelationshipLoader):
def setup_query(self, context, entity, path, adapter, \
column_collection=None, parentmapper=None,
+ allow_innerjoin=True,
**kwargs):
"""Add a left outer join to the statement thats being constructed."""
@@ -972,10 +990,18 @@ class EagerLoader(AbstractRelationshipLoader):
if self.parent_property.direction != interfaces.MANYTOONE:
context.multi_row_eager_loaders = True
+ innerjoin = allow_innerjoin and context.attributes.get(
+ ("eager_join_type", path),
+ self.parent_property.innerjoin)
+ if not innerjoin:
+ # if this is an outer join, all eager joins from
+ # here must also be outer joins
+ allow_innerjoin = False
+
context.create_eager_joins.append(
(self._create_eager_join, context,
entity, path, adapter,
- parentmapper, clauses)
+ parentmapper, clauses, innerjoin)
)
add_to_collection = context.secondary_columns
@@ -990,10 +1016,12 @@ class EagerLoader(AbstractRelationshipLoader):
path + (self.mapper,),
clauses,
parentmapper=self.mapper,
- column_collection=add_to_collection)
+ column_collection=add_to_collection,
+ allow_innerjoin=allow_innerjoin)
def _create_eager_join(self, context, entity,
- path, adapter, parentmapper, clauses):
+ path, adapter, parentmapper,
+ clauses, innerjoin):
if parentmapper is None:
localparent = entity.mapper
@@ -1048,10 +1076,6 @@ class EagerLoader(AbstractRelationshipLoader):
else:
onclause = self.parent_property
- innerjoin = context.attributes.get(
- ("eager_join_type", path),
- self.parent_property.innerjoin)
-
context.eager_joins[entity_key] = eagerjoin = \
mapperutil.join(
towrap,
@@ -1157,7 +1181,7 @@ class EagerLoader(AbstractRelationshipLoader):
"Multiple rows returned with "
"uselist=False for eagerly-loaded attribute '%s' "
% self)
- return new_execute, existing_execute
+ return new_execute, existing_execute, None
else:
def new_execute(state, dict_, row):
collection = attributes.init_state_collection(
@@ -1182,7 +1206,7 @@ class EagerLoader(AbstractRelationshipLoader):
'append_without_event')
context.attributes[(state, key)] = result_list
_instance(row, result_list)
- return new_execute, existing_execute
+ return new_execute, existing_execute, None
else:
return self.parent_property.\
_get_strategy(LazyLoader).\
@@ -1222,6 +1246,8 @@ def factory(identifier):
return LazyLoader
elif identifier == 'subquery':
return SubqueryLoader
+ elif identifier == 'immediate':
+ return ImmediateLoader
else:
return LazyLoader
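
At the mapping level, the new 'immediate' identifier corresponds to lazy='immediate' (or the immediateload() option), which populates the attribute with a per-row SELECT as each parent is loaded; a sketch using the User/Address fixtures::

    from sqlalchemy.orm import mapper, relationship, create_session, \
        immediateload

    mapper(Address, addresses)
    mapper(User, users, properties={
        # 'addresses' loads up front via an additional SELECT per
        # User row, rather than on first attribute access
        'addresses': relationship(Address, lazy='immediate')
    })

    # equivalently, per-query:
    sess = create_session()
    sess.query(User).options(immediateload(User.addresses)).all()
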
diff --git a/lib/sqlalchemy/schema.py b/lib/sqlalchemy/schema.py
index 8070fd9ca..6966fb90b 100644
--- a/lib/sqlalchemy/schema.py
+++ b/lib/sqlalchemy/schema.py
@@ -1988,7 +1988,7 @@ class MetaData(SchemaItem):
from sqlalchemy.sql.util import sort_tables
return sort_tables(self.tables.itervalues())
- def reflect(self, bind=None, schema=None, only=None):
+ def reflect(self, bind=None, schema=None, views=False, only=None):
"""Load all available table definitions from the database.
Automatically creates ``Table`` entries in this ``MetaData`` for any
@@ -2004,7 +2004,10 @@ class MetaData(SchemaItem):
:param schema:
Optional, query and reflect tables from an alternate schema.
-
+
+ :param views:
+ If True, also reflect views.
+
:param only:
Optional. Load only a sub-set of available named tables. May be
specified as a sequence of names or a callable.
@@ -2033,6 +2036,11 @@ class MetaData(SchemaItem):
available = util.OrderedSet(bind.engine.table_names(schema,
connection=conn))
+ if views:
+ available.update(
+ bind.dialect.get_view_names(conn or bind, schema)
+ )
+
current = set(self.tables.iterkeys())
if only is None:
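
A short sketch of the new ``views`` flag (engine URL hypothetical)::

    from sqlalchemy import MetaData, create_engine

    engine = create_engine('postgresql://scott:tiger@localhost/test')
    meta = MetaData()
    meta.reflect(bind=engine)              # tables only (the default)
    meta.reflect(bind=engine, views=True)  # tables plus views
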
diff --git a/lib/sqlalchemy/sql/__init__.py b/lib/sqlalchemy/sql/__init__.py
index aa18eac17..2bb5f6ab4 100644
--- a/lib/sqlalchemy/sql/__init__.py
+++ b/lib/sqlalchemy/sql/__init__.py
@@ -47,6 +47,7 @@ from sqlalchemy.sql.expression import (
table,
text,
tuple_,
+ type_coerce,
union,
union_all,
update,
diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py
index e47db7e28..d3b8bf023 100644
--- a/lib/sqlalchemy/sql/compiler.py
+++ b/lib/sqlalchemy/sql/compiler.py
@@ -153,6 +153,10 @@ class _CompileLabel(visitors.Visitable):
def __init__(self, col, name):
self.element = col
self.name = name
+
+ @property
+ def type(self):
+ return self.element.type
@property
def quote(self):
@@ -317,7 +321,7 @@ class SQLCompiler(engine.Compiled):
if result_map is not None:
result_map[labelname.lower()] = \
- (label.name, (label, label.element, labelname), label.element.type)
+ (label.name, (label, label.element, labelname), label.type)
return self.process(label.element,
within_columns_clause=True,
@@ -329,7 +333,7 @@ class SQLCompiler(engine.Compiled):
return self.process(label.element,
within_columns_clause=False,
**kw)
-
+
def visit_column(self, column, result_map=None, **kwargs):
name = column.name
if name is None:
diff --git a/lib/sqlalchemy/sql/expression.py b/lib/sqlalchemy/sql/expression.py
index 219e3bf14..625893a68 100644
--- a/lib/sqlalchemy/sql/expression.py
+++ b/lib/sqlalchemy/sql/expression.py
@@ -45,8 +45,8 @@ __all__ = [
'except_', 'except_all', 'exists', 'extract', 'func', 'modifier',
'collate', 'insert', 'intersect', 'intersect_all', 'join', 'label',
'literal', 'literal_column', 'not_', 'null', 'or_', 'outparam',
- 'outerjoin', 'select', 'subquery', 'table', 'text', 'tuple_', 'union',
- 'union_all', 'update', ]
+ 'outerjoin', 'select', 'subquery', 'table', 'text', 'tuple_', 'type_coerce',
+ 'union', 'union_all', 'update', ]
PARSE_AUTOCOMMIT = util._symbol('PARSE_AUTOCOMMIT')
@@ -666,6 +666,54 @@ def tuple_(*expr):
"""
return _Tuple(*expr)
+
+def type_coerce(expr, type_):
+ """Coerce the given expression into the given type, on the Python side only.
+
+ :func:`.type_coerce` is roughly similar to :func:`.cast`, except no
+ "CAST" expression is rendered - the given type is only applied towards
+ expression typing and against received result values.
+
+ e.g.::
+
+ from sqlalchemy.types import TypeDecorator
+ import uuid
+
+ class AsGuid(TypeDecorator):
+ impl = String
+
+ def process_bind_param(self, value, dialect):
+ if value is not None:
+ return str(value)
+ else:
+ return None
+
+ def process_result_value(self, value, dialect):
+ if value is not None:
+ return uuid.UUID(value)
+ else:
+ return None
+
+ conn.execute(
+ select([type_coerce(mytable.c.ident, AsGuid)]).\\
+ where(
+ type_coerce(mytable.c.ident, AsGuid) ==
+ uuid.uuid3(uuid.NAMESPACE_URL, 'bar')
+ )
+ )
+
+ """
+ if hasattr(expr, '__clause_expr__'):
+ return type_coerce(expr.__clause_expr__(), type_)
+
+ elif not isinstance(expr, Visitable):
+ if expr is None:
+ return null()
+ else:
+ return literal(expr, type_=type_)
+ else:
+ return _Label(None, expr, type_=type_)
+
def label(name, obj):
"""Return a :class:`_Label` object for the
diff --git a/lib/sqlalchemy/test/__init__.py b/lib/sqlalchemy/test/__init__.py
index d69cedefd..7356945d2 100644
--- a/lib/sqlalchemy/test/__init__.py
+++ b/lib/sqlalchemy/test/__init__.py
@@ -6,7 +6,8 @@ by noseplugin.NoseSQLAlchemy.
"""
-from sqlalchemy.test import testing, engines, requires, profiling, pickleable, config
+from sqlalchemy_nose import config
+from sqlalchemy.test import testing, engines, requires, profiling, pickleable
from sqlalchemy.test.schema import Column, Table
from sqlalchemy.test.testing import \
AssertsCompiledSQL, \
diff --git a/lib/sqlalchemy/test/engines.py b/lib/sqlalchemy/test/engines.py
index 8b930175f..acae1d28a 100644
--- a/lib/sqlalchemy/test/engines.py
+++ b/lib/sqlalchemy/test/engines.py
@@ -1,6 +1,6 @@
import sys, types, weakref
from collections import deque
-import config
+from sqlalchemy_nose import config
from sqlalchemy.util import function_named, callable
from sqlalchemy import event
import re
diff --git a/lib/sqlalchemy/test/profiling.py b/lib/sqlalchemy/test/profiling.py
index c5256affa..835253a3a 100644
--- a/lib/sqlalchemy/test/profiling.py
+++ b/lib/sqlalchemy/test/profiling.py
@@ -6,7 +6,7 @@ in a more fine-grained way than nose's profiling plugin.
"""
import os, sys
-from sqlalchemy.test import config
+from sqlalchemy_nose import config
from sqlalchemy.test.util import function_named, gc_collect
from nose import SkipTest
diff --git a/lib/sqlalchemy/test/testing.py b/lib/sqlalchemy/test/testing.py
index 41ba3038f..12cbe5e02 100644
--- a/lib/sqlalchemy/test/testing.py
+++ b/lib/sqlalchemy/test/testing.py
@@ -8,7 +8,8 @@ import types
import warnings
from cStringIO import StringIO
-from sqlalchemy.test import config, assertsql, util as testutil
+from sqlalchemy_nose import config
+from sqlalchemy.test import assertsql, util as testutil
from sqlalchemy.util import function_named, py3k
from engines import drop_all_tables
@@ -207,9 +208,9 @@ def _block_unconditionally(db, reason):
return function_named(maybe, fn_name)
return decorate
-def only_on(db, reason):
+def only_on(dbs, reason):
carp = _should_carp_about_exclusion(reason)
- spec = db_spec(db)
+ spec = db_spec(*util.to_list(dbs))
def decorate(fn):
fn_name = fn.__name__
def maybe(*args, **kw):
diff --git a/lib/sqlalchemy/types.py b/lib/sqlalchemy/types.py
index 46e5901a3..ee1fdc67f 100644
--- a/lib/sqlalchemy/types.py
+++ b/lib/sqlalchemy/types.py
@@ -132,18 +132,28 @@ class AbstractType(Visitable):
# ClauseElement.compile()....this is a mistake.
if not dialect:
+ dialect = self._default_dialect
+
+ return dialect.type_compiler.process(self)
+
+ @property
+ def _default_dialect(self):
+ if self.__class__.__module__.startswith("sqlalchemy.dialects"):
+ tokens = self.__class__.__module__.split(".")[0:3]
+ mod = ".".join(tokens)
+ return getattr(__import__(mod).dialects, tokens[-1]).dialect()
+ else:
global DefaultDialect
if DefaultDialect is None:
from sqlalchemy.engine.default import DefaultDialect
- dialect = DefaultDialect()
+ return DefaultDialect()
- return dialect.type_compiler.process(self)
-
def __str__(self):
# Py3K
#return unicode(self.compile())
# Py2K
- return unicode(self.compile()).encode('ascii', 'backslashreplace')
+ return unicode(self.compile()).\
+ encode('ascii', 'backslashreplace')
# end Py2K
def __init__(self, *args, **kwargs):
@@ -346,21 +356,19 @@ class TypeDecorator(AbstractType):
"require a class-level variable "
"'impl' which refers to the class of "
"type being decorated")
- self.impl = self.__class__.impl(*args, **kwargs)
+ self.impl = to_instance(self.__class__.impl, *args, **kwargs)
def adapt(self, cls):
return cls()
def dialect_impl(self, dialect):
key = (dialect.__class__, dialect.server_version_info)
+
try:
return self._impl_dict[key]
except KeyError:
pass
- # adapt the TypeDecorator first, in
- # the case that the dialect maps the TD
- # to one of its native types (i.e. PGInterval)
adapted = dialect.type_descriptor(self)
if adapted is not self:
self._impl_dict[key] = adapted
@@ -369,7 +377,7 @@ class TypeDecorator(AbstractType):
# otherwise adapt the impl type, link
# to a copy of this TypeDecorator and return
# that.
- typedesc = self.load_dialect_impl(dialect)
+ typedesc = self.load_dialect_impl(dialect).dialect_impl(dialect)
tt = self.copy()
if not isinstance(tt, self.__class__):
raise AssertionError('Type object %s does not properly '
@@ -381,27 +389,33 @@ class TypeDecorator(AbstractType):
return tt
@util.memoized_property
+ def _impl_dict(self):
+ return {}
+
+ @util.memoized_property
def _type_affinity(self):
return self.impl._type_affinity
def type_engine(self, dialect):
- impl = self.dialect_impl(dialect)
- if not isinstance(impl, TypeDecorator):
- return impl
+ """Return a TypeEngine instance for this TypeDecorator.
+
+ """
+ adapted = dialect.type_descriptor(self)
+ if adapted is not self:
+ return adapted
+ elif isinstance(self.impl, TypeDecorator):
+ return self.impl.type_engine(dialect)
else:
- return impl.impl
+ return self.load_dialect_impl(dialect)
def load_dialect_impl(self, dialect):
- """Loads the dialect-specific implementation of this type.
+ """User hook which can be overridden to provide a different 'impl'
+ type per-dialect.
- by default calls dialect.type_descriptor(self.impl), but
- can be overridden to provide different behavior.
+ By default returns self.impl.
"""
- if isinstance(self.impl, TypeDecorator):
- return self.impl.dialect_impl(dialect)
- else:
- return dialect.type_descriptor(self.impl)
+ return self.impl
def __getattr__(self, key):
"""Proxy all other undefined accessors to the underlying
@@ -503,9 +517,11 @@ class TypeDecorator(AbstractType):
return self.impl.is_mutable()
def _adapt_expression(self, op, othertype):
- return self.impl._adapt_expression(op, othertype)
-
-
+ op, typ = self.impl._adapt_expression(op, othertype)
+ if typ is self.impl:
+ return op, self
+ else:
+ return op, typ
class MutableType(object):
"""A mixin that marks a :class:`TypeEngine` as representing
@@ -593,12 +609,12 @@ class MutableType(object):
"""Compare *x* == *y*."""
return x == y
-def to_instance(typeobj):
+def to_instance(typeobj, *arg, **kw):
if typeobj is None:
return NULLTYPE
if util.callable(typeobj):
- return typeobj()
+ return typeobj(*arg, **kw)
else:
return typeobj
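
A sketch of the reworked load_dialect_impl() hook per its new docstring: it now simply returns an alternate 'impl' instance, with dialect-level adaptation handled elsewhere (the GuidType class and dialect switch are hypothetical)::

    from sqlalchemy.types import TypeDecorator, String
    from sqlalchemy.dialects.postgresql import UUID

    class GuidType(TypeDecorator):
        impl = String

        def load_dialect_impl(self, dialect):
            # choose a per-dialect impl; the default hook would
            # return self.impl unchanged
            if dialect.name == 'postgresql':
                return UUID()
            else:
                return String(32)
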
diff --git a/lib/sqlalchemy/util.py b/lib/sqlalchemy/util.py
index 351b50883..e647e3d63 100644
--- a/lib/sqlalchemy/util.py
+++ b/lib/sqlalchemy/util.py
@@ -584,6 +584,18 @@ def asbool(obj):
raise ValueError("String is not true/false: %r" % obj)
return bool(obj)
+def bool_or_str(*text):
+ """Return a callable that will evaulate a string as
+ boolean, or one of a set of "alternate" string values.
+
+ """
+ def bool_or_value(obj):
+ if obj in text:
+ return obj
+ else:
+ return asbool(obj)
+ return bool_or_value
+
def coerce_kw_type(kw, key, type_, flexi_bool=True):
"""If 'key' is present in dict 'kw', coerce its value to type 'type\_' if
necessary. If 'flexi_bool' is True, the string '0' is considered false
@@ -745,7 +757,7 @@ class NamedTuple(tuple):
return t
def keys(self):
- return self._labels
+ return [l for l in self._labels if l is not None]
class OrderedProperties(object):
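
A sketch of bool_or_str() in use; strings outside the allowed set fall through to asbool()::

    from sqlalchemy.util import bool_or_str

    coerce = bool_or_str('debug', 'force')
    assert coerce('true') is True       # via asbool()
    assert coerce('0') is False         # via asbool()
    assert coerce('debug') == 'debug'   # passed through unchanged
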
diff --git a/lib/sqlalchemy_nose/__init__.py b/lib/sqlalchemy_nose/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/lib/sqlalchemy_nose/__init__.py
diff --git a/lib/sqlalchemy/test/config.py b/lib/sqlalchemy_nose/config.py
index 7d528a04b..7d528a04b 100644
--- a/lib/sqlalchemy/test/config.py
+++ b/lib/sqlalchemy_nose/config.py
diff --git a/lib/sqlalchemy/test/noseplugin.py b/lib/sqlalchemy_nose/noseplugin.py
index 6a3106e69..8732142f7 100644
--- a/lib/sqlalchemy/test/noseplugin.py
+++ b/lib/sqlalchemy_nose/noseplugin.py
@@ -10,9 +10,9 @@ import StringIO
import nose.case
from nose.plugins import Plugin
-from sqlalchemy import util, log as sqla_log
-from sqlalchemy.test import testing, config, requires
-from sqlalchemy.test.config import (
+from sqlalchemy_nose import config
+
+from sqlalchemy_nose.config import (
_create_testing_engine, _engine_pool, _engine_strategy, _engine_uri, _list_dbs, _log,
_prep_testing_database, _require, _reverse_topological, _server_side_cursors,
_set_table_options, base_config, db, db_label, db_url, file_config, post_configure)
@@ -78,6 +78,10 @@ class NoseSQLAlchemy(Plugin):
self.options = options
def begin(self):
+ global testing, requires, util
+ from sqlalchemy.test import testing, requires
+ from sqlalchemy import util
+
testing.db = db
testing.requires = requires
diff --git a/setup.py b/setup.py
index 76cba0584..7a8a0f3f2 100644
--- a/setup.py
+++ b/setup.py
@@ -56,11 +56,11 @@ elif BUILD_CEXTENSIONS:
def find_packages(dir_):
packages = []
- for _dir, subdirectories, files in os.walk(os.path.join(dir_,
- 'sqlalchemy')):
- if '__init__.py' in files:
- lib, fragment = _dir.split(os.sep, 1)
- packages.append(fragment.replace(os.sep, '.'))
+ for pkg in ['sqlalchemy', 'sqlalchemy_nose']:
+ for _dir, subdirectories, files in os.walk(os.path.join(dir_, pkg)):
+ if '__init__.py' in files:
+ lib, fragment = _dir.split(os.sep, 1)
+ packages.append(fragment.replace(os.sep, '.'))
return packages
if sys.version_info < (2, 4):
@@ -90,7 +90,7 @@ setup(name = "SQLAlchemy",
test_suite = "nose.collector",
entry_points = {
'nose.plugins.0.10': [
- 'sqlalchemy = sqlalchemy.test.noseplugin:NoseSQLAlchemy',
+ 'sqlalchemy = sqlalchemy_nose.noseplugin:NoseSQLAlchemy',
]
},
diff --git a/sqla_nose.py b/sqla_nose.py
index 32604f97c..6d76978a4 100755
--- a/sqla_nose.py
+++ b/sqla_nose.py
@@ -3,23 +3,24 @@
nose runner script.
Only use this script if setuptools is not available, i.e. such as
-on Python 3K. Otherwise consult README.unittests for the
+on Python 3K. Otherwise consult README.unittests for the
recommended methods of running tests.
"""
+import sys
+
try:
- import sqlalchemy
+ from sqlalchemy_nose.noseplugin import NoseSQLAlchemy
except ImportError:
from os import path
- import sys
- sys.path.append(path.join(path.dirname(__file__), 'lib'))
+ sys.path.append(path.join(path.dirname(path.abspath(__file__)), 'lib'))
+ from sqlalchemy_nose.noseplugin import NoseSQLAlchemy
import nose
-from sqlalchemy.test.noseplugin import NoseSQLAlchemy
-from sqlalchemy.util import py3k
if __name__ == '__main__':
+ py3k = getattr(sys, 'py3kwarning', False) or sys.version_info >= (3, 0)
if py3k:
# this version breaks verbose output,
# but is the only API that nose3 currently supports
diff --git a/test/__init__.py b/test/__init__.py
index 8b1378917..e69de29bb 100644
--- a/test/__init__.py
+++ b/test/__init__.py
@@ -1 +0,0 @@
-
diff --git a/test/dialect/test_mssql.py b/test/dialect/test_mssql.py
index 7f34b980a..e766a8301 100644
--- a/test/dialect/test_mssql.py
+++ b/test/dialect/test_mssql.py
@@ -9,7 +9,7 @@ from sqlalchemy import types, exc, schema
from sqlalchemy.orm import *
from sqlalchemy.sql import table, column
from sqlalchemy.databases import mssql
-from sqlalchemy.dialects.mssql import pyodbc, mxodbc
+from sqlalchemy.dialects.mssql import pyodbc, mxodbc, pymssql
from sqlalchemy.engine import url
from sqlalchemy.test import *
from sqlalchemy.test.testing import eq_, emits_warning_on, \
@@ -127,7 +127,6 @@ class CompileTest(TestBase, AssertsCompiledSQL):
s = table4.select(use_labels=True)
c = s.compile(dialect=self.__dialect__)
- print c.result_map
assert table4.c.rem_id \
in set(c.result_map['remote_owner_remotetable_rem_id'][1])
self.assert_compile(table4.select(),
@@ -151,7 +150,20 @@ class CompileTest(TestBase, AssertsCompiledSQL):
'remotetable_1.value FROM mytable JOIN '
'remote_owner.remotetable AS remotetable_1 '
'ON remotetable_1.rem_id = mytable.myid')
-
+
+ self.assert_compile(select([table4.c.rem_id,
+ table4.c.value]).apply_labels().union(select([table1.c.myid,
+ table1.c.description]).apply_labels()).alias().select(),
+ "SELECT anon_1.remote_owner_remotetable_rem_id, "
+ "anon_1.remote_owner_remotetable_value FROM "
+ "(SELECT remotetable_1.rem_id AS remote_owner_remotetable_rem_id, "
+ "remotetable_1.value AS remote_owner_remotetable_value "
+ "FROM remote_owner.remotetable AS remotetable_1 UNION "
+ "SELECT mytable.myid AS mytable_myid, mytable.description "
+ "AS mytable_description FROM mytable) AS anon_1"
+ )
+
+
def test_delete_schema(self):
metadata = MetaData()
tbl = Table('test', metadata, Column('id', Integer,
@@ -427,25 +439,80 @@ class ReflectionTest(TestBase, ComparesTables):
finally:
meta.drop_all()
+ @testing.provide_metadata
def test_identity(self):
- meta = MetaData(testing.db)
table = Table(
- 'identity_test', meta,
+ 'identity_test', metadata,
Column('col1', Integer, Sequence('fred', 2, 3), primary_key=True)
)
table.create()
meta2 = MetaData(testing.db)
- try:
- table2 = Table('identity_test', meta2, autoload=True)
- sequence = isinstance(table2.c['col1'].default, schema.Sequence) \
- and table2.c['col1'].default
- assert sequence.start == 2
- assert sequence.increment == 3
- finally:
- table.drop()
-
+ table2 = Table('identity_test', meta2, autoload=True)
+ sequence = isinstance(table2.c['col1'].default, schema.Sequence) \
+ and table2.c['col1'].default
+ assert sequence.start == 2
+ assert sequence.increment == 3
+
+ @testing.emits_warning("Did not recognize")
+ @testing.provide_metadata
+ def test_skip_types(self):
+ testing.db.execute("""
+ create table foo (id integer primary key, data xml)
+ """)
+ t1 = Table('foo', metadata, autoload=True)
+ assert isinstance(t1.c.id.type, Integer)
+ assert isinstance(t1.c.data.type, types.NullType)
+
+ @testing.provide_metadata
+ def test_indexes_cols(self):
+
+ t1 = Table('t', metadata, Column('x', Integer), Column('y', Integer))
+ Index('foo', t1.c.x, t1.c.y)
+ metadata.create_all()
+
+ m2 = MetaData()
+ t2 = Table('t', m2, autoload=True, autoload_with=testing.db)
+
+ eq_(
+ set(list(t2.indexes)[0].columns),
+ set([t2.c['x'], t2.c.y])
+ )
+ @testing.provide_metadata
+ def test_indexes_cols_with_commas(self):
+
+ t1 = Table('t', metadata,
+ Column('x, col', Integer, key='x'),
+ Column('y', Integer)
+ )
+ Index('foo', t1.c.x, t1.c.y)
+ metadata.create_all()
+
+ m2 = MetaData()
+ t2 = Table('t', m2, autoload=True, autoload_with=testing.db)
+
+ eq_(
+ set(list(t2.indexes)[0].columns),
+ set([t2.c['x, col'], t2.c.y])
+ )
+
+ @testing.provide_metadata
+ def test_indexes_cols_with_spaces(self):
+
+ t1 = Table('t', metadata, Column('x col', Integer, key='x'),
+ Column('y', Integer))
+ Index('foo', t1.c.x, t1.c.y)
+ metadata.create_all()
+
+ m2 = MetaData()
+ t2 = Table('t', m2, autoload=True, autoload_with=testing.db)
+
+ eq_(
+ set(list(t2.indexes)[0].columns),
+ set([t2.c['x col'], t2.c.y])
+ )
+
class QueryUnicodeTest(TestBase):
__only_on__ = 'mssql'
@@ -799,12 +866,10 @@ class MatchTest(TestBase, AssertsCompiledSQL):
class ParseConnectTest(TestBase, AssertsCompiledSQL):
- __only_on__ = 'mssql'
-
@classmethod
def setup_class(cls):
global dialect
- dialect = pyodbc.MSDialect_pyodbc()
+ dialect = pyodbc.dialect()
def test_pyodbc_connect_dsn_trusted(self):
u = url.make_url('mssql://mydsn')
@@ -890,7 +955,27 @@ class ParseConnectTest(TestBase, AssertsCompiledSQL):
connection = dialect.create_connect_args(u)
eq_([['DRIVER={SQL Server};Server=hostspec;Database=database;UI'
'D=username;PWD=password'], {}], connection)
+
+ def test_pymssql_port_setting(self):
+ dialect = pymssql.dialect()
+
+ u = \
+ url.make_url('mssql+pymssql://scott:tiger@somehost/test')
+ connection = dialect.create_connect_args(u)
+ eq_(
+ [[], {'host': 'somehost', 'password': 'tiger',
+ 'user': 'scott', 'database': 'test'}], connection
+ )
+ u = \
+ url.make_url('mssql+pymssql://scott:tiger@somehost:5000/test')
+ connection = dialect.create_connect_args(u)
+ eq_(
+ [[], {'host': 'somehost:5000', 'password': 'tiger',
+ 'user': 'scott', 'database': 'test'}], connection
+ )
+
+ @testing.only_on(['mssql+pyodbc', 'mssql+pymssql'], "FreeTDS specific test")
def test_bad_freetds_warning(self):
engine = engines.testing_engine()
diff --git a/test/dialect/test_mysql.py b/test/dialect/test_mysql.py
index 7c4cc2309..78e1b9ab0 100644
--- a/test/dialect/test_mysql.py
+++ b/test/dialect/test_mysql.py
@@ -763,6 +763,9 @@ class ReflectionTest(TestBase, AssertsExecutionResults):
Column('c4', TIMESTAMP, DefaultClause('2009-04-05 12:00:00'
)),
Column('c5', TIMESTAMP),
+ Column('c6', TIMESTAMP,
+ DefaultClause(sql.text("CURRENT_TIMESTAMP "
+ "ON UPDATE CURRENT_TIMESTAMP"))),
)
def_table.create()
try:
@@ -780,6 +783,13 @@ class ReflectionTest(TestBase, AssertsExecutionResults):
assert str(reflected.c.c3.server_default.arg) == "'abc'"
assert str(reflected.c.c4.server_default.arg) \
== "'2009-04-05 12:00:00'"
+ assert reflected.c.c5.default is None
+ assert reflected.c.c5.server_default is None
+ assert reflected.c.c6.default is None
+ eq_(
+ str(reflected.c.c6.server_default.arg).upper(),
+ "CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP"
+ )
reflected.create()
try:
reflected2 = Table('mysql_def', MetaData(testing.db),
@@ -791,6 +801,13 @@ class ReflectionTest(TestBase, AssertsExecutionResults):
assert str(reflected2.c.c3.server_default.arg) == "'abc'"
assert str(reflected2.c.c4.server_default.arg) \
== "'2009-04-05 12:00:00'"
+ assert reflected2.c.c5.default is None
+ assert reflected2.c.c5.server_default is None
+ assert reflected2.c.c6.default is None
+ eq_(
+ str(reflected2.c.c6.server_default.arg).upper(),
+ "CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP"
+ )
def test_reflection_with_table_options(self):
comment = r"""Comment types type speedily ' " \ '' Fun!"""
diff --git a/test/dialect/test_oracle.py b/test/dialect/test_oracle.py
index 29d18b988..04b9d3274 100644
--- a/test/dialect/test_oracle.py
+++ b/test/dialect/test_oracle.py
@@ -376,6 +376,7 @@ class CompatFlagsTest(TestBase, AssertsCompiledSQL):
assert dialect.use_ansi
dialect.initialize(testing.db.connect())
+ assert not dialect.implicit_returning
assert not dialect._supports_char_length
assert not dialect._supports_nchar
assert not dialect.use_ansi
@@ -383,6 +384,12 @@ class CompatFlagsTest(TestBase, AssertsCompiledSQL):
self.assert_compile(Unicode(50),"VARCHAR(50)",dialect=dialect)
self.assert_compile(UnicodeText(),"CLOB",dialect=dialect)
+ dialect = oracle.dialect(implicit_returning=True)
+ dialect._get_server_version_info = server_version_info
+ dialect.initialize(testing.db.connect())
+ assert dialect.implicit_returning
+
+
def test_default_flags(self):
"""test with no initialization or server version info"""
dialect = oracle.dialect()
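
A sketch of the flag under test here: implicit RETURNING can be forced on at engine creation even where server version detection would otherwise disable it (URL hypothetical)::

    from sqlalchemy import create_engine

    engine = create_engine('oracle://scott:tiger@localhost/xe',
                           implicit_returning=True)
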
diff --git a/test/dialect/test_postgresql.py b/test/dialect/test_postgresql.py
index 9ad46c189..e20274aef 100644
--- a/test/dialect/test_postgresql.py
+++ b/test/dialect/test_postgresql.py
@@ -909,7 +909,10 @@ class DomainReflectionTest(TestBase, AssertsExecutionResults):
con = testing.db.connect()
for ddl in \
'CREATE DOMAIN testdomain INTEGER NOT NULL DEFAULT 42', \
- 'CREATE DOMAIN test_schema.testdomain INTEGER DEFAULT 0':
+ 'CREATE DOMAIN test_schema.testdomain INTEGER DEFAULT 0', \
+ "CREATE TYPE testtype AS ENUM ('test')", \
+ 'CREATE DOMAIN enumdomain AS testtype':
try:
con.execute(ddl)
except exc.SQLError, e:
@@ -923,6 +926,8 @@ class DomainReflectionTest(TestBase, AssertsExecutionResults):
con.execute('CREATE TABLE crosschema (question integer, answer '
'test_schema.testdomain)')
+ con.execute('CREATE TABLE enum_test (id integer, data enumdomain)')
+
@classmethod
def teardown_class(cls):
con = testing.db.connect()
@@ -931,7 +936,10 @@ class DomainReflectionTest(TestBase, AssertsExecutionResults):
con.execute('DROP TABLE crosschema')
con.execute('DROP DOMAIN testdomain')
con.execute('DROP DOMAIN test_schema.testdomain')
-
+ con.execute("DROP TABLE enum_test")
+ con.execute("DROP DOMAIN enumdomain")
+ con.execute("DROP TYPE testtype")
+
def test_table_is_reflected(self):
metadata = MetaData(testing.db)
table = Table('testtable', metadata, autoload=True)
@@ -946,7 +954,15 @@ class DomainReflectionTest(TestBase, AssertsExecutionResults):
"Reflected default value didn't equal expected value")
assert not table.columns.answer.nullable, \
'Expected reflected column to not be nullable.'
-
+
+ def test_enum_domain_is_reflected(self):
+ metadata = MetaData(testing.db)
+ table = Table('enum_test', metadata, autoload=True)
+ eq_(
+ table.c.data.type.enums,
+ ('test', )
+ )
+
def test_table_is_reflected_test_schema(self):
metadata = MetaData(testing.db)
table = Table('testtable', metadata, autoload=True,
@@ -990,6 +1006,7 @@ class DomainReflectionTest(TestBase, AssertsExecutionResults):
finally:
postgresql.PGDialect.ischema_names = ischema_names
+
class MiscTest(TestBase, AssertsExecutionResults, AssertsCompiledSQL):
__only_on__ = 'postgresql'
@@ -1492,8 +1509,8 @@ class ArrayTest(TestBase, AssertsExecutionResults):
metadata = MetaData(testing.db)
arrtable = Table('arrtable', metadata, Column('id', Integer,
primary_key=True), Column('intarr',
- postgresql.PGArray(Integer)), Column('strarr',
- postgresql.PGArray(Unicode()), nullable=False))
+ postgresql.ARRAY(Integer)), Column('strarr',
+ postgresql.ARRAY(Unicode()), nullable=False))
metadata.create_all()
def teardown(self):
@@ -1506,8 +1523,8 @@ class ArrayTest(TestBase, AssertsExecutionResults):
def test_reflect_array_column(self):
metadata2 = MetaData(testing.db)
tbl = Table('arrtable', metadata2, autoload=True)
- assert isinstance(tbl.c.intarr.type, postgresql.PGArray)
- assert isinstance(tbl.c.strarr.type, postgresql.PGArray)
+ assert isinstance(tbl.c.intarr.type, postgresql.ARRAY)
+ assert isinstance(tbl.c.strarr.type, postgresql.ARRAY)
assert isinstance(tbl.c.intarr.type.item_type, Integer)
assert isinstance(tbl.c.strarr.type.item_type, String)
@@ -1575,7 +1592,7 @@ class ArrayTest(TestBase, AssertsExecutionResults):
footable = Table('foo', metadata, Column('id', Integer,
primary_key=True), Column('intarr',
- postgresql.PGArray(Integer), nullable=True))
+ postgresql.ARRAY(Integer), nullable=True))
mapper(Foo, footable)
metadata.create_all()
sess = create_session()
@@ -1607,7 +1624,41 @@ class ArrayTest(TestBase, AssertsExecutionResults):
foo.id = 2
sess.add(foo)
sess.flush()
-
+
+ @testing.provide_metadata
+ def test_tuple_flag(self):
+ assert_raises_message(
+ exc.ArgumentError,
+ "mutable must be set to False if as_tuple is True.",
+ postgresql.ARRAY, Integer, as_tuple=True)
+
+ t1 = Table('t1', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', postgresql.ARRAY(String(5), as_tuple=True, mutable=False)),
+ Column('data2', postgresql.ARRAY(Numeric(asdecimal=False), as_tuple=True, mutable=False)),
+ )
+ metadata.create_all()
+ testing.db.execute(t1.insert(), id=1, data=["1","2","3"], data2=[5.4, 5.6])
+ testing.db.execute(t1.insert(), id=2, data=["4", "5", "6"], data2=[1.0])
+ testing.db.execute(t1.insert(), id=3, data=[["4", "5"], ["6", "7"]], data2=[[5.4, 5.6], [1.0, 1.1]])
+
+ r = testing.db.execute(t1.select().order_by(t1.c.id)).fetchall()
+ eq_(
+ r,
+ [
+ (1, ('1', '2', '3'), (5.4, 5.6)),
+ (2, ('4', '5', '6'), (1.0,)),
+ (3, (('4', '5'), ('6', '7')), ((5.4, 5.6), (1.0, 1.1)))
+ ]
+ )
+ # hashable
+ eq_(
+ set(row[1] for row in r),
+ set([('1', '2', '3'), ('4', '5', '6'), (('4', '5'), ('6', '7'))])
+ )
+
+
+
class TimestampTest(TestBase, AssertsExecutionResults):
__only_on__ = 'postgresql'
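
A sketch of the as_tuple flag exercised above; result values come back as (possibly nested) tuples and are therefore hashable (table name hypothetical)::

    from sqlalchemy import Table, Column, Integer, String, MetaData
    from sqlalchemy.dialects import postgresql

    meta = MetaData()
    t1 = Table('t1', meta,
        Column('id', Integer, primary_key=True),
        Column('data', postgresql.ARRAY(String(5),
                            as_tuple=True, mutable=False)),
    )
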
diff --git a/test/engine/test_parseconnect.py b/test/engine/test_parseconnect.py
index 7fb8d8a90..78b75ad2f 100644
--- a/test/engine/test_parseconnect.py
+++ b/test/engine/test_parseconnect.py
@@ -3,6 +3,7 @@ import ConfigParser
import StringIO
import sqlalchemy.engine.url as url
from sqlalchemy import create_engine, engine_from_config
+from sqlalchemy.engine import _coerce_config
import sqlalchemy as tsa
from sqlalchemy.test import TestBase
@@ -138,6 +139,21 @@ pool_timeout=10
'z=somevalue')
assert e.echo is True
+ for param, values in [
+ ('convert_unicode', ('true', 'false', 'force')),
+ ('echo', ('true', 'false', 'debug')),
+ ('echo_pool', ('true', 'false', 'debug')),
+ ('use_native_unicode', ('true', 'false')),
+ ]:
+ for value in values:
+ config = {
+ 'sqlalchemy.url': 'postgresql://scott:tiger@somehost/test',
+ 'sqlalchemy.%s' % param : value
+ }
+ cfg = _coerce_config(config, 'sqlalchemy.')
+ assert cfg[param] == {'true':True, 'false':False}.get(value, value)
+
+
def test_custom(self):
dbapi = MockDBAPI(foober=12, lala=18, hoho={'this': 'dict'},
fooz='somevalue')
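
The coercions exercised above are what engine_from_config() applies; a sketch (URL hypothetical)::

    from sqlalchemy import engine_from_config

    config = {
        'sqlalchemy.url': 'postgresql://scott:tiger@localhost/test',
        'sqlalchemy.echo': 'debug',             # bool_or_str coercion
        'sqlalchemy.convert_unicode': 'force',  # likewise
        'sqlalchemy.pool_timeout': '10',        # coerced to int
    }
    engine = engine_from_config(config, prefix='sqlalchemy.')
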
diff --git a/test/engine/test_reflection.py b/test/engine/test_reflection.py
index 6a8e7cf8a..d0d6e31e1 100644
--- a/test/engine/test_reflection.py
+++ b/test/engine/test_reflection.py
@@ -760,28 +760,50 @@ class ReflectionTest(TestBase, ComparesTables):
finally:
m1.drop_all()
+ @testing.provide_metadata
def test_views(self):
- meta = MetaData(testing.db)
- users, addresses = createTables(meta, None)
- meta.create_all()
- createViews(meta.bind, None)
+ users, addresses = createTables(metadata, None)
try:
+ metadata.create_all()
+ createViews(metadata.bind, None)
m2 = MetaData(testing.db)
users_v = Table("users_v", m2, autoload=True)
addresses_v = Table("email_addresses_v", m2, autoload=True)
-
+
for c1, c2 in zip(users.c, users_v.c):
eq_(c1.name, c2.name)
self.assert_types_base(c1, c2)
-
+
for c1, c2 in zip(addresses.c, addresses_v.c):
eq_(c1.name, c2.name)
self.assert_types_base(c1, c2)
+ finally:
+ dropViews(metadata.bind)
+
+ @testing.provide_metadata
+ def test_reflect_all_with_views(self):
+ users, addresses = createTables(metadata, None)
+ try:
+ metadata.create_all()
+ createViews(metadata.bind, None)
+ m2 = MetaData(testing.db)
+
+ m2.reflect(views=False)
+ eq_(
+ set(m2.tables),
+ set([u'users', u'email_addresses'])
+ )
+ m2 = MetaData(testing.db)
+ m2.reflect(views=True)
+ eq_(
+ set(m2.tables),
+ set([u'email_addresses_v', u'users_v',
+ u'users', u'email_addresses'])
+ )
finally:
- dropViews(meta.bind, None)
- meta.drop_all()
-
+ dropViews(metadata.bind)
+
class CreateDropTest(TestBase):
@classmethod
diff --git a/test/ext/test_declarative.py b/test/ext/test_declarative.py
index 0202aa69f..72e2edf30 100644
--- a/test/ext/test_declarative.py
+++ b/test/ext/test_declarative.py
@@ -634,7 +634,7 @@ class DeclarativeTest(DeclarativeTestBase):
def test_table_args(self):
def err():
- class Foo(Base):
+ class Foo1(Base):
__tablename__ = 'foo'
__table_args__ = ForeignKeyConstraint(['id'], ['foo.id'
@@ -644,13 +644,13 @@ class DeclarativeTest(DeclarativeTestBase):
assert_raises_message(sa.exc.ArgumentError,
'Tuple form of __table_args__ is ', err)
- class Foo(Base):
+ class Foo2(Base):
__tablename__ = 'foo'
__table_args__ = {'mysql_engine': 'InnoDB'}
id = Column('id', Integer, primary_key=True)
- assert Foo.__table__.kwargs['mysql_engine'] == 'InnoDB'
+ assert Foo2.__table__.kwargs['mysql_engine'] == 'InnoDB'
class Bar(Base):
@@ -659,7 +659,7 @@ class DeclarativeTest(DeclarativeTestBase):
{'mysql_engine': 'InnoDB'}
id = Column('id', Integer, primary_key=True)
- assert Bar.__table__.c.id.references(Foo.__table__.c.id)
+ assert Bar.__table__.c.id.references(Foo2.__table__.c.id)
assert Bar.__table__.kwargs['mysql_engine'] == 'InnoDB'
def test_expression(self):
@@ -1084,6 +1084,21 @@ class DeclarativeTest(DeclarativeTestBase):
)).one()
eq_(rt, u1)
+ @testing.emits_warning(
+ "The classname 'Test' is already in the registry "
+ "of this declarative base, mapped to "
+ "<class 'test.ext.test_declarative.Test'>"
+ )
+ def test_duplicate_classes_in_base(self):
+
+ class Test(Base):
+ __tablename__ = 'a'
+ id = Column(Integer, primary_key=True)
+
+ class Test(Base):
+ __tablename__ = 'b'
+ id = Column(Integer, primary_key=True)
+
class DeclarativeInheritanceTest(DeclarativeTestBase):
def test_we_must_copy_mapper_args(self):
@@ -2155,7 +2170,28 @@ class DeclarativeMixinTest(DeclarativeTestBase):
eq_(obj.name, 'testing')
eq_(obj.foo(), 'bar1')
eq_(obj.baz, 'fu')
+
+ def test_mixin_overrides(self):
+ """test a mixin that overrides a column on a superclass."""
+
+ class MixinA(object):
+ foo = Column(String(50))
+
+ class MixinB(MixinA):
+ foo = Column(Integer)
+ class MyModelA(Base, MixinA):
+ __tablename__ = 'testa'
+ id = Column(Integer, primary_key=True)
+
+ class MyModelB(Base, MixinB):
+ __tablename__ = 'testb'
+ id = Column(Integer, primary_key=True)
+
+ eq_(MyModelA.__table__.c.foo.type.__class__, String)
+ eq_(MyModelB.__table__.c.foo.type.__class__, Integer)
+
+
def test_not_allowed(self):
class MyMixin:
@@ -3084,4 +3120,4 @@ class DeclarativeMixinPropertyTest(DeclarativeTestBase):
def test_relationship_primryjoin(self):
self._test_relationship(True)
- \ No newline at end of file
+
diff --git a/test/orm/test_eager_relations.py b/test/orm/test_eager_relations.py
index b96014a57..b70ad0973 100644
--- a/test/orm/test_eager_relations.py
+++ b/test/orm/test_eager_relations.py
@@ -731,6 +731,21 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
"FROM (SELECT users.id AS users_id, users.name AS users_name "
"FROM users "
" LIMIT 10) AS anon_1 LEFT OUTER JOIN orders AS orders_1 ON anon_1.users_id = "
+ "orders_1.user_id LEFT OUTER JOIN addresses AS addresses_1 ON addresses_1.id = orders_1.address_id"
+ ,use_default_dialect=True
+ )
+
+ self.assert_compile(
+ sess.query(User).options(joinedload("orders", innerjoin=True),
+ joinedload("orders.address", innerjoin=True)).limit(10),
+ "SELECT anon_1.users_id AS anon_1_users_id, anon_1.users_name AS anon_1_users_name, "
+ "addresses_1.id AS addresses_1_id, addresses_1.user_id AS addresses_1_user_id, "
+ "addresses_1.email_address AS addresses_1_email_address, orders_1.id AS orders_1_id, "
+ "orders_1.user_id AS orders_1_user_id, orders_1.address_id AS orders_1_address_id, "
+ "orders_1.description AS orders_1_description, orders_1.isopen AS orders_1_isopen "
+ "FROM (SELECT users.id AS users_id, users.name AS users_name "
+ "FROM users "
+ " LIMIT 10) AS anon_1 JOIN orders AS orders_1 ON anon_1.users_id = "
"orders_1.user_id JOIN addresses AS addresses_1 ON addresses_1.id = orders_1.address_id"
,use_default_dialect=True
)
@@ -919,7 +934,8 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
@testing.resolve_artifact_names
def test_inner_join(self):
mapper(User, users, properties = dict(
- addresses = relationship(mapper(Address, addresses), lazy='joined', innerjoin=True, order_by=addresses.c.id)
+ addresses = relationship(mapper(Address, addresses), lazy='joined',
+ innerjoin=True, order_by=addresses.c.id)
))
sess = create_session()
eq_(
@@ -939,6 +955,100 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
, use_default_dialect=True)
@testing.resolve_artifact_names
+ def test_inner_join_chaining_options(self):
+ mapper(User, users, properties = dict(
+ orders =relationship(Order, innerjoin=True,
+ lazy=False)
+ ))
+ mapper(Order, orders, properties=dict(
+ items=relationship(Item, secondary=order_items, lazy=False,
+ innerjoin=True)
+ ))
+ mapper(Item, items)
+
+ sess = create_session()
+ self.assert_compile(
+ sess.query(User),
+ "SELECT users.id AS users_id, users.name AS users_name, items_1.id AS "
+ "items_1_id, items_1.description AS items_1_description, orders_1.id AS "
+ "orders_1_id, orders_1.user_id AS orders_1_user_id, orders_1.address_id AS "
+ "orders_1_address_id, orders_1.description AS orders_1_description, "
+ "orders_1.isopen AS orders_1_isopen FROM users JOIN orders AS orders_1 ON "
+ "users.id = orders_1.user_id JOIN order_items AS order_items_1 ON orders_1.id = "
+ "order_items_1.order_id JOIN items AS items_1 ON items_1.id = "
+ "order_items_1.item_id",
+ use_default_dialect=True
+ )
+
+ self.assert_compile(
+ sess.query(User).options(joinedload(User.orders, innerjoin=False)),
+ "SELECT users.id AS users_id, users.name AS users_name, items_1.id AS "
+ "items_1_id, items_1.description AS items_1_description, orders_1.id AS "
+ "orders_1_id, orders_1.user_id AS orders_1_user_id, orders_1.address_id AS "
+ "orders_1_address_id, orders_1.description AS orders_1_description, "
+ "orders_1.isopen AS orders_1_isopen FROM users LEFT OUTER JOIN orders AS orders_1 ON "
+ "users.id = orders_1.user_id LEFT OUTER JOIN order_items AS order_items_1 ON orders_1.id = "
+ "order_items_1.order_id LEFT OUTER JOIN items AS items_1 ON items_1.id = "
+ "order_items_1.item_id",
+ use_default_dialect=True
+ )
+
+ self.assert_compile(
+ sess.query(User).options(joinedload(User.orders, Order.items, innerjoin=False)),
+ "SELECT users.id AS users_id, users.name AS users_name, items_1.id AS "
+ "items_1_id, items_1.description AS items_1_description, orders_1.id AS "
+ "orders_1_id, orders_1.user_id AS orders_1_user_id, orders_1.address_id AS "
+ "orders_1_address_id, orders_1.description AS orders_1_description, "
+ "orders_1.isopen AS orders_1_isopen FROM users JOIN orders AS orders_1 ON "
+ "users.id = orders_1.user_id LEFT OUTER JOIN order_items AS order_items_1 ON orders_1.id = "
+ "order_items_1.order_id LEFT OUTER JOIN items AS items_1 ON items_1.id = "
+ "order_items_1.item_id",
+ use_default_dialect=True
+ )
+
+ @testing.resolve_artifact_names
+ def test_inner_join_chaining_fixed(self):
+ mapper(User, users, properties = dict(
+ orders =relationship(Order, lazy=False)
+ ))
+ mapper(Order, orders, properties=dict(
+ items=relationship(Item, secondary=order_items, lazy=False,
+ innerjoin=True)
+ ))
+ mapper(Item, items)
+
+ sess = create_session()
+
+ # joining from User, it's all LEFT OUTER JOINs
+ self.assert_compile(
+ sess.query(User),
+ "SELECT users.id AS users_id, users.name AS users_name, items_1.id AS "
+ "items_1_id, items_1.description AS items_1_description, orders_1.id AS "
+ "orders_1_id, orders_1.user_id AS orders_1_user_id, orders_1.address_id AS "
+ "orders_1_address_id, orders_1.description AS orders_1_description, "
+ "orders_1.isopen AS orders_1_isopen FROM users LEFT OUTER JOIN orders AS orders_1 ON "
+ "users.id = orders_1.user_id LEFT OUTER JOIN order_items AS order_items_1 ON orders_1.id = "
+ "order_items_1.order_id LEFT OUTER JOIN items AS items_1 ON items_1.id = "
+ "order_items_1.item_id",
+ use_default_dialect=True
+ )
+
+ # joining just from Order, innerjoin=True can be respected
+ self.assert_compile(
+ sess.query(Order),
+ "SELECT orders.id AS orders_id, orders.user_id AS orders_user_id, "
+ "orders.address_id AS orders_address_id, orders.description AS "
+ "orders_description, orders.isopen AS orders_isopen, items_1.id "
+ "AS items_1_id, items_1.description AS items_1_description FROM "
+ "orders JOIN order_items AS order_items_1 ON orders.id = "
+ "order_items_1.order_id JOIN items AS items_1 ON items_1.id = "
+ "order_items_1.item_id",
+ use_default_dialect=True
+ )
+
+
+
+ @testing.resolve_artifact_names
def test_inner_join_options(self):
mapper(User, users, properties = dict(
orders =relationship(Order, backref=backref('user', innerjoin=True), order_by=orders.c.id)
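
The chaining rule these tests pin down, sketched at the option level: once a join in the chain is rendered OUTER, joins chained from it stay OUTER as well, even for relationships marked innerjoin=True (fixtures assumed as above)::

    from sqlalchemy.orm import joinedload

    # User->orders is forced to LEFT OUTER JOIN here, so the
    # chained orders->items join also renders as LEFT OUTER JOIN
    # despite innerjoin=True on that relationship
    sess.query(User).options(
        joinedload(User.orders, innerjoin=False)
    ).all()
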
diff --git a/test/orm/test_immediate_load.py b/test/orm/test_immediate_load.py
new file mode 100644
index 000000000..f85208bff
--- /dev/null
+++ b/test/orm/test_immediate_load.py
@@ -0,0 +1,49 @@
+"""basic tests of lazy loaded attributes"""
+
+from sqlalchemy.test import testing
+from sqlalchemy.orm import mapper, relationship, create_session, immediateload
+from sqlalchemy.test.testing import eq_
+from test.orm import _fixtures
+
+
+class ImmediateTest(_fixtures.FixtureTest):
+ run_inserts = 'once'
+ run_deletes = None
+
+ @testing.resolve_artifact_names
+ def test_basic_option(self):
+ mapper(Address, addresses)
+ mapper(User, users, properties={
+ 'addresses':relationship(Address)
+ })
+ sess = create_session()
+
+ l = sess.query(User).options(immediateload(User.addresses)).filter(users.c.id==7).all()
+ eq_(len(sess.identity_map), 2)
+
+ sess.close()
+
+ eq_(
+ [User(id=7, addresses=[Address(id=1, email_address='jack@bean.com')])],
+ l
+ )
+
+
+ @testing.resolve_artifact_names
+ def test_basic(self):
+ mapper(Address, addresses)
+ mapper(User, users, properties={
+ 'addresses':relationship(Address, lazy='immediate')
+ })
+ sess = create_session()
+
+ l = sess.query(User).filter(users.c.id==7).all()
+ eq_(len(sess.identity_map), 2)
+ sess.close()
+
+ eq_(
+ [User(id=7, addresses=[Address(id=1, email_address='jack@bean.com')])],
+ l
+ )
+
+
diff --git a/test/orm/test_query.py b/test/orm/test_query.py
index 91c09be63..d96fa7384 100644
--- a/test/orm/test_query.py
+++ b/test/orm/test_query.py
@@ -664,6 +664,29 @@ class ExpressionTest(QueryTest, AssertsCompiledSQL):
q = session.query(User).filter(User.id==q)
eq_(User(id=7), q.one())
+
+ def test_label(self):
+ session = create_session()
+
+ q = session.query(User.id).filter(User.id==7).label('foo')
+ self.assert_compile(
+ session.query(q),
+ "SELECT (SELECT users.id FROM users WHERE users.id = :id_1) AS foo",
+ use_default_dialect=True
+ )
+
+ def test_as_scalar(self):
+ session = create_session()
+
+ q = session.query(User.id).filter(User.id==7).as_scalar()
+
+ self.assert_compile(session.query(User).filter(User.id.in_(q)),
+ 'SELECT users.id AS users_id, users.name '
+ 'AS users_name FROM users WHERE users.id '
+ 'IN (SELECT users.id FROM users WHERE '
+ 'users.id = :id_1)',
+ use_default_dialect=True)
+
def test_param_transfer(self):
session = create_session()
@@ -2863,18 +2886,34 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
eq_(list(q2), [(u'jack',), (u'ed',)])
q = sess.query(User)
- q2 = q.order_by(User.id).values(User.name, User.name + " " + cast(User.id, String(50)))
- eq_(list(q2), [(u'jack', u'jack 7'), (u'ed', u'ed 8'), (u'fred', u'fred 9'), (u'chuck', u'chuck 10')])
-
- q2 = q.join('addresses').filter(User.name.like('%e%')).order_by(User.id, Address.id).values(User.name, Address.email_address)
- eq_(list(q2), [(u'ed', u'ed@wood.com'), (u'ed', u'ed@bettyboop.com'), (u'ed', u'ed@lala.com'), (u'fred', u'fred@fred.com')])
-
- q2 = q.join('addresses').filter(User.name.like('%e%')).order_by(desc(Address.email_address)).slice(1, 3).values(User.name, Address.email_address)
+ q2 = q.order_by(User.id).\
+ values(User.name, User.name + " " + cast(User.id, String(50)))
+ eq_(
+ list(q2),
+ [(u'jack', u'jack 7'), (u'ed', u'ed 8'),
+ (u'fred', u'fred 9'), (u'chuck', u'chuck 10')]
+ )
+
+ q2 = q.join('addresses').\
+ filter(User.name.like('%e%')).\
+ order_by(User.id, Address.id).\
+ values(User.name, Address.email_address)
+ eq_(list(q2),
+ [(u'ed', u'ed@wood.com'), (u'ed', u'ed@bettyboop.com'),
+ (u'ed', u'ed@lala.com'), (u'fred', u'fred@fred.com')])
+
+ q2 = q.join('addresses').\
+ filter(User.name.like('%e%')).\
+ order_by(desc(Address.email_address)).\
+ slice(1, 3).values(User.name, Address.email_address)
eq_(list(q2), [(u'ed', u'ed@wood.com'), (u'ed', u'ed@lala.com')])
adalias = aliased(Address)
- q2 = q.join(('addresses', adalias)).filter(User.name.like('%e%')).values(User.name, adalias.email_address)
- eq_(list(q2), [(u'ed', u'ed@wood.com'), (u'ed', u'ed@bettyboop.com'), (u'ed', u'ed@lala.com'), (u'fred', u'fred@fred.com')])
+ q2 = q.join(('addresses', adalias)).\
+ filter(User.name.like('%e%')).\
+ values(User.name, adalias.email_address)
+ eq_(list(q2), [(u'ed', u'ed@wood.com'), (u'ed', u'ed@bettyboop.com'),
+ (u'ed', u'ed@lala.com'), (u'fred', u'fred@fred.com')])
q2 = q.values(func.count(User.name))
assert q2.next() == (4,)
@@ -2883,11 +2922,15 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
eq_(list(q2), [(u'ed', u'ed', u'ed')])
# using User.xxx is aliased against "sel", so this query returns nothing
- q2 = q.select_from(sel).filter(User.id==8).filter(User.id>sel.c.id).values(User.name, sel.c.name, User.name)
+ q2 = q.select_from(sel).\
+ filter(User.id==8).\
+ filter(User.id>sel.c.id).values(User.name, sel.c.name, User.name)
eq_(list(q2), [])
# whereas this uses users.c.xxx, is not aliased and creates a new join
- q2 = q.select_from(sel).filter(users.c.id==8).filter(users.c.id>sel.c.id).values(users.c.name, sel.c.name, User.name)
+ q2 = q.select_from(sel).\
+ filter(users.c.id==8).\
+ filter(users.c.id>sel.c.id).values(users.c.name, sel.c.name, User.name)
eq_(list(q2), [(u'ed', u'jack', u'jack')])
@testing.fails_on('mssql', 'FIXME: unknown')
@@ -2899,19 +2942,34 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
sel = users.select(User.id.in_([7, 8])).alias()
q = sess.query(User)
u2 = aliased(User)
- q2 = q.select_from(sel).filter(u2.id>1).order_by(User.id, sel.c.id, u2.id).values(User.name, sel.c.name, u2.name)
- eq_(list(q2), [(u'jack', u'jack', u'jack'), (u'jack', u'jack', u'ed'), (u'jack', u'jack', u'fred'), (u'jack', u'jack', u'chuck'), (u'ed', u'ed', u'jack'), (u'ed', u'ed', u'ed'), (u'ed', u'ed', u'fred'), (u'ed', u'ed', u'chuck')])
+ q2 = q.select_from(sel).\
+ filter(u2.id>1).\
+ order_by(User.id, sel.c.id, u2.id).\
+ values(User.name, sel.c.name, u2.name)
+ eq_(list(q2), [(u'jack', u'jack', u'jack'), (u'jack', u'jack', u'ed'),
+ (u'jack', u'jack', u'fred'), (u'jack', u'jack', u'chuck'),
+ (u'ed', u'ed', u'jack'), (u'ed', u'ed', u'ed'),
+ (u'ed', u'ed', u'fred'), (u'ed', u'ed', u'chuck')])
@testing.fails_on('mssql', 'FIXME: unknown')
- @testing.fails_on('oracle', "Oracle doesn't support boolean expressions as columns")
- @testing.fails_on('postgresql+pg8000', "pg8000 parses the SQL itself before passing on to PG, doesn't parse this")
- @testing.fails_on('postgresql+zxjdbc', "zxjdbc parses the SQL itself before passing on to PG, doesn't parse this")
+ @testing.fails_on('oracle',
+ "Oracle doesn't support boolean expressions as "
+ "columns")
+ @testing.fails_on('postgresql+pg8000',
+ "pg8000 parses the SQL itself before passing on "
+ "to PG, doesn't parse this")
+ @testing.fails_on('postgresql+zxjdbc',
+ "zxjdbc parses the SQL itself before passing on "
+ "to PG, doesn't parse this")
def test_values_with_boolean_selects(self):
- """Tests a values clause that works with select boolean evaluations"""
+ """Tests a values clause that works with select boolean
+ evaluations"""
sess = create_session()
q = sess.query(User)
- q2 = q.group_by(User.name.like('%j%')).order_by(desc(User.name.like('%j%'))).values(User.name.like('%j%'), func.count(User.name.like('%j%')))
+ q2 = q.group_by(User.name.like('%j%')).\
+ order_by(desc(User.name.like('%j%'))).\
+ values(User.name.like('%j%'), func.count(User.name.like('%j%')))
eq_(list(q2), [(True, 1), (False, 3)])
q2 = q.order_by(desc(User.name.like('%j%'))).values(User.name.like('%j%'))
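The reformatted test above drives Query.values() with a boolean expression used both as a grouping and an ordering column. A minimal standalone sketch of the same idea, assuming a trivial declarative User mapping rather than the suite's fixtures:

    from sqlalchemy import create_engine, Column, Integer, String, func, desc
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import sessionmaker

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'users'
        id = Column(Integer, primary_key=True)
        name = Column(String(30))

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    sess = sessionmaker(bind=engine)()
    sess.add_all([User(name=n) for n in ('jack', 'ed', 'fred', 'chuck')])
    sess.commit()

    # a boolean SQL expression, usable as a result column on backends
    # that allow boolean expressions in the select list
    crit = User.name.like('%j%')
    q = sess.query(User).group_by(crit).order_by(desc(crit))
    print(list(q.values(crit, func.count(crit))))   # [(True, 1), (False, 3)]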
@@ -2933,7 +2991,8 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
# we don't want Address to be outside of the subquery here
eq_(
list(sess.query(User, subq)[0:3]),
- [(User(id=7,name=u'jack'), 1), (User(id=8,name=u'ed'), 3), (User(id=9,name=u'fred'), 1)]
+ [(User(id=7,name=u'jack'), 1), (User(id=8,name=u'ed'), 3),
+ (User(id=9,name=u'fred'), 1)]
)
# same thing without the correlate, as it should
@@ -2945,7 +3004,8 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
# we don't want Address to be outside of the subquery here
eq_(
list(sess.query(User, subq)[0:3]),
- [(User(id=7,name=u'jack'), 1), (User(id=8,name=u'ed'), 3), (User(id=9,name=u'fred'), 1)]
+ [(User(id=7,name=u'jack'), 1), (User(id=8,name=u'ed'), 3),
+ (User(id=9,name=u'fred'), 1)]
)
def test_tuple_labeling(self):
@@ -2957,21 +3017,21 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
if pickled is not False:
row = util.pickle.loads(util.pickle.dumps(row, pickled))
- eq_(set(row.keys()), set(['User', 'Address']))
+ eq_(row.keys(), ['User', 'Address'])
eq_(row.User, row[0])
eq_(row.Address, row[1])
for row in sess.query(User.name, User.id.label('foobar')):
if pickled is not False:
row = util.pickle.loads(util.pickle.dumps(row, pickled))
- eq_(set(row.keys()), set(['name', 'foobar']))
+ eq_(row.keys(), ['name', 'foobar'])
eq_(row.name, row[0])
eq_(row.foobar, row[1])
for row in sess.query(User).values(User.name, User.id.label('foobar')):
if pickled is not False:
row = util.pickle.loads(util.pickle.dumps(row, pickled))
- eq_(set(row.keys()), set(['name', 'foobar']))
+ eq_(row.keys(), ['name', 'foobar'])
eq_(row.name, row[0])
eq_(row.foobar, row[1])
@@ -2979,17 +3039,23 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
for row in sess.query(User, oalias).join(User.orders).all():
if pickled is not False:
row = util.pickle.loads(util.pickle.dumps(row, pickled))
- eq_(set(row.keys()), set(['User']))
+ eq_(row.keys(), ['User'])
eq_(row.User, row[0])
oalias = aliased(Order, name='orders')
for row in sess.query(User, oalias).join(User.orders).all():
if pickled is not False:
row = util.pickle.loads(util.pickle.dumps(row, pickled))
- eq_(set(row.keys()), set(['User', 'orders']))
+ eq_(row.keys(), ['User', 'orders'])
eq_(row.User, row[0])
eq_(row.orders, row[1])
+ # test here that the first col is not labeled; only
+ # one name appears in keys, and it matches correctly
+ for row in sess.query(User.name + 'hoho', User.name):
+ eq_(row.keys(), ['name'])
+ eq_(row[0], row.name + 'hoho')
+
if pickled is not False:
ret = sess.query(User, Address).join(User.addresses).all()
util.pickle.loads(util.pickle.dumps(ret, pickled))
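The new loop added in this hunk pins down keyed-tuple labeling: an expression derived from a column (User.name + 'hoho') stays anonymous, so only the plain column contributes a name to keys(). A hedged sketch, again assuming a minimal declarative mapping:

    from sqlalchemy import create_engine, Column, Integer, String
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import sessionmaker

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'users'
        id = Column(Integer, primary_key=True)
        name = Column(String(30))

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    sess = sessionmaker(bind=engine)()
    sess.add(User(name='jack'))
    sess.commit()

    for row in sess.query(User.name + 'hoho', User.name):
        print(row.keys())                    # ['name'] - the derived column is unlabeled
        print(row[0] == row.name + 'hoho')   # True; positional access still works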
@@ -3010,30 +3076,47 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
(u'fred', u'fred@fred.com')
])
- eq_(sess.query(User.name, func.count(Address.email_address)).outerjoin(User.addresses).group_by(User.id, User.name).order_by(User.id).all(),
+ eq_(sess.query(User.name, func.count(Address.email_address)).\
+ outerjoin(User.addresses).group_by(User.id, User.name).\
+ order_by(User.id).all(),
[(u'jack', 1), (u'ed', 3), (u'fred', 1), (u'chuck', 0)]
)
- eq_(sess.query(User, func.count(Address.email_address)).outerjoin(User.addresses).group_by(User).order_by(User.id).all(),
- [(User(name='jack',id=7), 1), (User(name='ed',id=8), 3), (User(name='fred',id=9), 1), (User(name='chuck',id=10), 0)]
+ eq_(sess.query(User, func.count(Address.email_address)).\
+ outerjoin(User.addresses).group_by(User).\
+ order_by(User.id).all(),
+ [(User(name='jack',id=7), 1), (User(name='ed',id=8), 3),
+ (User(name='fred',id=9), 1), (User(name='chuck',id=10), 0)]
)
- eq_(sess.query(func.count(Address.email_address), User).outerjoin(User.addresses).group_by(User).order_by(User.id).all(),
- [(1, User(name='jack',id=7)), (3, User(name='ed',id=8)), (1, User(name='fred',id=9)), (0, User(name='chuck',id=10))]
+ eq_(sess.query(func.count(Address.email_address), User).\
+ outerjoin(User.addresses).group_by(User).\
+ order_by(User.id).all(),
+ [(1, User(name='jack',id=7)), (3, User(name='ed',id=8)),
+ (1, User(name='fred',id=9)), (0, User(name='chuck',id=10))]
)
adalias = aliased(Address)
- eq_(sess.query(User, func.count(adalias.email_address)).outerjoin(('addresses', adalias)).group_by(User).order_by(User.id).all(),
- [(User(name='jack',id=7), 1), (User(name='ed',id=8), 3), (User(name='fred',id=9), 1), (User(name='chuck',id=10), 0)]
+ eq_(sess.query(User, func.count(adalias.email_address)).\
+ outerjoin(('addresses', adalias)).group_by(User).\
+ order_by(User.id).all(),
+ [(User(name='jack',id=7), 1), (User(name='ed',id=8), 3),
+ (User(name='fred',id=9), 1), (User(name='chuck',id=10), 0)]
)
- eq_(sess.query(func.count(adalias.email_address), User).outerjoin((User.addresses, adalias)).group_by(User).order_by(User.id).all(),
- [(1, User(name=u'jack',id=7)), (3, User(name=u'ed',id=8)), (1, User(name=u'fred',id=9)), (0, User(name=u'chuck',id=10))]
+ eq_(sess.query(func.count(adalias.email_address), User).\
+ outerjoin((User.addresses, adalias)).group_by(User).\
+ order_by(User.id).all(),
+ [(1, User(name=u'jack',id=7)), (3, User(name=u'ed',id=8)),
+ (1, User(name=u'fred',id=9)), (0, User(name=u'chuck',id=10))]
)
# select from aliasing + explicit aliasing
eq_(
- sess.query(User, adalias.email_address, adalias.id).outerjoin((User.addresses, adalias)).from_self(User, adalias.email_address).order_by(User.id, adalias.id).all(),
+ sess.query(User, adalias.email_address, adalias.id).\
+ outerjoin((User.addresses, adalias)).\
+ from_self(User, adalias.email_address).\
+ order_by(User.id, adalias.id).all(),
[
(User(name=u'jack',id=7), u'jack@bean.com'),
(User(name=u'ed',id=8), u'ed@wood.com'),
@@ -3046,7 +3129,9 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
# anon + select from aliasing
eq_(
- sess.query(User).join(User.addresses, aliased=True).filter(Address.email_address.like('%ed%')).from_self().all(),
+ sess.query(User).join(User.addresses, aliased=True).\
+ filter(Address.email_address.like('%ed%')).\
+ from_self().all(),
[
User(name=u'ed',id=8),
User(name=u'fred',id=9),
@@ -3055,26 +3140,39 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
# test eager aliasing, with/without select_from aliasing
for q in [
- sess.query(User, adalias.email_address).outerjoin((User.addresses, adalias)).options(joinedload(User.addresses)).order_by(User.id, adalias.id).limit(10),
- sess.query(User, adalias.email_address, adalias.id).outerjoin((User.addresses, adalias)).from_self(User, adalias.email_address).options(joinedload(User.addresses)).order_by(User.id, adalias.id).limit(10),
+ sess.query(User, adalias.email_address).\
+ outerjoin((User.addresses, adalias)).\
+ options(joinedload(User.addresses)).\
+ order_by(User.id, adalias.id).limit(10),
+ sess.query(User, adalias.email_address, adalias.id).\
+ outerjoin((User.addresses, adalias)).\
+ from_self(User, adalias.email_address).\
+ options(joinedload(User.addresses)).\
+ order_by(User.id, adalias.id).limit(10),
]:
eq_(
q.all(),
- [(User(addresses=[Address(user_id=7,email_address=u'jack@bean.com',id=1)],name=u'jack',id=7), u'jack@bean.com'),
+ [(User(addresses=[
+ Address(user_id=7,email_address=u'jack@bean.com',id=1)],
+ name=u'jack',id=7), u'jack@bean.com'),
(User(addresses=[
Address(user_id=8,email_address=u'ed@wood.com',id=2),
Address(user_id=8,email_address=u'ed@bettyboop.com',id=3),
- Address(user_id=8,email_address=u'ed@lala.com',id=4)],name=u'ed',id=8), u'ed@wood.com'),
+ Address(user_id=8,email_address=u'ed@lala.com',id=4)],
+ name=u'ed',id=8), u'ed@wood.com'),
(User(addresses=[
Address(user_id=8,email_address=u'ed@wood.com',id=2),
Address(user_id=8,email_address=u'ed@bettyboop.com',id=3),
- Address(user_id=8,email_address=u'ed@lala.com',id=4)],name=u'ed',id=8), u'ed@bettyboop.com'),
+ Address(user_id=8,email_address=u'ed@lala.com',id=4)],name=u'ed',id=8),
+ u'ed@bettyboop.com'),
(User(addresses=[
Address(user_id=8,email_address=u'ed@wood.com',id=2),
Address(user_id=8,email_address=u'ed@bettyboop.com',id=3),
- Address(user_id=8,email_address=u'ed@lala.com',id=4)],name=u'ed',id=8), u'ed@lala.com'),
- (User(addresses=[Address(user_id=9,email_address=u'fred@fred.com',id=5)],name=u'fred',id=9), u'fred@fred.com'),
+ Address(user_id=8,email_address=u'ed@lala.com',id=4)],name=u'ed',id=8),
+ u'ed@lala.com'),
+ (User(addresses=[Address(user_id=9,email_address=u'fred@fred.com',id=5)],name=u'fred',id=9),
+ u'fred@fred.com'),
(User(addresses=[],name=u'chuck',id=10), None)]
)
@@ -3083,7 +3181,9 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
sess = create_session()
def go():
- results = sess.query(User).limit(1).options(joinedload('addresses')).add_column(User.name).all()
+ results = sess.query(User).limit(1).\
+ options(joinedload('addresses')).\
+ add_column(User.name).all()
eq_(results, [(User(name='jack'), 'jack')])
self.assert_sql_count(testing.db, go, 1)
@@ -3094,26 +3194,41 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
oalias = aliased(Order)
for q in [
- sess.query(Order, oalias).filter(Order.user_id==oalias.user_id).filter(Order.user_id==7).filter(Order.id>oalias.id).order_by(Order.id, oalias.id),
- sess.query(Order, oalias).from_self().filter(Order.user_id==oalias.user_id).filter(Order.user_id==7).filter(Order.id>oalias.id).order_by(Order.id, oalias.id),
+ sess.query(Order, oalias).\
+ filter(Order.user_id==oalias.user_id).filter(Order.user_id==7).\
+ filter(Order.id>oalias.id).order_by(Order.id, oalias.id),
+ sess.query(Order, oalias).from_self().filter(Order.user_id==oalias.user_id).\
+ filter(Order.user_id==7).filter(Order.id>oalias.id).\
+ order_by(Order.id, oalias.id),
# same thing, but reversed.
- sess.query(oalias, Order).from_self().filter(oalias.user_id==Order.user_id).filter(oalias.user_id==7).filter(Order.id<oalias.id).order_by(oalias.id, Order.id),
+ sess.query(oalias, Order).from_self().filter(oalias.user_id==Order.user_id).\
+ filter(oalias.user_id==7).filter(Order.id<oalias.id).\
+ order_by(oalias.id, Order.id),
# here we go....two layers of aliasing
- sess.query(Order, oalias).filter(Order.user_id==oalias.user_id).filter(Order.user_id==7).filter(Order.id>oalias.id).from_self().order_by(Order.id, oalias.id).limit(10).options(joinedload(Order.items)),
+ sess.query(Order, oalias).filter(Order.user_id==oalias.user_id).\
+ filter(Order.user_id==7).filter(Order.id>oalias.id).\
+ from_self().order_by(Order.id, oalias.id).\
+ limit(10).options(joinedload(Order.items)),
# gratuitous four layers
- sess.query(Order, oalias).filter(Order.user_id==oalias.user_id).filter(Order.user_id==7).filter(Order.id>oalias.id).from_self().from_self().from_self().order_by(Order.id, oalias.id).limit(10).options(joinedload(Order.items)),
+ sess.query(Order, oalias).filter(Order.user_id==oalias.user_id).\
+ filter(Order.user_id==7).filter(Order.id>oalias.id).from_self().\
+ from_self().from_self().order_by(Order.id, oalias.id).\
+ limit(10).options(joinedload(Order.items)),
]:
eq_(
q.all(),
[
- (Order(address_id=1,description=u'order 3',isopen=1,user_id=7,id=3), Order(address_id=1,description=u'order 1',isopen=0,user_id=7,id=1)),
- (Order(address_id=None,description=u'order 5',isopen=0,user_id=7,id=5), Order(address_id=1,description=u'order 1',isopen=0,user_id=7,id=1)),
- (Order(address_id=None,description=u'order 5',isopen=0,user_id=7,id=5), Order(address_id=1,description=u'order 3',isopen=1,user_id=7,id=3))
+ (Order(address_id=1,description=u'order 3',isopen=1,user_id=7,id=3),
+ Order(address_id=1,description=u'order 1',isopen=0,user_id=7,id=1)),
+ (Order(address_id=None,description=u'order 5',isopen=0,user_id=7,id=5),
+ Order(address_id=1,description=u'order 1',isopen=0,user_id=7,id=1)),
+ (Order(address_id=None,description=u'order 5',isopen=0,user_id=7,id=5),
+ Order(address_id=1,description=u'order 3',isopen=1,user_id=7,id=3))
]
)
@@ -3121,10 +3236,16 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
# ensure column expressions are taken from inside the subquery, not restated at the top
q = sess.query(Order.id, Order.description, literal_column("'q'").label('foo')).\
filter(Order.description == u'order 3').from_self()
- self.assert_compile(q,
- "SELECT anon_1.orders_id AS anon_1_orders_id, anon_1.orders_description AS anon_1_orders_description, "
- "anon_1.foo AS anon_1_foo FROM (SELECT orders.id AS orders_id, orders.description AS orders_description, "
- "'q' AS foo FROM orders WHERE orders.description = :description_1) AS anon_1", use_default_dialect=True)
+ self.assert_compile(q,
+ "SELECT anon_1.orders_id AS "
+ "anon_1_orders_id, anon_1.orders_descriptio"
+ "n AS anon_1_orders_description, "
+ "anon_1.foo AS anon_1_foo FROM (SELECT "
+ "orders.id AS orders_id, "
+ "orders.description AS orders_description, "
+ "'q' AS foo FROM orders WHERE "
+ "orders.description = :description_1) AS "
+ "anon_1", use_default_dialect=True)
eq_(
q.all(),
[(3, u'order 3', 'q')]
@@ -3136,7 +3257,8 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
test_session = create_session()
(user7, user8, user9, user10) = test_session.query(User).all()
- (address1, address2, address3, address4, address5) = test_session.query(Address).all()
+ (address1, address2, address3, address4, address5) = \
+ test_session.query(Address).all()
expected = [(user7, address1),
(user8, address2),
@@ -3152,7 +3274,9 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
sess.expunge_all()
for address_entity in (Address, aliased(Address)):
- q = sess.query(User).add_entity(address_entity).outerjoin(('addresses', address_entity)).order_by(User.id, address_entity.id)
+ q = sess.query(User).add_entity(address_entity).\
+ outerjoin(('addresses', address_entity)).\
+ order_by(User.id, address_entity.id)
eq_(q.all(), expected)
sess.expunge_all()
@@ -3161,11 +3285,14 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
eq_(q.all(), [(user8, address3)])
sess.expunge_all()
- q = sess.query(User, address_entity).join(('addresses', address_entity)).filter_by(email_address='ed@bettyboop.com')
+ q = sess.query(User, address_entity).join(('addresses', address_entity)).\
+ filter_by(email_address='ed@bettyboop.com')
eq_(q.all(), [(user8, address3)])
sess.expunge_all()
- q = sess.query(User, address_entity).join(('addresses', address_entity)).options(joinedload('addresses')).filter_by(email_address='ed@bettyboop.com')
+ q = sess.query(User, address_entity).join(('addresses', address_entity)).\
+ options(joinedload('addresses')).\
+ filter_by(email_address='ed@bettyboop.com')
eq_(list(util.OrderedSet(q.all())), [(user8, address3)])
sess.expunge_all()
@@ -3194,6 +3321,24 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
l = q.select_from(users.outerjoin(adalias)).filter(adalias.c.email_address=='ed@bettyboop.com').all()
assert l == [(user8, address3)]
+ def test_with_entities(self):
+ sess = create_session()
+
+ q = sess.query(User).filter(User.id==7).order_by(User.name)
+
+ self.assert_compile(
+ q.with_entities(User.id,Address).\
+ filter(Address.user_id == User.id),
+ 'SELECT users.id AS users_id, addresses.id '
+ 'AS addresses_id, addresses.user_id AS '
+ 'addresses_user_id, addresses.email_address'
+ ' AS addresses_email_address FROM users, '
+ 'addresses WHERE users.id = :id_1 AND '
+ 'addresses.user_id = users.id ORDER BY '
+ 'users.name',
+ use_default_dialect=True)
+
+
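The new test documents Query.with_entities(), which swaps the SELECT list of an existing Query while keeping its criteria and ordering, as the compiled SQL shows. A condensed sketch under the same assumption of a minimal declarative mapping:

    from sqlalchemy import create_engine, Column, Integer, String, ForeignKey
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import sessionmaker

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'users'
        id = Column(Integer, primary_key=True)
        name = Column(String(30))

    class Address(Base):
        __tablename__ = 'addresses'
        id = Column(Integer, primary_key=True)
        user_id = Column(Integer, ForeignKey('users.id'))
        email_address = Column(String(50))

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    sess = sessionmaker(bind=engine)()

    q = sess.query(User).filter(User.id == 7).order_by(User.name)
    # swap the SELECT list; the WHERE and ORDER BY criteria carry over
    print(q.with_entities(User.id, Address).filter(Address.user_id == User.id))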
def test_multi_columns(self):
sess = create_session()
@@ -3227,18 +3372,22 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
]
q = sess.query(User)
- q = q.group_by(users).order_by(User.id).outerjoin('addresses').add_column(func.count(Address.id).label('count'))
+ q = q.group_by(users).order_by(User.id).outerjoin('addresses').\
+ add_column(func.count(Address.id).label('count'))
eq_(q.all(), expected)
sess.expunge_all()
adalias = aliased(Address)
q = sess.query(User)
- q = q.group_by(users).order_by(User.id).outerjoin(('addresses', adalias)).add_column(func.count(adalias.id).label('count'))
+ q = q.group_by(users).order_by(User.id).outerjoin(('addresses', adalias)).\
+ add_column(func.count(adalias.id).label('count'))
eq_(q.all(), expected)
sess.expunge_all()
# TODO: figure out why group_by(users) doesn't work here
- s = select([users, func.count(addresses.c.id).label('count')]).select_from(users.outerjoin(addresses)).group_by(*[c for c in users.c]).order_by(User.id)
+ s = select([users, func.count(addresses.c.id).label('count')]).\
+ select_from(users.outerjoin(addresses)).\
+ group_by(*[c for c in users.c]).order_by(User.id)
q = sess.query(User)
l = q.add_column("count").from_statement(s).all()
assert l == expected
@@ -3261,7 +3410,10 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
assert q.all() == expected
# test with a straight statement
- s = select([users, func.count(addresses.c.id).label('count'), ("Name:" + users.c.name).label('concat')], from_obj=[users.outerjoin(addresses)], group_by=[c for c in users.c], order_by=[users.c.id])
+ s = select([users, func.count(addresses.c.id).label('count'),
+ ("Name:" + users.c.name).label('concat')],
+ from_obj=[users.outerjoin(addresses)],
+ group_by=[c for c in users.c], order_by=[users.c.id])
q = create_session().query(User)
l = q.add_column("count").add_column("concat").from_statement(s).all()
assert l == expected
@@ -4143,7 +4295,10 @@ class ExternalColumnsTest(QueryTest):
mapper(User, users, properties={
'concat': column_property((users.c.id * 2)),
- 'count': column_property(select([func.count(addresses.c.id)], users.c.id==addresses.c.user_id).correlate(users).as_scalar())
+ 'count': column_property(
+ select([func.count(addresses.c.id)], users.c.id==addresses.c.user_id).\
+ correlate(users).\
+ as_scalar())
})
mapper(Address, addresses, properties={
@@ -4176,7 +4331,10 @@ class ExternalColumnsTest(QueryTest):
for x in range(2):
sess.expunge_all()
def go():
- eq_(sess.query(Address).options(joinedload('user')).order_by(Address.id).all(), address_result)
+ eq_(sess.query(Address).\
+ options(joinedload('user')).\
+ order_by(Address.id).all(),
+ address_result)
self.assert_sql_count(testing.db, go, 1)
ualias = aliased(User)
@@ -4186,7 +4344,10 @@ class ExternalColumnsTest(QueryTest):
)
eq_(
- sess.query(Address, ualias.count).join(('user', ualias)).join('user', aliased=True).order_by(Address.id).all(),
+ sess.query(Address, ualias.count).\
+ join(('user', ualias)).\
+ join('user', aliased=True).\
+ order_by(Address.id).all(),
[
(Address(id=1), 1),
(Address(id=2), 3),
diff --git a/test/orm/test_unitofwork.py b/test/orm/test_unitofwork.py
index 0362a157e..d93f11f92 100644
--- a/test/orm/test_unitofwork.py
+++ b/test/orm/test_unitofwork.py
@@ -359,6 +359,25 @@ class MutableTypesTest(_base.MappedTest):
gc.collect()
f1 = session.query(Foo).first()
assert not attributes.instance_state(f1).modified
+
+ @testing.resolve_artifact_names
+ def test_modified_after_mutable_change(self):
+ f1 = Foo(data = pickleable.Bar(4, 5), val=u'some val')
+ session = Session()
+ session.add(f1)
+ session.commit()
+ f1.data.x = 10
+ f1.data.y = 15
+ f1.val=u'some new val'
+
+ assert sa.orm.attributes.instance_state(f1)._strong_obj is not None
+
+ del f1
+ session.commit()
+ eq_(
+ session.query(Foo.val).all(),
+ [('some new val', )]
+ )
@testing.resolve_artifact_names
def test_unicode(self):
diff --git a/test/sql/test_compiler.py b/test/sql/test_compiler.py
index 4c712ce38..338a5491e 100644
--- a/test/sql/test_compiler.py
+++ b/test/sql/test_compiler.py
@@ -129,24 +129,34 @@ class SelectTest(TestBase, AssertsCompiledSQL):
use_labels = True
).alias('sq')
- sqstring = "SELECT mytable.myid AS mytable_myid, mytable.name AS mytable_name, \
-mytable.description AS mytable_description, myothertable.otherid AS myothertable_otherid, \
-myothertable.othername AS myothertable_othername FROM mytable, myothertable \
-WHERE mytable.myid = :myid_1 AND myothertable.otherid = mytable.myid"
+ sqstring = "SELECT mytable.myid AS mytable_myid, mytable.name AS "\
+ "mytable_name, mytable.description AS mytable_description, "\
+ "myothertable.otherid AS myothertable_otherid, "\
+ "myothertable.othername AS myothertable_othername FROM "\
+ "mytable, myothertable WHERE mytable.myid = :myid_1 AND "\
+ "myothertable.otherid = mytable.myid"
- self.assert_compile(sq.select(), "SELECT sq.mytable_myid, sq.mytable_name, sq.mytable_description, sq.myothertable_otherid, \
-sq.myothertable_othername FROM (" + sqstring + ") AS sq")
+ self.assert_compile(
+ sq.select(),
+ "SELECT sq.mytable_myid, sq.mytable_name, "
+ "sq.mytable_description, sq.myothertable_otherid, "
+ "sq.myothertable_othername FROM (%s) AS sq" % sqstring)
sq2 = select(
[sq],
use_labels = True
).alias('sq2')
- self.assert_compile(sq2.select(), "SELECT sq2.sq_mytable_myid, sq2.sq_mytable_name, sq2.sq_mytable_description, \
-sq2.sq_myothertable_otherid, sq2.sq_myothertable_othername FROM \
-(SELECT sq.mytable_myid AS sq_mytable_myid, sq.mytable_name AS sq_mytable_name, \
-sq.mytable_description AS sq_mytable_description, sq.myothertable_otherid AS sq_myothertable_otherid, \
-sq.myothertable_othername AS sq_myothertable_othername FROM (" + sqstring + ") AS sq) AS sq2")
+ self.assert_compile(
+ sq2.select(),
+ "SELECT sq2.sq_mytable_myid, sq2.sq_mytable_name, "
+ "sq2.sq_mytable_description, sq2.sq_myothertable_otherid, "
+ "sq2.sq_myothertable_othername FROM (SELECT sq.mytable_myid AS "
+ "sq_mytable_myid, sq.mytable_name AS sq_mytable_name, "
+ "sq.mytable_description AS sq_mytable_description, "
+ "sq.myothertable_otherid AS sq_myothertable_otherid, "
+ "sq.myothertable_othername AS sq_myothertable_othername "
+ "FROM (%s) AS sq) AS sq2" % sqstring)
def test_select_from_clauselist(self):
self.assert_compile(
@@ -645,23 +655,28 @@ sq.myothertable_othername AS sq_myothertable_othername FROM (" + sqstring + ") A
def test_distinct(self):
self.assert_compile(
- select([table1.c.myid.distinct()]), "SELECT DISTINCT mytable.myid FROM mytable"
+ select([table1.c.myid.distinct()]),
+ "SELECT DISTINCT mytable.myid FROM mytable"
)
self.assert_compile(
- select([distinct(table1.c.myid)]), "SELECT DISTINCT mytable.myid FROM mytable"
+ select([distinct(table1.c.myid)]),
+ "SELECT DISTINCT mytable.myid FROM mytable"
)
self.assert_compile(
- select([table1.c.myid]).distinct(), "SELECT DISTINCT mytable.myid FROM mytable"
+ select([table1.c.myid]).distinct(),
+ "SELECT DISTINCT mytable.myid FROM mytable"
)
self.assert_compile(
- select([func.count(table1.c.myid.distinct())]), "SELECT count(DISTINCT mytable.myid) AS count_1 FROM mytable"
+ select([func.count(table1.c.myid.distinct())]),
+ "SELECT count(DISTINCT mytable.myid) AS count_1 FROM mytable"
)
self.assert_compile(
- select([func.count(distinct(table1.c.myid))]), "SELECT count(DISTINCT mytable.myid) AS count_1 FROM mytable"
+ select([func.count(distinct(table1.c.myid))]),
+ "SELECT count(DISTINCT mytable.myid) AS count_1 FROM mytable"
)
def test_operators(self):
@@ -731,24 +746,31 @@ sq.myothertable_othername AS sq_myothertable_othername FROM (" + sqstring + ") A
self.assert_compile(
table1.select((table1.c.myid != 12) & ~(table1.c.name=='john')),
- "SELECT mytable.myid, mytable.name, mytable.description FROM mytable WHERE mytable.myid != :myid_1 AND mytable.name != :name_1"
+ "SELECT mytable.myid, mytable.name, mytable.description FROM "
+ "mytable WHERE mytable.myid != :myid_1 AND mytable.name != :name_1"
)
self.assert_compile(
- table1.select((table1.c.myid != 12) & ~(table1.c.name.between('jack','john'))),
- "SELECT mytable.myid, mytable.name, mytable.description FROM mytable WHERE mytable.myid != :myid_1 AND "\
- "NOT (mytable.name BETWEEN :name_1 AND :name_2)"
+ table1.select((table1.c.myid != 12) &
+ ~(table1.c.name.between('jack','john'))),
+ "SELECT mytable.myid, mytable.name, mytable.description FROM "
+ "mytable WHERE mytable.myid != :myid_1 AND "\
+ "NOT (mytable.name BETWEEN :name_1 AND :name_2)"
)
self.assert_compile(
- table1.select((table1.c.myid != 12) & ~and_(table1.c.name=='john', table1.c.name=='ed', table1.c.name=='fred')),
- "SELECT mytable.myid, mytable.name, mytable.description FROM mytable WHERE mytable.myid != :myid_1 AND "\
- "NOT (mytable.name = :name_1 AND mytable.name = :name_2 AND mytable.name = :name_3)"
+ table1.select((table1.c.myid != 12) &
+ ~and_(table1.c.name=='john', table1.c.name=='ed', table1.c.name=='fred')),
+ "SELECT mytable.myid, mytable.name, mytable.description FROM "
+ "mytable WHERE mytable.myid != :myid_1 AND "\
+ "NOT (mytable.name = :name_1 AND mytable.name = :name_2 "
+ "AND mytable.name = :name_3)"
)
self.assert_compile(
table1.select((table1.c.myid != 12) & ~table1.c.name),
- "SELECT mytable.myid, mytable.name, mytable.description FROM mytable WHERE mytable.myid != :myid_1 AND NOT mytable.name"
+ "SELECT mytable.myid, mytable.name, mytable.description FROM "
+ "mytable WHERE mytable.myid != :myid_1 AND NOT mytable.name"
)
self.assert_compile(
@@ -758,11 +780,13 @@ sq.myothertable_othername AS sq_myothertable_othername FROM (" + sqstring + ") A
# test the op() function, also that its results are further usable in expressions
self.assert_compile(
table1.select(table1.c.myid.op('hoho')(12)==14),
- "SELECT mytable.myid, mytable.name, mytable.description FROM mytable WHERE (mytable.myid hoho :myid_1) = :param_1"
+ "SELECT mytable.myid, mytable.name, mytable.description FROM "
+ "mytable WHERE (mytable.myid hoho :myid_1) = :param_1"
)
# test that clauses can be pickled (operators need to be module-level, etc.)
- clause = (table1.c.myid == 12) & table1.c.myid.between(15, 20) & table1.c.myid.like('hoho')
+ clause = (table1.c.myid == 12) & table1.c.myid.between(15, 20) & \
+ table1.c.myid.like('hoho')
assert str(clause) == str(util.pickle.loads(util.pickle.dumps(clause)))
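Both behaviors exercised here are easy to see in isolation: op() produces an arbitrary infix operator whose result is itself a full expression, and constructed clauses survive a pickle round trip as long as their operators are module-level. A sketch against the default dialect:

    import pickle
    from sqlalchemy import Integer
    from sqlalchemy.sql import table, column

    t = table('mytable', column('myid', Integer))

    # the op() result is comparable like any other expression
    expr = t.c.myid.op('hoho')(12) == 14
    print(expr)   # (mytable.myid hoho :myid_1) = :param_1

    clause = (t.c.myid == 12) & t.c.myid.between(15, 20) & t.c.myid.like('hoho')
    print(str(clause) == str(pickle.loads(pickle.dumps(clause))))   # True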
@@ -816,47 +840,85 @@ sq.myothertable_othername AS sq_myothertable_othername FROM (" + sqstring + ") A
def test_match(self):
for expr, check, dialect in [
- (table1.c.myid.match('somstr'), "mytable.myid MATCH ?", sqlite.SQLiteDialect()),
- (table1.c.myid.match('somstr'), "MATCH (mytable.myid) AGAINST (%s IN BOOLEAN MODE)", mysql.dialect()),
- (table1.c.myid.match('somstr'), "CONTAINS (mytable.myid, :myid_1)", mssql.dialect()),
- (table1.c.myid.match('somstr'), "mytable.myid @@ to_tsquery(%(myid_1)s)", postgresql.dialect()),
- (table1.c.myid.match('somstr'), "CONTAINS (mytable.myid, :myid_1)", oracle.dialect()),
+ (table1.c.myid.match('somstr'),
+ "mytable.myid MATCH ?", sqlite.SQLiteDialect()),
+ (table1.c.myid.match('somstr'),
+ "MATCH (mytable.myid) AGAINST (%s IN BOOLEAN MODE)",
+ mysql.dialect()),
+ (table1.c.myid.match('somstr'),
+ "CONTAINS (mytable.myid, :myid_1)",
+ mssql.dialect()),
+ (table1.c.myid.match('somstr'),
+ "mytable.myid @@ to_tsquery(%(myid_1)s)",
+ postgresql.dialect()),
+ (table1.c.myid.match('somstr'),
+ "CONTAINS (mytable.myid, :myid_1)",
+ oracle.dialect()),
]:
self.assert_compile(expr, check, dialect=dialect)
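The table above is the whole story for match(): the construct is dialect-agnostic, and each backend's compiler renders its own full-text syntax. The same can be seen directly by compiling one expression against several dialects:

    from sqlalchemy.sql import table, column
    from sqlalchemy.dialects import sqlite, mysql, postgresql

    t = table('mytable', column('myid'))
    expr = t.c.myid.match('somstr')
    for d in (sqlite.dialect(), mysql.dialect(), postgresql.dialect()):
        # e.g. "mytable.myid MATCH ?" on sqlite,
        # "MATCH (mytable.myid) AGAINST (%s IN BOOLEAN MODE)" on mysql
        print(expr.compile(dialect=d))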
def test_composed_string_comparators(self):
self.assert_compile(
- table1.c.name.contains('jo'), "mytable.name LIKE '%%' || :name_1 || '%%'" , checkparams = {'name_1': u'jo'},
+ table1.c.name.contains('jo'),
+ "mytable.name LIKE '%%' || :name_1 || '%%'" ,
+ checkparams = {'name_1': u'jo'},
)
self.assert_compile(
- table1.c.name.contains('jo'), "mytable.name LIKE concat(concat('%%', %s), '%%')" , checkparams = {'name_1': u'jo'},
+ table1.c.name.contains('jo'),
+ "mytable.name LIKE concat(concat('%%', %s), '%%')" ,
+ checkparams = {'name_1': u'jo'},
dialect=mysql.dialect()
)
self.assert_compile(
- table1.c.name.contains('jo', escape='\\'), "mytable.name LIKE '%%' || :name_1 || '%%' ESCAPE '\\'" , checkparams = {'name_1': u'jo'},
+ table1.c.name.contains('jo', escape='\\'),
+ "mytable.name LIKE '%%' || :name_1 || '%%' ESCAPE '\\'" ,
+ checkparams = {'name_1': u'jo'},
)
- self.assert_compile( table1.c.name.startswith('jo', escape='\\'), "mytable.name LIKE :name_1 || '%%' ESCAPE '\\'" )
- self.assert_compile( table1.c.name.endswith('jo', escape='\\'), "mytable.name LIKE '%%' || :name_1 ESCAPE '\\'" )
- self.assert_compile( table1.c.name.endswith('hn'), "mytable.name LIKE '%%' || :name_1", checkparams = {'name_1': u'hn'}, )
self.assert_compile(
- table1.c.name.endswith('hn'), "mytable.name LIKE concat('%%', %s)",
+ table1.c.name.startswith('jo', escape='\\'),
+ "mytable.name LIKE :name_1 || '%%' ESCAPE '\\'" )
+ self.assert_compile(
+ table1.c.name.endswith('jo', escape='\\'),
+ "mytable.name LIKE '%%' || :name_1 ESCAPE '\\'" )
+ self.assert_compile(
+ table1.c.name.endswith('hn'),
+ "mytable.name LIKE '%%' || :name_1",
+ checkparams = {'name_1': u'hn'}, )
+ self.assert_compile(
+ table1.c.name.endswith('hn'),
+ "mytable.name LIKE concat('%%', %s)",
checkparams = {'name_1': u'hn'}, dialect=mysql.dialect()
)
self.assert_compile(
- table1.c.name.startswith(u"hi \xf6 \xf5"), "mytable.name LIKE :name_1 || '%%'",
+ table1.c.name.startswith(u"hi \xf6 \xf5"),
+ "mytable.name LIKE :name_1 || '%%'",
checkparams = {'name_1': u'hi \xf6 \xf5'},
)
- self.assert_compile(column('name').endswith(text("'foo'")), "name LIKE '%%' || 'foo'" )
- self.assert_compile(column('name').endswith(literal_column("'foo'")), "name LIKE '%%' || 'foo'" )
- self.assert_compile(column('name').startswith(text("'foo'")), "name LIKE 'foo' || '%%'" )
- self.assert_compile(column('name').startswith(text("'foo'")), "name LIKE concat('foo', '%%')", dialect=mysql.dialect())
- self.assert_compile(column('name').startswith(literal_column("'foo'")), "name LIKE 'foo' || '%%'" )
- self.assert_compile(column('name').startswith(literal_column("'foo'")), "name LIKE concat('foo', '%%')", dialect=mysql.dialect())
+ self.assert_compile(
+ column('name').endswith(text("'foo'")),
+ "name LIKE '%%' || 'foo'" )
+ self.assert_compile(
+ column('name').endswith(literal_column("'foo'")),
+ "name LIKE '%%' || 'foo'" )
+ self.assert_compile(
+ column('name').startswith(text("'foo'")),
+ "name LIKE 'foo' || '%%'" )
+ self.assert_compile(
+ column('name').startswith(text("'foo'")),
+ "name LIKE concat('foo', '%%')", dialect=mysql.dialect())
+ self.assert_compile(
+ column('name').startswith(literal_column("'foo'")),
+ "name LIKE 'foo' || '%%'" )
+ self.assert_compile(
+ column('name').startswith(literal_column("'foo'")),
+ "name LIKE concat('foo', '%%')", dialect=mysql.dialect())
def test_multiple_col_binds(self):
self.assert_compile(
- select(["*"], or_(table1.c.myid == 12, table1.c.myid=='asdf', table1.c.myid == 'foo')),
- "SELECT * FROM mytable WHERE mytable.myid = :myid_1 OR mytable.myid = :myid_2 OR mytable.myid = :myid_3"
+ select(["*"], or_(table1.c.myid == 12, table1.c.myid=='asdf',
+ table1.c.myid == 'foo')),
+ "SELECT * FROM mytable WHERE mytable.myid = :myid_1 "
+ "OR mytable.myid = :myid_2 OR mytable.myid = :myid_3"
)
def test_orderby_groupby(self):
@@ -997,7 +1059,9 @@ sq.myothertable_othername AS sq_myothertable_othername FROM (" + sqstring + ") A
def test_prefixes(self):
- self.assert_compile(table1.select().prefix_with("SQL_CALC_FOUND_ROWS").prefix_with("SQL_SOME_WEIRD_MYSQL_THING"),
+ self.assert_compile(
+ table1.select().prefix_with("SQL_CALC_FOUND_ROWS").\
+ prefix_with("SQL_SOME_WEIRD_MYSQL_THING"),
"SELECT SQL_CALC_FOUND_ROWS SQL_SOME_WEIRD_MYSQL_THING "
"mytable.myid, mytable.name, mytable.description FROM mytable"
)
@@ -1270,8 +1334,11 @@ sq.myothertable_othername AS sq_myothertable_othername FROM (" + sqstring + ") A
"COLLATE latin1_german2_ci AS x")
- expr = select([table1.c.name]).order_by(table1.c.name.collate('latin1_german2_ci'))
- self.assert_compile(expr, "SELECT mytable.name FROM mytable ORDER BY mytable.name COLLATE latin1_german2_ci")
+ expr = select([table1.c.name]).\
+ order_by(table1.c.name.collate('latin1_german2_ci'))
+ self.assert_compile(expr,
+ "SELECT mytable.name FROM mytable ORDER BY "
+ "mytable.name COLLATE latin1_german2_ci")
def test_percent_chars(self):
t = table("table%name",
@@ -1283,7 +1350,8 @@ sq.myothertable_othername AS sq_myothertable_othername FROM (" + sqstring + ") A
t.select(use_labels=True),
'''SELECT "table%name"."percent%" AS "table%name_percent%", '''\
'''"table%name"."%(oneofthese)s" AS "table%name_%(oneofthese)s", '''\
- '''"table%name"."spaces % more spaces" AS "table%name_spaces % more spaces" FROM "table%name"'''
+ '''"table%name"."spaces % more spaces" AS "table%name_spaces % '''\
+ '''more spaces" FROM "table%name"'''
)
@@ -1367,10 +1435,13 @@ sq.myothertable_othername AS sq_myothertable_othername FROM (" + sqstring + ") A
)
def test_compound_selects(self):
- try:
- union(table3.select(), table1.select())
- except exc.ArgumentError, err:
- assert str(err) == "All selectables passed to CompoundSelect must have identical numbers of columns; select #1 has 2 columns, select #2 has 3"
+ assert_raises_message(
+ exc.ArgumentError,
+ "All selectables passed to CompoundSelect "
+ "must have identical numbers of columns; "
+ "select #1 has 2 columns, select #2 has 3",
+ union, table3.select(), table1.select()
+ )
x = union(
select([table1], table1.c.myid == 5),
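The rewritten assertion relies on CompoundSelect validating column counts at construction time; standalone, that looks like:

    from sqlalchemy import union, exc
    from sqlalchemy.sql import table, column

    t1 = table('t1', column('a'), column('b'), column('c'))
    t2 = table('t2', column('x'), column('y'))

    try:
        union(t2.select(), t1.select())
    except exc.ArgumentError as err:
        # "All selectables passed to CompoundSelect must have identical
        # numbers of columns; select #1 has 2 columns, select #2 has 3"
        print(err)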
@@ -1580,29 +1651,51 @@ sq.myothertable_othername AS sq_myothertable_othername FROM (" + sqstring + ") A
table1.c.myid == table2.c.otherid,
table1.c.name == bindparam('mytablename')
)),
- """SELECT mytable.myid, mytable.name, mytable.description, myothertable.otherid, myothertable.othername FROM mytable, myothertable WHERE mytable.myid = myothertable.otherid AND mytable.name = :mytablename""",
- """SELECT mytable.myid, mytable.name, mytable.description, myothertable.otherid, myothertable.othername FROM mytable, myothertable WHERE mytable.myid = myothertable.otherid AND mytable.name = ?""",
+ "SELECT mytable.myid, mytable.name, mytable.description, "
+ "myothertable.otherid, myothertable.othername FROM mytable, "
+ "myothertable WHERE mytable.myid = myothertable.otherid "
+ "AND mytable.name = :mytablename",
+ "SELECT mytable.myid, mytable.name, mytable.description, "
+ "myothertable.otherid, myothertable.othername FROM mytable, "
+ "myothertable WHERE mytable.myid = myothertable.otherid AND "
+ "mytable.name = ?",
{'mytablename':None}, [None],
{'mytablename':5}, {'mytablename':5}, [5]
),
(
- select([table1], or_(table1.c.myid==bindparam('myid'), table2.c.otherid==bindparam('myid'))),
- "SELECT mytable.myid, mytable.name, mytable.description FROM mytable, myothertable WHERE mytable.myid = :myid OR myothertable.otherid = :myid",
- "SELECT mytable.myid, mytable.name, mytable.description FROM mytable, myothertable WHERE mytable.myid = ? OR myothertable.otherid = ?",
+ select([table1], or_(table1.c.myid==bindparam('myid'),
+ table2.c.otherid==bindparam('myid'))),
+ "SELECT mytable.myid, mytable.name, mytable.description "
+ "FROM mytable, myothertable WHERE mytable.myid = :myid "
+ "OR myothertable.otherid = :myid",
+ "SELECT mytable.myid, mytable.name, mytable.description "
+ "FROM mytable, myothertable WHERE mytable.myid = ? "
+ "OR myothertable.otherid = ?",
{'myid':None}, [None, None],
{'myid':5}, {'myid':5}, [5,5]
),
(
- text("SELECT mytable.myid, mytable.name, mytable.description FROM mytable, myothertable WHERE mytable.myid = :myid OR myothertable.otherid = :myid"),
- "SELECT mytable.myid, mytable.name, mytable.description FROM mytable, myothertable WHERE mytable.myid = :myid OR myothertable.otherid = :myid",
- "SELECT mytable.myid, mytable.name, mytable.description FROM mytable, myothertable WHERE mytable.myid = ? OR myothertable.otherid = ?",
+ text("SELECT mytable.myid, mytable.name, mytable.description FROM "
+ "mytable, myothertable WHERE mytable.myid = :myid OR "
+ "myothertable.otherid = :myid"),
+ "SELECT mytable.myid, mytable.name, mytable.description FROM "
+ "mytable, myothertable WHERE mytable.myid = :myid OR "
+ "myothertable.otherid = :myid",
+ "SELECT mytable.myid, mytable.name, mytable.description FROM "
+ "mytable, myothertable WHERE mytable.myid = ? OR "
+ "myothertable.otherid = ?",
{'myid':None}, [None, None],
{'myid':5}, {'myid':5}, [5,5]
),
(
- select([table1], or_(table1.c.myid==bindparam('myid', unique=True), table2.c.otherid==bindparam('myid', unique=True))),
- "SELECT mytable.myid, mytable.name, mytable.description FROM mytable, myothertable WHERE mytable.myid = :myid_1 OR myothertable.otherid = :myid_2",
- "SELECT mytable.myid, mytable.name, mytable.description FROM mytable, myothertable WHERE mytable.myid = ? OR myothertable.otherid = ?",
+ select([table1], or_(table1.c.myid==bindparam('myid', unique=True),
+ table2.c.otherid==bindparam('myid', unique=True))),
+ "SELECT mytable.myid, mytable.name, mytable.description FROM "
+ "mytable, myothertable WHERE mytable.myid = "
+ ":myid_1 OR myothertable.otherid = :myid_2",
+ "SELECT mytable.myid, mytable.name, mytable.description FROM "
+ "mytable, myothertable WHERE mytable.myid = ? "
+ "OR myothertable.otherid = ?",
{'myid_1':None, 'myid_2':None}, [None, None],
{'myid_1':5, 'myid_2': 6}, {'myid_1':5, 'myid_2':6}, [5,6]
),
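The unique=True flag on bindparam() is what produces the :myid_1 / :myid_2 names in the expected SQL above: each bind anonymizes itself on a name conflict instead of being shared. A minimal sketch:

    from sqlalchemy import select, bindparam, or_
    from sqlalchemy.sql import table, column

    t = table('mytable', column('myid'))
    s = select([t], or_(t.c.myid == bindparam('myid', unique=True),
                        t.c.myid == bindparam('myid', unique=True)))
    # the two binds are renamed rather than coalesced:
    # ... WHERE mytable.myid = :myid_1 OR mytable.myid = :myid_2
    print(s)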
@@ -1614,16 +1707,27 @@ sq.myothertable_othername AS sq_myothertable_othername FROM (" + sqstring + ") A
{}, {'test':None}, [None]
),
(
- select([table1], or_(table1.c.myid==bindparam('myid'), table2.c.otherid==bindparam('myotherid'))).params({'myid':8, 'myotherid':7}),
- "SELECT mytable.myid, mytable.name, mytable.description FROM mytable, myothertable WHERE mytable.myid = :myid OR myothertable.otherid = :myotherid",
- "SELECT mytable.myid, mytable.name, mytable.description FROM mytable, myothertable WHERE mytable.myid = ? OR myothertable.otherid = ?",
+ select([table1], or_(table1.c.myid==bindparam('myid'),
+ table2.c.otherid==bindparam('myotherid'))).\
+ params({'myid':8, 'myotherid':7}),
+ "SELECT mytable.myid, mytable.name, mytable.description FROM "
+ "mytable, myothertable WHERE mytable.myid = "
+ ":myid OR myothertable.otherid = :myotherid",
+ "SELECT mytable.myid, mytable.name, mytable.description FROM "
+ "mytable, myothertable WHERE mytable.myid = "
+ "? OR myothertable.otherid = ?",
{'myid':8, 'myotherid':7}, [8, 7],
{'myid':5}, {'myid':5, 'myotherid':7}, [5,7]
),
(
- select([table1], or_(table1.c.myid==bindparam('myid', value=7, unique=True), table2.c.otherid==bindparam('myid', value=8, unique=True))),
- "SELECT mytable.myid, mytable.name, mytable.description FROM mytable, myothertable WHERE mytable.myid = :myid_1 OR myothertable.otherid = :myid_2",
- "SELECT mytable.myid, mytable.name, mytable.description FROM mytable, myothertable WHERE mytable.myid = ? OR myothertable.otherid = ?",
+ select([table1], or_(table1.c.myid==bindparam('myid', value=7, unique=True),
+ table2.c.otherid==bindparam('myid', value=8, unique=True))),
+ "SELECT mytable.myid, mytable.name, mytable.description FROM "
+ "mytable, myothertable WHERE mytable.myid = "
+ ":myid_1 OR myothertable.otherid = :myid_2",
+ "SELECT mytable.myid, mytable.name, mytable.description FROM "
+ "mytable, myothertable WHERE mytable.myid = "
+ "? OR myothertable.otherid = ?",
{'myid_1':7, 'myid_2':8}, [7,8],
{'myid_1':5, 'myid_2':6}, {'myid_1':5, 'myid_2':6}, [5,6]
),
@@ -1635,12 +1739,15 @@ sq.myothertable_othername AS sq_myothertable_othername FROM (" + sqstring + ") A
positional = stmt.compile(dialect=sqlite.dialect())
pp = positional.params
assert [pp[k] for k in positional.positiontup] == expected_default_params_list
- assert nonpositional.construct_params(test_param_dict) == expected_test_params_dict, "expected :%s got %s" % (str(expected_test_params_dict), str(nonpositional.get_params(**test_param_dict)))
+ assert nonpositional.construct_params(test_param_dict) == expected_test_params_dict, \
+ "expected :%s got %s" % (str(expected_test_params_dict), \
+ str(nonpositional.get_params(**test_param_dict)))
pp = positional.construct_params(test_param_dict)
assert [pp[k] for k in positional.positiontup] == expected_test_params_list
# check that params() doesn't modify the original statement
- s = select([table1], or_(table1.c.myid==bindparam('myid'), table2.c.otherid==bindparam('myotherid')))
+ s = select([table1], or_(table1.c.myid==bindparam('myid'),
+ table2.c.otherid==bindparam('myotherid')))
s2 = s.params({'myid':8, 'myotherid':7})
s3 = s2.params({'myid':9})
assert s.compile().params == {'myid':None, 'myotherid':None}
@@ -1651,19 +1758,29 @@ sq.myothertable_othername AS sq_myothertable_othername FROM (" + sqstring + ") A
s = select([table1.c.myid]).where(table1.c.myid==12).as_scalar()
s2 = select([table1, s], table1.c.myid==s)
self.assert_compile(s2,
- "SELECT mytable.myid, mytable.name, mytable.description, (SELECT mytable.myid FROM mytable WHERE mytable.myid = "\
- ":myid_1) AS anon_1 FROM mytable WHERE mytable.myid = (SELECT mytable.myid FROM mytable WHERE mytable.myid = :myid_1)")
+ "SELECT mytable.myid, mytable.name, mytable.description, "
+ "(SELECT mytable.myid FROM mytable WHERE mytable.myid = "\
+ ":myid_1) AS anon_1 FROM mytable WHERE mytable.myid = "
+ "(SELECT mytable.myid FROM mytable WHERE mytable.myid = :myid_1)")
positional = s2.compile(dialect=sqlite.dialect())
pp = positional.params
assert [pp[k] for k in positional.positiontup] == [12, 12]
# check that conflicts with "unique" params are caught
- s = select([table1], or_(table1.c.myid==7, table1.c.myid==bindparam('myid_1')))
- assert_raises_message(exc.CompileError, "conflicts with unique bind parameter of the same name", str, s)
-
- s = select([table1], or_(table1.c.myid==7, table1.c.myid==8, table1.c.myid==bindparam('myid_1')))
- assert_raises_message(exc.CompileError, "conflicts with unique bind parameter of the same name", str, s)
+ s = select([table1], or_(table1.c.myid==7,
+ table1.c.myid==bindparam('myid_1')))
+ assert_raises_message(exc.CompileError,
+ "conflicts with unique bind parameter "
+ "of the same name",
+ str, s)
+
+ s = select([table1], or_(table1.c.myid==7, table1.c.myid==8,
+ table1.c.myid==bindparam('myid_1')))
+ assert_raises_message(exc.CompileError,
+ "conflicts with unique bind parameter "
+ "of the same name",
+ str, s)
def test_binds_no_hash_collision(self):
"""test that construct_params doesn't corrupt dict due to hash collisions"""
@@ -1829,46 +1946,75 @@ sq.myothertable_othername AS sq_myothertable_othername FROM (" + sqstring + ") A
)
def check_results(dialect, expected_results, literal):
- eq_(len(expected_results), 5, 'Incorrect number of expected results')
- eq_(str(cast(tbl.c.v1, Numeric).compile(dialect=dialect)), 'CAST(casttest.v1 AS %s)' %expected_results[0])
- eq_(str(cast(tbl.c.v1, Numeric(12, 9)).compile(dialect=dialect)), 'CAST(casttest.v1 AS %s)' %expected_results[1])
- eq_(str(cast(tbl.c.ts, Date).compile(dialect=dialect)), 'CAST(casttest.ts AS %s)' %expected_results[2])
- eq_(str(cast(1234, Text).compile(dialect=dialect)), 'CAST(%s AS %s)' %(literal, expected_results[3]))
- eq_(str(cast('test', String(20)).compile(dialect=dialect)), 'CAST(%s AS %s)' %(literal, expected_results[4]))
+ eq_(len(expected_results), 5,
+ 'Incorrect number of expected results')
+ eq_(str(cast(tbl.c.v1, Numeric).compile(dialect=dialect)),
+ 'CAST(casttest.v1 AS %s)' % expected_results[0])
+ eq_(str(cast(tbl.c.v1, Numeric(12, 9)).compile(dialect=dialect)),
+ 'CAST(casttest.v1 AS %s)' % expected_results[1])
+ eq_(str(cast(tbl.c.ts, Date).compile(dialect=dialect)),
+ 'CAST(casttest.ts AS %s)' % expected_results[2])
+ eq_(str(cast(1234, Text).compile(dialect=dialect)),
+ 'CAST(%s AS %s)' % (literal, expected_results[3]))
+ eq_(str(cast('test', String(20)).compile(dialect=dialect)),
+ 'CAST(%s AS %s)' %(literal, expected_results[4]))
# fixme: shoving all of this dialect-specific stuff in one test
# is now officially completely ridiculous AND non-obviously omits
# coverage on other dialects.
sel = select([tbl, cast(tbl.c.v1, Numeric)]).compile(dialect=dialect)
if isinstance(dialect, type(mysql.dialect())):
- eq_(str(sel), "SELECT casttest.id, casttest.v1, casttest.v2, casttest.ts, CAST(casttest.v1 AS DECIMAL) AS anon_1 \nFROM casttest")
+ eq_(str(sel),
+ "SELECT casttest.id, casttest.v1, casttest.v2, casttest.ts, "
+ "CAST(casttest.v1 AS DECIMAL) AS anon_1 \nFROM casttest")
else:
- eq_(str(sel), "SELECT casttest.id, casttest.v1, casttest.v2, casttest.ts, CAST(casttest.v1 AS NUMERIC) AS anon_1 \nFROM casttest")
+ eq_(str(sel),
+ "SELECT casttest.id, casttest.v1, casttest.v2, "
+ "casttest.ts, CAST(casttest.v1 AS NUMERIC) AS "
+ "anon_1 \nFROM casttest")
# first test with PostgreSQL engine
- check_results(postgresql.dialect(), ['NUMERIC', 'NUMERIC(12, 9)', 'DATE', 'TEXT', 'VARCHAR(20)'], '%(param_1)s')
+ check_results(postgresql.dialect(), ['NUMERIC', 'NUMERIC(12, 9)'
+ , 'DATE', 'TEXT', 'VARCHAR(20)'], '%(param_1)s')
# then the Oracle engine
- check_results(oracle.dialect(), ['NUMERIC', 'NUMERIC(12, 9)', 'DATE', 'CLOB', 'VARCHAR(20 CHAR)'], ':param_1')
+ check_results(oracle.dialect(), ['NUMERIC', 'NUMERIC(12, 9)',
+ 'DATE', 'CLOB', 'VARCHAR(20 CHAR)'], ':param_1')
# then the sqlite engine
- check_results(sqlite.dialect(), ['NUMERIC', 'NUMERIC(12, 9)', 'DATE', 'TEXT', 'VARCHAR(20)'], '?')
+ check_results(sqlite.dialect(), ['NUMERIC', 'NUMERIC(12, 9)',
+ 'DATE', 'TEXT', 'VARCHAR(20)'], '?')
# then the MySQL engine
- check_results(mysql.dialect(), ['DECIMAL', 'DECIMAL(12, 9)', 'DATE', 'CHAR', 'CHAR(20)'], '%s')
-
- self.assert_compile(cast(text('NULL'), Integer), "CAST(NULL AS INTEGER)", dialect=sqlite.dialect())
- self.assert_compile(cast(null(), Integer), "CAST(NULL AS INTEGER)", dialect=sqlite.dialect())
- self.assert_compile(cast(literal_column('NULL'), Integer), "CAST(NULL AS INTEGER)", dialect=sqlite.dialect())
+ check_results(mysql.dialect(), ['DECIMAL', 'DECIMAL(12, 9)',
+ 'DATE', 'CHAR', 'CHAR(20)'], '%s')
+
+ self.assert_compile(cast(text('NULL'), Integer),
+ 'CAST(NULL AS INTEGER)',
+ dialect=sqlite.dialect())
+ self.assert_compile(cast(null(), Integer),
+ 'CAST(NULL AS INTEGER)',
+ dialect=sqlite.dialect())
+ self.assert_compile(cast(literal_column('NULL'), Integer),
+ 'CAST(NULL AS INTEGER)',
+ dialect=sqlite.dialect())
def test_date_between(self):
import datetime
table = Table('dt', metadata,
Column('date', Date))
- self.assert_compile(table.select(table.c.date.between(datetime.date(2006,6,1), datetime.date(2006,6,5))),
- "SELECT dt.date FROM dt WHERE dt.date BETWEEN :date_1 AND :date_2", checkparams={'date_1':datetime.date(2006,6,1), 'date_2':datetime.date(2006,6,5)})
+ self.assert_compile(
+ table.select(table.c.date.between(datetime.date(2006,6,1),
+ datetime.date(2006,6,5))),
+ "SELECT dt.date FROM dt WHERE dt.date BETWEEN :date_1 AND :date_2",
+ checkparams={'date_1':datetime.date(2006,6,1),
+ 'date_2':datetime.date(2006,6,5)})
- self.assert_compile(table.select(sql.between(table.c.date, datetime.date(2006,6,1), datetime.date(2006,6,5))),
- "SELECT dt.date FROM dt WHERE dt.date BETWEEN :date_1 AND :date_2", checkparams={'date_1':datetime.date(2006,6,1), 'date_2':datetime.date(2006,6,5)})
+ self.assert_compile(
+ table.select(sql.between(table.c.date, datetime.date(2006,6,1),
+ datetime.date(2006,6,5))),
+ "SELECT dt.date FROM dt WHERE dt.date BETWEEN :date_1 AND :date_2",
+ checkparams={'date_1':datetime.date(2006,6,1),
+ 'date_2':datetime.date(2006,6,5)})
def test_operator_precedence(self):
table = Table('op', metadata,
@@ -1984,12 +2130,18 @@ sq.myothertable_othername AS sq_myothertable_othername FROM (" + sqstring + ") A
s1 = select([s1])
if label:
- self.assert_compile(s1, "SELECT %s FROM (SELECT %s AS %s FROM mytable)" % (label, expr, label))
+ self.assert_compile(s1,
+ "SELECT %s FROM (SELECT %s AS %s FROM mytable)" %
+ (label, expr, label))
elif col.table is not None:
# sqlite rule labels subquery columns
- self.assert_compile(s1, "SELECT %s FROM (SELECT %s AS %s FROM mytable)" % (key,expr, key))
+ self.assert_compile(s1,
+ "SELECT %s FROM (SELECT %s AS %s FROM mytable)" %
+ (key,expr, key))
else:
- self.assert_compile(s1, "SELECT %s FROM (SELECT %s FROM mytable)" % (expr,expr))
+ self.assert_compile(s1,
+ "SELECT %s FROM (SELECT %s FROM mytable)" %
+ (expr,expr))
def test_hints(self):
s = select([table1.c.myid]).with_hint(table1, "test hint %(name)s")
@@ -2254,13 +2406,24 @@ class CRUDTest(TestBase, AssertsCompiledSQL):
# test one that is actually correlated...
s = select([table2.c.othername], table2.c.otherid == table1.c.myid)
u = table1.update(table1.c.name==s)
- self.assert_compile(u, "UPDATE mytable SET myid=:myid, name=:name, description=:description WHERE mytable.name = "\
- "(SELECT myothertable.othername FROM myothertable WHERE myothertable.otherid = mytable.myid)")
+ self.assert_compile(u,
+ "UPDATE mytable SET myid=:myid, name=:name, "
+ "description=:description WHERE mytable.name = "
+ "(SELECT myothertable.othername FROM myothertable "
+ "WHERE myothertable.otherid = mytable.myid)")
def test_delete(self):
- self.assert_compile(delete(table1, table1.c.myid == 7), "DELETE FROM mytable WHERE mytable.myid = :myid_1")
- self.assert_compile(table1.delete().where(table1.c.myid == 7), "DELETE FROM mytable WHERE mytable.myid = :myid_1")
- self.assert_compile(table1.delete().where(table1.c.myid == 7).where(table1.c.name=='somename'), "DELETE FROM mytable WHERE mytable.myid = :myid_1 AND mytable.name = :name_1")
+ self.assert_compile(
+ delete(table1, table1.c.myid == 7),
+ "DELETE FROM mytable WHERE mytable.myid = :myid_1")
+ self.assert_compile(
+ table1.delete().where(table1.c.myid == 7),
+ "DELETE FROM mytable WHERE mytable.myid = :myid_1")
+ self.assert_compile(
+ table1.delete().where(table1.c.myid == 7).\
+ where(table1.c.name=='somename'),
+ "DELETE FROM mytable WHERE mytable.myid = :myid_1 "
+ "AND mytable.name = :name_1")
def test_correlated_delete(self):
# test a non-correlated WHERE clause
@@ -2272,7 +2435,10 @@ class CRUDTest(TestBase, AssertsCompiledSQL):
# test one that is actually correlated...
s = select([table2.c.othername], table2.c.otherid == table1.c.myid)
u = table1.delete(table1.c.name==s)
- self.assert_compile(u, "DELETE FROM mytable WHERE mytable.name = (SELECT myothertable.othername FROM myothertable WHERE myothertable.otherid = mytable.myid)")
+ self.assert_compile(u,
+ "DELETE FROM mytable WHERE mytable.name = (SELECT "
+ "myothertable.othername FROM myothertable WHERE "
+ "myothertable.otherid = mytable.myid)")
def test_binds_that_match_columns(self):
"""test bind params named after column names
@@ -2352,7 +2518,10 @@ class InlineDefaultTest(TestBase, AssertsCompiledSQL):
Column('col2', Integer, default=select([func.coalesce(func.max(foo.c.id))])),
)
- self.assert_compile(t.insert(inline=True, values={}), "INSERT INTO test (col1, col2) VALUES (foo(:foo_1), (SELECT coalesce(max(foo.id)) AS coalesce_1 FROM foo))")
+ self.assert_compile(t.insert(inline=True, values={}),
+ "INSERT INTO test (col1, col2) VALUES (foo(:foo_1), "
+ "(SELECT coalesce(max(foo.id)) AS coalesce_1 FROM "
+ "foo))")
def test_update(self):
m = MetaData()
@@ -2365,7 +2534,10 @@ class InlineDefaultTest(TestBase, AssertsCompiledSQL):
Column('col3', String(30))
)
- self.assert_compile(t.update(inline=True, values={'col3':'foo'}), "UPDATE test SET col1=foo(:foo_1), col2=(SELECT coalesce(max(foo.id)) AS coalesce_1 FROM foo), col3=:col3")
+ self.assert_compile(t.update(inline=True, values={'col3':'foo'}),
+ "UPDATE test SET col1=foo(:foo_1), col2=(SELECT "
+ "coalesce(max(foo.id)) AS coalesce_1 FROM foo), "
+ "col3=:col3")
class SchemaTest(TestBase, AssertsCompiledSQL):
def test_select(self):
diff --git a/test/sql/test_types.py b/test/sql/test_types.py
index 2a21ce034..993843891 100644
--- a/test/sql/test_types.py
+++ b/test/sql/test_types.py
@@ -118,7 +118,7 @@ class PickleMetadataTest(TestBase):
mt = loads(dumps(meta))
-class UserDefinedTest(TestBase):
+class UserDefinedTest(TestBase, AssertsCompiledSQL):
"""tests user-defined types."""
def test_processing(self):
@@ -148,6 +148,116 @@ class UserDefinedTest(TestBase):
for col in row[3], row[4]:
assert isinstance(col, unicode)
+ def test_typedecorator_impl(self):
+ for impl_, exp, kw in [
+ (Float, "FLOAT", {}),
+ (Float, "FLOAT(2)", {'precision':2}),
+ (Float(2), "FLOAT(2)", {'precision':4}),
+ (Numeric(19, 2), "NUMERIC(19, 2)", {}),
+ ]:
+ for dialect_ in (postgresql, mssql, mysql):
+ dialect_ = dialect_.dialect()
+
+ raw_impl = types.to_instance(impl_, **kw)
+
+ class MyType(types.TypeDecorator):
+ impl = impl_
+
+ dec_type = MyType(**kw)
+
+ eq_(dec_type.impl.__class__, raw_impl.__class__)
+
+ raw_dialect_impl = raw_impl.dialect_impl(dialect_)
+ dec_dialect_impl = dec_type.dialect_impl(dialect_)
+ eq_(dec_dialect_impl.__class__, MyType)
+ eq_(raw_dialect_impl.__class__ , dec_dialect_impl.impl.__class__)
+
+ self.assert_compile(
+ MyType(**kw),
+ exp,
+ dialect=dialect_
+ )
+
+ def test_user_defined_typedec_impl(self):
+ class MyType(types.TypeDecorator):
+ impl = Float
+
+ def load_dialect_impl(self, dialect):
+ if dialect.name == 'sqlite':
+ return String(50)
+ else:
+ return super(MyType, self).load_dialect_impl(dialect)
+
+ sl = sqlite.dialect()
+ pg = postgresql.dialect()
+ t = MyType()
+ self.assert_compile(t, "VARCHAR(50)", dialect=sl)
+ self.assert_compile(t, "FLOAT", dialect=pg)
+ eq_(
+ t.dialect_impl(dialect=sl).impl.__class__,
+ String().dialect_impl(dialect=sl).__class__
+ )
+ eq_(
+ t.dialect_impl(dialect=pg).impl.__class__,
+ Float().dialect_impl(pg).__class__
+ )
+
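test_user_defined_typedec_impl above is a complete recipe: a TypeDecorator may override load_dialect_impl() to pick a different underlying implementation per dialect. A condensed, standalone version (TypeEngine.compile() is assumed available here for rendering the DDL name):

    from sqlalchemy import types, String, Float
    from sqlalchemy.dialects import sqlite, postgresql

    class MyType(types.TypeDecorator):
        impl = Float

        def load_dialect_impl(self, dialect):
            # store as a string on sqlite, fall back to Float elsewhere
            if dialect.name == 'sqlite':
                return String(50)
            return super(MyType, self).load_dialect_impl(dialect)

    t = MyType()
    print(t.compile(dialect=sqlite.dialect()))      # VARCHAR(50)
    print(t.compile(dialect=postgresql.dialect()))  # FLOAT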
+ @testing.provide_metadata
+ def test_type_coerce(self):
+ """test ad-hoc usage of custom types with type_coerce()."""
+
+ class MyType(types.TypeDecorator):
+ impl = String
+
+ def process_bind_param(self, value, dialect):
+ return value[0:-8]
+
+ def process_result_value(self, value, dialect):
+ return value + "BIND_OUT"
+
+ t = Table('t', metadata, Column('data', String(50)))
+ metadata.create_all()
+
+ t.insert().values(data=type_coerce('d1BIND_OUT',MyType)).execute()
+
+ eq_(
+ select([type_coerce(t.c.data, MyType)]).execute().fetchall(),
+ [('d1BIND_OUT', )]
+ )
+
+ eq_(
+ select([t.c.data, type_coerce(t.c.data, MyType)]).execute().fetchall(),
+ [('d1', 'd1BIND_OUT')]
+ )
+
+ eq_(
+ select([t.c.data, type_coerce(t.c.data, MyType)]).\
+ where(type_coerce(t.c.data, MyType) == 'd1BIND_OUT').\
+ execute().fetchall(),
+ [('d1', 'd1BIND_OUT')]
+ )
+
+ eq_(
+ select([t.c.data, type_coerce(t.c.data, MyType)]).\
+ where(t.c.data == type_coerce('d1BIND_OUT', MyType)).\
+ execute().fetchall(),
+ [('d1', 'd1BIND_OUT')]
+ )
+
+ eq_(
+ select([t.c.data, type_coerce(t.c.data, MyType)]).\
+ where(t.c.data == type_coerce(None, MyType)).\
+ execute().fetchall(),
+ []
+ )
+
+ eq_(
+ select([t.c.data, type_coerce(t.c.data, MyType)]).\
+ where(type_coerce(t.c.data, MyType) == None).\
+ execute().fetchall(),
+ []
+ )
+
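type_coerce() applies a type's bind and result processing to an arbitrary expression without emitting a CAST, which is exactly what the new test round-trips. A condensed sketch (the top-level import location for type_coerce is an assumption):

    from sqlalchemy import (create_engine, MetaData, Table, Column,
                            String, select, types, type_coerce)

    class MyType(types.TypeDecorator):
        impl = String

        def process_bind_param(self, value, dialect):
            return value[0:-8]          # strip trailing "BIND_OUT" going in

        def process_result_value(self, value, dialect):
            return value + "BIND_OUT"   # re-append coming back out

    engine = create_engine('sqlite://')
    metadata = MetaData()
    t = Table('t', metadata, Column('data', String(50)))
    metadata.create_all(engine)

    engine.execute(t.insert().values(data=type_coerce('d1BIND_OUT', MyType)))
    print(engine.execute(
        select([t.c.data, type_coerce(t.c.data, MyType)])).fetchall())
    # [(u'd1', u'd1BIND_OUT')]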
@classmethod
def setup_class(cls):
global users, metadata
@@ -838,8 +948,9 @@ class ExpressionTest(TestBase, AssertsExecutionResults, AssertsCompiledSQL):
def test_typedec_operator_adapt(self):
expr = test_table.c.bvalue + "hi"
- assert expr.type.__class__ is String
-
+ assert expr.type.__class__ is MyTypeDec
+ assert expr.right.type.__class__ is MyTypeDec
+
eq_(
testing.db.execute(select([expr.label('foo')])).scalar(),
"BIND_INfooBIND_INhiBIND_OUT"
@@ -864,7 +975,7 @@ class ExpressionTest(TestBase, AssertsExecutionResults, AssertsCompiledSQL):
use_default_dialect=True
)
- assert expr.type.__class__ is String
+ assert expr.type.__class__ is MyTypeDec
eq_(
testing.db.execute(select([expr.label('foo')])).scalar(),
"BIND_INfooBIND_IN6BIND_OUT"
@@ -944,8 +1055,6 @@ class ExpressionTest(TestBase, AssertsExecutionResults, AssertsCompiledSQL):
"a + b"
)
-
-
def test_expression_typing(self):
expr = column('bar', Integer) - 3
@@ -964,6 +1073,23 @@ class ExpressionTest(TestBase, AssertsExecutionResults, AssertsCompiledSQL):
assert distinct(test_table.c.data).type == test_table.c.data.type
assert test_table.c.data.distinct().type == test_table.c.data.type
+class CompileTest(TestBase, AssertsCompiledSQL):
+ def test_default_compile(self):
+ """test that the base dialect of the type object is used
+ for default compilation.
+
+ """
+ for type_, expected in (
+ (String(), "VARCHAR"),
+ (Integer(), "INTEGER"),
+ (postgresql.INET(), "INET"),
+ (postgresql.FLOAT(), "FLOAT"),
+ (mysql.REAL(precision=8, scale=2), "REAL(8, 2)"),
+ (postgresql.REAL(), "REAL"),
+ (INTEGER(), "INTEGER"),
+ (mysql.INTEGER(display_width=5), "INTEGER(5)")
+ ):
+ self.assert_compile(type_, expected)
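The new CompileTest codifies that compiling a type with no dialect in play falls back to the type's own base dialect, so even dialect-specific types render their DDL name. Standalone (again assuming TypeEngine.compile()):

    from sqlalchemy import String, Integer
    from sqlalchemy.dialects import postgresql, mysql

    for type_ in (String(), Integer(), postgresql.INET(),
                  mysql.INTEGER(display_width=5)):
        print(type_.compile())   # VARCHAR, INTEGER, INET, INTEGER(5)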
class DateTest(TestBase, AssertsExecutionResults):
@classmethod