summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--CHANGES31
-rw-r--r--doc/build/orm/mapper_config.rst29
-rw-r--r--lib/sqlalchemy/dialects/oracle/cx_oracle.py123
-rw-r--r--lib/sqlalchemy/dialects/postgresql/base.py69
-rw-r--r--lib/sqlalchemy/dialects/postgresql/pg8000.py22
-rw-r--r--lib/sqlalchemy/dialects/postgresql/psycopg2.py15
-rw-r--r--lib/sqlalchemy/orm/__init__.py22
-rw-r--r--lib/sqlalchemy/orm/interfaces.py4
-rw-r--r--lib/sqlalchemy/orm/mapper.py2
-rw-r--r--lib/sqlalchemy/orm/properties.py5
-rw-r--r--lib/sqlalchemy/orm/strategies.py7
-rw-r--r--test/dialect/test_oracle.py44
-rw-r--r--test/dialect/test_postgresql.py76
-rw-r--r--test/orm/test_mapper.py11
-rw-r--r--test/orm/test_relationships.py389
15 files changed, 677 insertions, 172 deletions
diff --git a/CHANGES b/CHANGES
index d179b0115..86fac65a2 100644
--- a/CHANGES
+++ b/CHANGES
@@ -28,6 +28,16 @@ CHANGES
transformed to the empty slice -1:0 that resulted in
IndexError. [ticket:1968]
+ - The mapper argument "primary_key" can be passed as a
+ single column as well as a list or tuple. [ticket:1971]
+ The documentation examples that illustrated it as a
+ scalar value have been changed to lists.
+
+ - Added active_history flag to relationship()
+ and column_property(), forces attribute events to
+ always load the "old" value, so that it's available to
+ attributes.get_history(). [ticket:1961]
+
- sql
- The 'info' attribute of Column is copied during
Column.copy(), i.e. as occurs when using columns
@@ -42,7 +52,17 @@ CHANGES
version of RowProxy, as well as 2.7 style
"collections.Sequence" registration for RowProxy.
[ticket:1871]
-
+
+- postgresql
+ - Ensured every numeric, float, int code, scalar + array,
+ are recognized by psycopg2 and pg8000's "numeric"
+ base type. [ticket:1955]
+
+ - Added as_uuid=True flag to the UUID type, will receive
+ and return values as Python UUID() objects rather than
+ strings. Currently, the UUID type is only known to
+ work with psycopg2. [ticket:1956]
+
- mysql
- Fixed error handling for Jython + zxjdbc, such that
has_table() property works again. Regression from
@@ -60,6 +80,15 @@ CHANGES
than that of the parent table doesn't render at all,
as cross-schema references do not appear to be supported.
+- oracle
+ - The cx_oracle "decimal detection" logic, which takes place
+ for result set columns with ambiguous numeric characteristics,
+ now uses the decimal point character determined by the locale/
+ NLS_LANG setting, using an on-first-connect detection of
+ this character. cx_oracle 5.0.3 or greater is also required
+ when using a non-period-decimal-point NLS_LANG setting.
+ [ticket:1953].
+
- declarative
- An error is raised if __table_args__ is not in tuple
or dict format, and is not None. [ticket:1972]
diff --git a/doc/build/orm/mapper_config.rst b/doc/build/orm/mapper_config.rst
index a8b86abcd..40512f520 100644
--- a/doc/build/orm/mapper_config.rst
+++ b/doc/build/orm/mapper_config.rst
@@ -142,6 +142,33 @@ together using a list, as below where we map to a :func:`~.expression.join`::
For further examples on this particular use case, see :ref:`maptojoin`.
+column_property API
+~~~~~~~~~~~~~~~~~~~
+
+The establishment of a :class:`.Column` on a :func:`.mapper` can be further
+customized using the :func:`.column_property` function, as specified
+to the ``properties`` dictionary. This function is
+usually invoked implicitly for each mapped :class:`.Column`. Explicit usage
+looks like::
+
+ from sqlalchemy.orm import mapper, column_property
+
+ mapper(User, users, properties={
+ 'name':column_property(users.c.name, active_history=True)
+ })
+
+or with declarative::
+
+ class User(Base):
+ __tablename__ = 'users'
+
+ id = Column(Integer, primary_key=True)
+ name = column_property(Column(String(50)), active_history=True)
+
+Further examples of :func:`.column_property` are at :ref:`mapper_sql_expressions`.
+
+.. autofunction:: column_property
+
.. _deferred:
Deferred Column Loading
@@ -267,8 +294,6 @@ Correlated subqueries may be used as well::
The declarative form of the above is described in :ref:`declarative_sql_expressions`.
-.. autofunction:: column_property
-
Note that :func:`.column_property` is used to provide the effect of a SQL
expression that is actively rendered into the SELECT generated for a
particular mapped class. Alternatively, for the typical attribute that
diff --git a/lib/sqlalchemy/dialects/oracle/cx_oracle.py b/lib/sqlalchemy/dialects/oracle/cx_oracle.py
index eb25e614e..87a84e514 100644
--- a/lib/sqlalchemy/dialects/oracle/cx_oracle.py
+++ b/lib/sqlalchemy/dialects/oracle/cx_oracle.py
@@ -66,6 +66,52 @@ Two Phase Transaction Support
Two Phase transactions are implemented using XA transactions. Success has been reported
with this feature but it should be regarded as experimental.
+Precision Numerics
+------------------
+
+The SQLAlchemy dialect goes through a lot of steps to ensure
+that decimal numbers are sent and received with full accuracy.
+An "outputtypehandler" callable is associated with each
+cx_oracle connection object which detects numeric types and
+receives them as string values, instead of receiving a Python
+``float`` directly, which is then passed to the Python
+``Decimal`` constructor. The :class:`.Numeric` and
+:class:`.Float` types under the cx_oracle dialect are aware of
+this behavior, and will coerce the ``Decimal`` to ``float`` if
+the ``asdecimal`` flag is ``False`` (default on :class:`.Float`,
+optional on :class:`.Numeric`).
+
+The handler attempts to use the "precision" and "scale"
+attributes of the result set column to best determine if
+subsequent incoming values should be received as ``Decimal`` as
+opposed to int (in which case no processing is added). There are
+several scenarios where OCI_ does not provide unambiguous data
+as to the numeric type, including some situations where
+individual rows may return a combination of floating point and
+integer values. Certain values for "precision" and "scale" have
+been observed to determine this scenario. When it occurs, the
+outputtypehandler receives as string and then passes off to a
+processing function which detects, for each returned value, if a
+decimal point is present, and if so converts to ``Decimal``,
+otherwise to int. The intention is that simple int-based
+statements like "SELECT my_seq.nextval() FROM DUAL" continue to
+return ints and not ``Decimal`` objects, and that any kind of
+floating point value is received as a string so that there is no
+floating point loss of precision.
+
+The "decimal point is present" logic itself is also sensitive to
+locale. Under OCI_, this is controlled by the NLS_LANG
+environment variable. Upon first connection, the dialect runs a
+test to determine the current "decimal" character, which can be
+a comma "," for European locales. From that point forward the
+outputtypehandler uses that character to represent a decimal
+point (this behavior is new in version 0.6.6). Note that
+cx_oracle 5.0.3 or greater is required when dealing with
+numerics with locale settings that don't use a period "." as the
+decimal character.
+
+.. _OCI: http://www.oracle.com/technetwork/database/features/oci/index.html
+
"""
from sqlalchemy.dialects.oracle.base import OracleCompiler, OracleDialect, \
@@ -76,6 +122,7 @@ from sqlalchemy import types as sqltypes, util, exc, processors
from datetime import datetime
import random
from decimal import Decimal
+import re
class _OracleNumeric(sqltypes.Numeric):
def bind_processor(self, dialect):
@@ -473,37 +520,80 @@ class OracleDialect_cx_oracle(OracleDialect):
self.dbapi.BLOB: oracle.BLOB(),
self.dbapi.BINARY: oracle.RAW(),
}
+ @classmethod
+ def dbapi(cls):
+ import cx_Oracle
+ return cx_Oracle
def initialize(self, connection):
super(OracleDialect_cx_oracle, self).initialize(connection)
if self._is_oracle_8:
self.supports_unicode_binds = False
+ self._detect_decimal_char(connection)
+
+ def _detect_decimal_char(self, connection):
+ """detect if the decimal separator character is not '.', as
+ is the case with European locale settings for NLS_LANG.
+
+ cx_oracle itself uses similar logic when it formats Python
+ Decimal objects to strings on the bind side (as of 5.0.3),
+ as Oracle sends/receives string numerics only in the
+ current locale.
+
+ """
+ if self.cx_oracle_ver < (5,):
+ # no output type handlers before version 5
+ return
+
+ cx_Oracle = self.dbapi
+ conn = connection.connection
+
+ # override the output_type_handler that's
+ # on the cx_oracle connection with a plain
+ # one on the cursor
+
+ def output_type_handler(cursor, name, defaultType,
+ size, precision, scale):
+ return cursor.var(
+ cx_Oracle.STRING,
+ 255, arraysize=cursor.arraysize)
+
+ cursor = conn.cursor()
+ cursor.outputtypehandler = output_type_handler
+ cursor.execute("SELECT 0.1 FROM DUAL")
+ val = cursor.fetchone()[0]
+ cursor.close()
+ char = re.match(r"([\.,])", val).group(1)
+ if char != '.':
+ _detect_decimal = self._detect_decimal
+ self._detect_decimal = \
+ lambda value: _detect_decimal(value.replace(char, '.'))
+ self._to_decimal = \
+ lambda value: Decimal(value.replace(char, '.'))
+
+ def _detect_decimal(self, value):
+ if "." in value:
+ return Decimal(value)
+ else:
+ return int(value)
+
+ _to_decimal = Decimal
- @classmethod
- def dbapi(cls):
- import cx_Oracle
- return cx_Oracle
-
def on_connect(self):
if self.cx_oracle_ver < (5,):
# no output type handlers before version 5
return
- def maybe_decimal(value):
- if "." in value:
- return Decimal(value)
- else:
- return int(value)
-
cx_Oracle = self.dbapi
- def output_type_handler(cursor, name, defaultType, size, precision, scale):
+ def output_type_handler(cursor, name, defaultType,
+ size, precision, scale):
# convert all NUMBER with precision + positive scale to Decimal
# this almost allows "native decimal" mode.
if defaultType == cx_Oracle.NUMBER and precision and scale > 0:
return cursor.var(
cx_Oracle.STRING,
255,
- outconverter=Decimal,
+ outconverter=self._to_decimal,
arraysize=cursor.arraysize)
# if NUMBER with zero precision and 0 or neg scale, this appears
# to indicate "ambiguous". Use a slower converter that will
@@ -515,7 +605,7 @@ class OracleDialect_cx_oracle(OracleDialect):
return cursor.var(
cx_Oracle.STRING,
255,
- outconverter=maybe_decimal,
+ outconverter=self._detect_decimal,
arraysize=cursor.arraysize)
# allow all strings to come back natively as Unicode
elif defaultType in (cx_Oracle.STRING, cx_Oracle.FIXED_CHAR):
@@ -578,7 +668,10 @@ class OracleDialect_cx_oracle(OracleDialect):
return ([], opts)
def _get_server_version_info(self, connection):
- return tuple(int(x) for x in connection.connection.version.split('.'))
+ return tuple(
+ int(x)
+ for x in connection.connection.version.split('.')
+ )
def is_disconnect(self, e):
if isinstance(e, self.dbapi.InterfaceError):
diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py
index 0d103cb0d..7b1a97c32 100644
--- a/lib/sqlalchemy/dialects/postgresql/base.py
+++ b/lib/sqlalchemy/dialects/postgresql/base.py
@@ -94,13 +94,18 @@ from sqlalchemy.sql import compiler, expression, util as sql_util
from sqlalchemy.sql import operators as sql_operators
from sqlalchemy import types as sqltypes
+try:
+ from uuid import UUID as _python_UUID
+except ImportError:
+ _python_UUID = None
+
from sqlalchemy.types import INTEGER, BIGINT, SMALLINT, VARCHAR, \
CHAR, TEXT, FLOAT, NUMERIC, \
DATE, BOOLEAN
-_DECIMAL_TYPES = (1700, 1231)
+_DECIMAL_TYPES = (1231, 1700)
_FLOAT_TYPES = (700, 701, 1021, 1022)
-
+_INT_TYPES = (20, 21, 23, 26, 1005, 1007, 1016)
class REAL(sqltypes.Float):
__visit_name__ = "REAL"
@@ -134,6 +139,12 @@ class TIME(sqltypes.TIME):
self.precision = precision
class INTERVAL(sqltypes.TypeEngine):
+ """Postgresql INTERVAL type.
+
+ The INTERVAL type may not be supported on all DBAPIs.
+ It is known to work on psycopg2 and not pg8000 or zxjdbc.
+
+ """
__visit_name__ = 'INTERVAL'
def __init__(self, precision=None):
self.precision = precision
@@ -156,17 +167,67 @@ class BIT(sqltypes.TypeEngine):
PGBit = BIT
class UUID(sqltypes.TypeEngine):
+ """Postgresql UUID type.
+
+ Represents the UUID column type, interpreting
+ data either as natively returned by the DBAPI
+ or as Python uuid objects.
+
+ The UUID type may not be supported on all DBAPIs.
+ It is known to work on psycopg2 and not pg8000.
+
+ """
__visit_name__ = 'UUID'
+
+ def __init__(self, as_uuid=False):
+ """Construct a UUID type.
+
+
+ :param as_uuid=False: if True, values will be interpreted
+ as Python uuid objects, converting to/from string via the
+ DBAPI.
+
+ """
+ if as_uuid and _python_UUID is None:
+ raise NotImplementedError(
+ "This version of Python does not support the native UUID type."
+ )
+ self.as_uuid = as_uuid
+
+ def bind_processor(self, dialect):
+ if self.as_uuid:
+ def process(value):
+ if value is not None:
+ value = str(value)
+ return value
+ return process
+ else:
+ return None
+
+ def result_processor(self, dialect, coltype):
+ if self.as_uuid:
+ def process(value):
+ if value is not None:
+ value = _python_UUID(value)
+ return value
+ return process
+ else:
+ return None
+
PGUuid = UUID
class ARRAY(sqltypes.MutableType, sqltypes.Concatenable, sqltypes.TypeEngine):
"""Postgresql ARRAY type.
Represents values as Python lists.
+
+ The ARRAY type may not be supported on all DBAPIs.
+ It is known to work on psycopg2 and not pg8000.
**Note:** be sure to read the notes for
- :class:`~sqlalchemy.types.MutableType` regarding ORM
- performance implications.
+ :class:`.MutableType` regarding ORM
+ performance implications. The :class:`.ARRAY` type's
+ mutability can be disabled using the "mutable" flag.
"""
__visit_name__ = 'ARRAY'
diff --git a/lib/sqlalchemy/dialects/postgresql/pg8000.py b/lib/sqlalchemy/dialects/postgresql/pg8000.py
index 6af2cbd76..7b1d8e6a7 100644
--- a/lib/sqlalchemy/dialects/postgresql/pg8000.py
+++ b/lib/sqlalchemy/dialects/postgresql/pg8000.py
@@ -9,14 +9,16 @@ URLs are of the form
Unicode
-------
-pg8000 requires that the postgresql client encoding be configured in the postgresql.conf file
-in order to use encodings other than ascii. Set this value to the same value as
-the "encoding" parameter on create_engine(), usually "utf-8".
+pg8000 requires that the postgresql client encoding be
+configured in the postgresql.conf file in order to use encodings
+other than ascii. Set this value to the same value as the
+"encoding" parameter on create_engine(), usually "utf-8".
Interval
--------
-Passing data from/to the Interval type is not supported as of yet.
+Passing data from/to the Interval type is not supported as of
+yet.
"""
import decimal
@@ -27,26 +29,28 @@ from sqlalchemy import processors
from sqlalchemy import types as sqltypes
from sqlalchemy.dialects.postgresql.base import PGDialect, \
PGCompiler, PGIdentifierPreparer, PGExecutionContext,\
- _DECIMAL_TYPES, _FLOAT_TYPES
+ _DECIMAL_TYPES, _FLOAT_TYPES, _INT_TYPES
class _PGNumeric(sqltypes.Numeric):
def result_processor(self, dialect, coltype):
if self.asdecimal:
if coltype in _FLOAT_TYPES:
return processors.to_decimal_processor_factory(decimal.Decimal)
- elif coltype in _DECIMAL_TYPES:
+ elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
# pg8000 returns Decimal natively for 1700
return None
else:
- raise exc.InvalidRequestError("Unknown PG numeric type: %d" % coltype)
+ raise exc.InvalidRequestError(
+ "Unknown PG numeric type: %d" % coltype)
else:
if coltype in _FLOAT_TYPES:
# pg8000 returns float natively for 701
return None
- elif coltype in _DECIMAL_TYPES:
+ elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
return processors.to_float
else:
- raise exc.InvalidRequestError("Unknown PG numeric type: %d" % coltype)
+ raise exc.InvalidRequestError(
+ "Unknown PG numeric type: %d" % coltype)
class PGExecutionContext_pg8000(PGExecutionContext):
pass
diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
index 6e1ea92c1..88e6ce670 100644
--- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py
+++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
@@ -96,8 +96,9 @@ from sqlalchemy.sql import expression
from sqlalchemy.sql import operators as sql_operators
from sqlalchemy import types as sqltypes
from sqlalchemy.dialects.postgresql.base import PGDialect, PGCompiler, \
- PGIdentifierPreparer, PGExecutionContext, \
- ENUM, ARRAY, _DECIMAL_TYPES, _FLOAT_TYPES
+ PGIdentifierPreparer, PGExecutionContext, \
+ ENUM, ARRAY, _DECIMAL_TYPES, _FLOAT_TYPES,\
+ _INT_TYPES
logger = logging.getLogger('sqlalchemy.dialects.postgresql')
@@ -111,19 +112,21 @@ class _PGNumeric(sqltypes.Numeric):
if self.asdecimal:
if coltype in _FLOAT_TYPES:
return processors.to_decimal_processor_factory(decimal.Decimal)
- elif coltype in _DECIMAL_TYPES:
+ elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
# pg8000 returns Decimal natively for 1700
return None
else:
- raise exc.InvalidRequestError("Unknown PG numeric type: %d" % coltype)
+ raise exc.InvalidRequestError(
+ "Unknown PG numeric type: %d" % coltype)
else:
if coltype in _FLOAT_TYPES:
# pg8000 returns float natively for 701
return None
- elif coltype in _DECIMAL_TYPES:
+ elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
return processors.to_float
else:
- raise exc.InvalidRequestError("Unknown PG numeric type: %d" % coltype)
+ raise exc.InvalidRequestError(
+ "Unknown PG numeric type: %d" % coltype)
class _PGEnum(ENUM):
def __init__(self, *arg, **kw):
diff --git a/lib/sqlalchemy/orm/__init__.py b/lib/sqlalchemy/orm/__init__.py
index c2417d138..b51142909 100644
--- a/lib/sqlalchemy/orm/__init__.py
+++ b/lib/sqlalchemy/orm/__init__.py
@@ -206,6 +206,16 @@ def relationship(argument, secondary=None, **kwargs):
generally mutually exclusive with the use of the *secondary*
keyword argument.
+ :param active_history=False:
+ When ``True``, indicates that the "previous" value for a
+ many-to-one reference should be loaded when replaced, if
+ not already loaded. Normally, history tracking logic for
+ simple many-to-ones only needs to be aware of the "new"
+ value in order to perform a flush. This flag is available
+ for applications that make use of
+ :func:`.attributes.get_history` which also need to know
+ the "previous" value of the attribute. (New in 0.6.6)
+
:param backref:
indicates the string name of a property to be placed on the related
mapper's class that will handle this relationship in the other
@@ -576,7 +586,7 @@ def column_property(*args, **kwargs):
"""Provide a column-level property for use with a Mapper.
Column-based properties can normally be applied to the mapper's
- ``properties`` dictionary using the ``schema.Column`` element directly.
+ ``properties`` dictionary using the :class:`.Column` element directly.
Use this function when the given column is not directly present within the
mapper's selectable; examples include SQL expressions, functions, and
scalar SELECT queries.
@@ -587,6 +597,16 @@ def column_property(*args, **kwargs):
:param \*cols:
list of Column objects to be mapped.
+ :param active_history=False:
+ When ``True``, indicates that the "previous" value for a
+ scalar attribute should be loaded when replaced, if not
+ already loaded. Normally, history tracking logic for
+ simple non-primary-key scalar values only needs to be
+ aware of the "new" value in order to perform a flush. This
+ flag is available for applications that make use of
+ :func:`.attributes.get_history` which also need to know
+ the "previous" value of the attribute. (new in 0.6.6)
+
:param comparator_factory: a class which extends
:class:`.ColumnProperty.Comparator` which provides custom SQL clause
generation for comparison operations.
diff --git a/lib/sqlalchemy/orm/interfaces.py b/lib/sqlalchemy/orm/interfaces.py
index c3c9c754f..a6fe153e5 100644
--- a/lib/sqlalchemy/orm/interfaces.py
+++ b/lib/sqlalchemy/orm/interfaces.py
@@ -888,6 +888,10 @@ class AttributeExtension(object):
active_history = True
"""indicates that the set() method would like to receive the 'old' value,
even if it means firing lazy callables.
+
+ Note that ``active_history`` can also be set directly via
+ :func:`.column_property` and :func:`.relationship`.
+
"""
def append(self, state, value, initiator):
diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py
index e9da4f533..c1045226c 100644
--- a/lib/sqlalchemy/orm/mapper.py
+++ b/lib/sqlalchemy/orm/mapper.py
@@ -107,7 +107,7 @@ class Mapper(object):
self.class_manager = None
- self.primary_key_argument = primary_key
+ self.primary_key_argument = util.to_list(primary_key)
self.non_primary = non_primary
if order_by is not False:
diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py
index feee041ce..edfb861f4 100644
--- a/lib/sqlalchemy/orm/properties.py
+++ b/lib/sqlalchemy/orm/properties.py
@@ -60,6 +60,7 @@ class ColumnProperty(StrategizedProperty):
self.__class__.Comparator)
self.descriptor = kwargs.pop('descriptor', None)
self.extension = kwargs.pop('extension', None)
+ self.active_history = kwargs.pop('active_history', False)
if 'doc' in kwargs:
self.doc = kwargs.pop('doc')
@@ -114,6 +115,7 @@ class ColumnProperty(StrategizedProperty):
return ColumnProperty(
deferred=self.deferred,
group=self.group,
+ active_history=self.active_history,
*self.columns)
def _getattr(self, state, dict_, column, passive=False):
@@ -184,6 +186,7 @@ class CompositeProperty(ColumnProperty):
deferred=self.deferred,
group=self.group,
composite_class=self.composite_class,
+ active_history=self.active_history,
*self.columns)
def do_init(self):
@@ -444,6 +447,7 @@ class RelationshipProperty(StrategizedProperty):
comparator_factory=None,
single_parent=False, innerjoin=False,
doc=None,
+ active_history=False,
cascade_backrefs=True,
load_on_pending=False,
strategy_class=None, _local_remote_pairs=None,
@@ -469,6 +473,7 @@ class RelationshipProperty(StrategizedProperty):
self.query_class = query_class
self.innerjoin = innerjoin
self.doc = doc
+ self.active_history = active_history
self.join_depth = join_depth
self.local_remote_pairs = _local_remote_pairs
self.extension = extension
diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py
index 398a63e7a..04a23f000 100644
--- a/lib/sqlalchemy/orm/strategies.py
+++ b/lib/sqlalchemy/orm/strategies.py
@@ -108,7 +108,8 @@ class ColumnLoader(LoaderStrategy):
self.is_class_level = True
coltype = self.columns[0].type
# TODO: check all columns ? check for foreign key as well?
- active_history = self.columns[0].primary_key
+ active_history = self.parent_property.active_history or \
+ self.columns[0].primary_key
_register_attribute(self, mapper, useobject=False,
compare_function=coltype.compare_values,
@@ -163,8 +164,7 @@ class CompositeColumnLoader(ColumnLoader):
_register_attribute(self, mapper, useobject=False,
compare_function=compare,
copy_function=copy,
- mutable_scalars=True
- #active_history ?
+ mutable_scalars=True,
)
def create_row_processor(self, selectcontext, path, mapper,
@@ -398,6 +398,7 @@ class LazyLoader(AbstractRelationshipLoader):
uselist = self.parent_property.uselist,
typecallable = self.parent_property.collection_class,
active_history = \
+ self.parent_property.active_history or \
self.parent_property.direction is not \
interfaces.MANYTOONE or \
not self.use_get,
diff --git a/test/dialect/test_oracle.py b/test/dialect/test_oracle.py
index 04b9d3274..6627015b9 100644
--- a/test/dialect/test_oracle.py
+++ b/test/dialect/test_oracle.py
@@ -367,7 +367,7 @@ class CompatFlagsTest(TestBase, AssertsCompiledSQL):
def server_version_info(self):
return (8, 2, 5)
- dialect = oracle.dialect()
+ dialect = oracle.dialect(dbapi=testing.db.dialect.dbapi)
dialect._get_server_version_info = server_version_info
# before connect, assume modern DB
@@ -384,7 +384,8 @@ class CompatFlagsTest(TestBase, AssertsCompiledSQL):
self.assert_compile(Unicode(50),"VARCHAR(50)",dialect=dialect)
self.assert_compile(UnicodeText(),"CLOB",dialect=dialect)
- dialect = oracle.dialect(implicit_returning=True)
+ dialect = oracle.dialect(implicit_returning=True,
+ dbapi=testing.db.dialect.dbapi)
dialect._get_server_version_info = server_version_info
dialect.initialize(testing.db.connect())
assert dialect.implicit_returning
@@ -392,7 +393,7 @@ class CompatFlagsTest(TestBase, AssertsCompiledSQL):
def test_default_flags(self):
"""test with no initialization or server version info"""
- dialect = oracle.dialect()
+ dialect = oracle.dialect(dbapi=testing.db.dialect.dbapi)
assert dialect._supports_char_length
assert dialect._supports_nchar
assert dialect.use_ansi
@@ -403,7 +404,7 @@ class CompatFlagsTest(TestBase, AssertsCompiledSQL):
def test_ora10_flags(self):
def server_version_info(self):
return (10, 2, 5)
- dialect = oracle.dialect()
+ dialect = oracle.dialect(dbapi=testing.db.dialect.dbapi)
dialect._get_server_version_info = server_version_info
dialect.initialize(testing.db.connect())
assert dialect._supports_char_length
@@ -1043,7 +1044,40 @@ class TypesTest(TestBase, AssertsCompiledSQL):
finally:
t.drop(engine)
-
+class EuroNumericTest(TestBase):
+ """test the numeric output_type_handler when using non-US locale for NLS_LANG."""
+
+ __only_on__ = 'oracle+cx_oracle'
+
+ def setup(self):
+ self.old_nls_lang = os.environ.get('NLS_LANG', False)
+ os.environ['NLS_LANG'] = "GERMAN"
+ self.engine = testing_engine()
+
+ def teardown(self):
+ if self.old_nls_lang is not False:
+ os.environ['NLS_LANG'] = self.old_nls_lang
+ else:
+ del os.environ['NLS_LANG']
+ self.engine.dispose()
+
+ @testing.provide_metadata
+ def test_output_type_handler(self):
+ for stmt, exp, kw in [
+ ("SELECT 0.1 FROM DUAL", Decimal("0.1"), {}),
+ ("SELECT 15 FROM DUAL", 15, {}),
+ ("SELECT CAST(15 AS NUMERIC(3, 1)) FROM DUAL", Decimal("15"), {}),
+ ("SELECT CAST(0.1 AS NUMERIC(5, 2)) FROM DUAL", Decimal("0.1"), {}),
+ ("SELECT :num FROM DUAL", Decimal("2.5"), {'num':Decimal("2.5")})
+ ]:
+ test_exp = self.engine.scalar(stmt, **kw)
+ eq_(
+ test_exp,
+ exp
+ )
+ assert type(test_exp) is type(exp)
+
+
class DontReflectIOTTest(TestBase):
"""test that index overflow tables aren't included in
table_names."""
diff --git a/test/dialect/test_postgresql.py b/test/dialect/test_postgresql.py
index e20274aef..f3eb91ef2 100644
--- a/test/dialect/test_postgresql.py
+++ b/test/dialect/test_postgresql.py
@@ -456,7 +456,25 @@ class EnumTest(TestBase, AssertsExecutionResults, AssertsCompiledSQL):
assert t2.c.value2.type.schema == 'test_schema'
finally:
metadata.drop_all()
+
+class NumericInterpretationTest(TestBase):
+
+
+ def test_numeric_codes(self):
+ from sqlalchemy.dialects.postgresql import pg8000, psycopg2, base
+ from decimal import Decimal
+ for dialect in (pg8000.dialect(), psycopg2.dialect()):
+
+ typ = Numeric().dialect_impl(dialect)
+ for code in base._INT_TYPES + base._FLOAT_TYPES + \
+ base._DECIMAL_TYPES:
+ proc = typ.result_processor(dialect, code)
+ val = 23.7
+ if proc is not None:
+ val = proc(val)
+ assert val in (23.7, Decimal("23.7"))
+
class InsertTest(TestBase, AssertsExecutionResults):
__only_on__ = 'postgresql'
@@ -1866,6 +1884,64 @@ class SpecialTypesTest(TestBase, ComparesTables):
assert t.c.plain_interval.type.precision is None
assert t.c.precision_interval.type.precision == 3
+class UUIDTest(TestBase):
+ """Test the bind/return values of the UUID type."""
+
+ __only_on__ = 'postgresql'
+
+ @testing.fails_on('postgresql+pg8000', 'No support for UUID type')
+ def test_uuid_string(self):
+ import uuid
+ self._test_round_trip(
+ Table('utable', MetaData(),
+ Column('data', postgresql.UUID())
+ ),
+ str(uuid.uuid4()),
+ str(uuid.uuid4())
+ )
+
+ @testing.fails_on('postgresql+pg8000', 'No support for UUID type')
+ def test_uuid_uuid(self):
+ import uuid
+ self._test_round_trip(
+ Table('utable', MetaData(),
+ Column('data', postgresql.UUID(as_uuid=True))
+ ),
+ uuid.uuid4(),
+ uuid.uuid4()
+ )
+
+ def test_no_uuid_available(self):
+ from sqlalchemy.dialects.postgresql import base
+ uuid_type = base._python_UUID
+ base._python_UUID = None
+ try:
+ assert_raises(
+ NotImplementedError,
+ postgresql.UUID, as_uuid=True
+ )
+ finally:
+ base._python_UUID = uuid_type
+
+ def setup(self):
+ self.conn = testing.db.connect()
+ trans = self.conn.begin()
+
+ def teardown(self):
+ self.conn.close()
+
+ def _test_round_trip(self, utable, value1, value2):
+ utable.create(self.conn)
+ self.conn.execute(utable.insert(), {'data':value1})
+ self.conn.execute(utable.insert(), {'data':value2})
+ r = self.conn.execute(
+ select([utable.c.data]).
+ where(utable.c.data != value1)
+ )
+ eq_(r.fetchone()[0], value2)
+ eq_(r.fetchone(), None)
+
+
class MatchTest(TestBase, AssertsCompiledSQL):
__only_on__ = 'postgresql'
diff --git a/test/orm/test_mapper.py b/test/orm/test_mapper.py
index f041c8896..52714eb43 100644
--- a/test/orm/test_mapper.py
+++ b/test/orm/test_mapper.py
@@ -665,6 +665,17 @@ class MapperTest(_fixtures.FixtureTest):
None])
@testing.resolve_artifact_names
+ def test_scalar_pk_arg(self):
+ m1 = mapper(Item, items, primary_key=[items.c.id])
+ m2 = mapper(Keyword, keywords, primary_key=keywords.c.id)
+ m3 = mapper(User, users, primary_key=(users.c.id,))
+
+ assert m1.primary_key[0] is items.c.id
+ assert m2.primary_key[0] is keywords.c.id
+ assert m3.primary_key[0] is users.c.id
+
+
+ @testing.resolve_artifact_names
def test_custom_join(self):
"""select_from totally replace the FROM parameters."""
diff --git a/test/orm/test_relationships.py b/test/orm/test_relationships.py
index 187c9e534..03efa0183 100644
--- a/test/orm/test_relationships.py
+++ b/test/orm/test_relationships.py
@@ -5,25 +5,15 @@ from sqlalchemy.test import testing
from sqlalchemy import Integer, String, ForeignKey, MetaData, and_
from sqlalchemy.test.schema import Table, Column
from sqlalchemy.orm import mapper, relationship, relation, \
- backref, create_session, compile_mappers, clear_mappers, sessionmaker
+ backref, create_session, compile_mappers, \
+ clear_mappers, sessionmaker, attributes,\
+ Session, composite, column_property
from sqlalchemy.test.testing import eq_, startswith_
from test.orm import _base, _fixtures
-class RelationshipTest(_base.MappedTest):
- """An extended topological sort test
-
- This is essentially an extension of the "dependency.py" topological sort
- test. In this test, a table is dependent on two other tables that are
- otherwise unrelated to each other. The dependency sort must ensure that
- this childmost table is below both parent tables in the outcome (a bug
- existed where this was not always the case).
-
- While the straight topological sort tests should expose this, since the
- sorting can be different due to subtle differences in program execution,
- this test case was exposing the bug whereas the simpler tests were not.
-
- """
+class DependencyTwoParentTest(_base.MappedTest):
+ """Test flush() when a mapper is dependent on multiple relationships"""
run_setup_mappers = 'once'
run_inserts = 'once'
@@ -32,18 +22,24 @@ class RelationshipTest(_base.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table("tbl_a", metadata,
- Column("id", Integer, primary_key=True, test_needs_autoincrement=True),
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column("name", String(128)))
Table("tbl_b", metadata,
- Column("id", Integer, primary_key=True, test_needs_autoincrement=True),
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column("name", String(128)))
Table("tbl_c", metadata,
- Column("id", Integer, primary_key=True, test_needs_autoincrement=True),
- Column("tbl_a_id", Integer, ForeignKey("tbl_a.id"), nullable=False),
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("tbl_a_id", Integer, ForeignKey("tbl_a.id"),
+ nullable=False),
Column("name", String(128)))
Table("tbl_d", metadata,
- Column("id", Integer, primary_key=True, test_needs_autoincrement=True),
- Column("tbl_c_id", Integer, ForeignKey("tbl_c.id"), nullable=False),
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("tbl_c_id", Integer, ForeignKey("tbl_c.id"),
+ nullable=False),
Column("tbl_b_id", Integer, ForeignKey("tbl_b.id")),
Column("name", String(128)))
@@ -62,10 +58,12 @@ class RelationshipTest(_base.MappedTest):
@testing.resolve_artifact_names
def setup_mappers(cls):
mapper(A, tbl_a, properties=dict(
- c_rows=relationship(C, cascade="all, delete-orphan", backref="a_row")))
+ c_rows=relationship(C, cascade="all, delete-orphan",
+ backref="a_row")))
mapper(B, tbl_b)
mapper(C, tbl_c, properties=dict(
- d_rows=relationship(D, cascade="all, delete-orphan", backref="c_row")))
+ d_rows=relationship(D, cascade="all, delete-orphan",
+ backref="c_row")))
mapper(D, tbl_d, properties=dict(
b_row=relationship(B)))
@@ -101,8 +99,12 @@ class RelationshipTest(_base.MappedTest):
session.flush()
-class RelationshipTest2(_base.MappedTest):
- """The ultimate relationship() test:
+class CompositeSelfRefFKTest(_base.MappedTest):
+ """Tests a composite FK where, in
+ the relationship(), one col points
+ to itself in the same table.
+
+ this is a very unusual case::
company employee
---------- ----------
@@ -117,22 +119,13 @@ class RelationshipTest2(_base.MappedTest):
employee joins to its sub-employees
both on reports_to_id, *and on company_id to itself*.
- As of 0.5.5 we are making a slight behavioral change,
- such that the custom foreign_keys setting
- on the o2m side has to be explicitly
- unset on the backref m2o side - this to suit
- the vast majority of use cases where the backref()
- is to receive the same foreign_keys argument
- as the forwards reference. But we also
- have smartened the remote_side logic such that
- you don't even need the custom fks setting.
-
"""
@classmethod
def define_tables(cls, metadata):
Table('company_t', metadata,
- Column('company_id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('company_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('name', sa.Unicode(30)))
Table('employee_t', metadata,
@@ -163,7 +156,10 @@ class RelationshipTest2(_base.MappedTest):
def test_explicit(self):
mapper(Company, company_t)
mapper(Employee, employee_t, properties= {
- 'company':relationship(Company, primaryjoin=employee_t.c.company_id==company_t.c.company_id, backref='employees'),
+ 'company':relationship(Company,
+ primaryjoin=employee_t.c.company_id==
+ company_t.c.company_id,
+ backref='employees'),
'reports_to':relationship(Employee, primaryjoin=
sa.and_(
employee_t.c.emp_id==employee_t.c.reports_to_id,
@@ -244,10 +240,12 @@ class RelationshipTest2(_base.MappedTest):
test_e5 = sess.query(Employee).get([c2.company_id, e5.emp_id])
assert test_e5.name == 'emp5', test_e5.name
assert [x.name for x in test_e1.employees] == ['emp2', 'emp3']
- assert sess.query(Employee).get([c1.company_id, 3]).reports_to.name == 'emp1'
- assert sess.query(Employee).get([c2.company_id, 3]).reports_to.name == 'emp5'
+ assert sess.query(Employee).\
+ get([c1.company_id, 3]).reports_to.name == 'emp1'
+ assert sess.query(Employee).\
+ get([c2.company_id, 3]).reports_to.name == 'emp5'
-class RelationshipTest3(_base.MappedTest):
+class ComplexPostUpdateTest(_base.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table("jobs", metadata,
@@ -311,7 +309,8 @@ class RelationshipTest3(_base.MappedTest):
comment.content = u'some content'
return self.currentversion
def add_comment(self):
- nextnum = max([-1] + [c.comment_id for c in self.comments]) + 1
+ nextnum = max([-1] +
+ [c.comment_id for c in self.comments]) + 1
newcomment = PageComment()
newcomment.comment_id = nextnum
self.comments.append(newcomment)
@@ -340,7 +339,7 @@ class RelationshipTest3(_base.MappedTest):
PageVersion,
cascade="all, delete-orphan",
primaryjoin=sa.and_(pages.c.jobno==pageversions.c.jobno,
- pages.c.pagename==pageversions.c.pagename),
+ pages.c.pagename==pageversions.c.pagename),
order_by=pageversions.c.version,
backref=backref('page',lazy='joined')
)})
@@ -348,7 +347,7 @@ class RelationshipTest3(_base.MappedTest):
'page': relationship(
Page,
primaryjoin=sa.and_(pages.c.jobno==pagecomments.c.jobno,
- pages.c.pagename==pagecomments.c.pagename),
+ pages.c.pagename==pagecomments.c.pagename),
backref=backref("comments",
cascade="all, delete-orphan",
order_by=pagecomments.c.comment_id))})
@@ -389,13 +388,14 @@ class RelationshipTest3(_base.MappedTest):
s.delete(j)
s.flush()
-class RelationshipTest4(_base.MappedTest):
+class FKsAsPksTest(_base.MappedTest):
"""Syncrules on foreign keys that are also primary"""
@classmethod
def define_tables(cls, metadata):
Table("tableA", metadata,
- Column("id",Integer,primary_key=True, test_needs_autoincrement=True),
+ Column("id",Integer,primary_key=True,
+ test_needs_autoincrement=True),
Column("foo",Integer,),
test_needs_fk=True)
@@ -413,7 +413,8 @@ class RelationshipTest4(_base.MappedTest):
@testing.resolve_artifact_names
def test_onetoone_switch(self):
- """test that active history is enabled on a one-to-many/one that has use_get==True"""
+ """test that active history is enabled on a
+ one-to-many/one that has use_get==True"""
mapper(A, tableA, properties={
'b':relationship(B, cascade="all,delete-orphan", uselist=False)})
@@ -502,7 +503,8 @@ class RelationshipTest4(_base.MappedTest):
@testing.resolve_artifact_names
def test_delete_cascade_BtoA(self):
- """No 'blank the PK' error when the child is to be deleted as part of a cascade"""
+ """No 'blank the PK' error when the child is to
+ be deleted as part of a cascade"""
for cascade in ("save-update, delete",
#"save-update, delete-orphan",
@@ -527,7 +529,9 @@ class RelationshipTest4(_base.MappedTest):
@testing.resolve_artifact_names
def test_delete_cascade_AtoB(self):
- """No 'blank the PK' error when the child is to be deleted as part of a cascade"""
+ """No 'blank the PK' error when the child is to
+ be deleted as part of a cascade"""
+
for cascade in ("save-update, delete",
#"save-update, delete-orphan",
"save-update, delete, delete-orphan"):
@@ -590,19 +594,25 @@ class RelationshipTest4(_base.MappedTest):
assert a1 not in sess
assert b1 not in sess
-class RelationshipToUniqueTest(_base.MappedTest):
- """test a relationship based on a primary join against a unique non-pk column"""
+class UniqueColReferenceSwitchTest(_base.MappedTest):
+ """test a relationship based on a primary
+ join against a unique non-pk column"""
@classmethod
def define_tables(cls, metadata):
Table("table_a", metadata,
- Column("id", Integer, primary_key=True, test_needs_autoincrement=True),
- Column("ident", String(10), nullable=False, unique=True),
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("ident", String(10), nullable=False,
+ unique=True),
)
Table("table_b", metadata,
- Column("id", Integer, primary_key=True, test_needs_autoincrement=True),
- Column("a_ident", String(10), ForeignKey('table_a.ident'), nullable=False),
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("a_ident", String(10),
+ ForeignKey('table_a.ident'),
+ nullable=False),
)
@classmethod
@@ -632,7 +642,7 @@ class RelationshipToUniqueTest(_base.MappedTest):
session.delete(a1)
session.flush()
-class RelationshipTest5(_base.MappedTest):
+class RelationshipToSelectableTest(_base.MappedTest):
"""Test a map to a select that relates to a map to the table."""
@classmethod
@@ -671,7 +681,8 @@ class RelationshipTest5(_base.MappedTest):
order_by=sa.asc(items.c.id),
primaryjoin=sa.and_(
container_select.c.policyNum==items.c.policyNum,
- container_select.c.policyEffDate==items.c.policyEffDate,
+ container_select.c.policyEffDate==
+ items.c.policyEffDate,
container_select.c.type==items.c.type),
foreign_keys=[
items.c.policyNum,
@@ -697,7 +708,7 @@ class RelationshipTest5(_base.MappedTest):
for old, new in zip(con.lineItems, newcon.lineItems):
eq_(old.id, new.id)
-class RelationshipTest6(_base.MappedTest):
+class FKEquatedToConstantTest(_base.MappedTest):
"""test a relationship with a non-column entity in the primary join,
is not viewonly, and also has the non-column's clause mentioned in the
foreign keys list.
@@ -706,12 +717,14 @@ class RelationshipTest6(_base.MappedTest):
@classmethod
def define_tables(cls, metadata):
- Table('tags', metadata, Column("id", Integer, primary_key=True, test_needs_autoincrement=True),
+ Table('tags', metadata, Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column("data", String(50)),
)
Table('tag_foo', metadata,
- Column("id", Integer, primary_key=True, test_needs_autoincrement=True),
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('tagid', Integer),
Column("data", String(50)),
)
@@ -742,7 +755,10 @@ class RelationshipTest6(_base.MappedTest):
sess.expunge_all()
# relationship works
- eq_(sess.query(Tag).all(), [Tag(data='some tag', foo=[TagInstance(data='iplc_case')])])
+ eq_(
+ sess.query(Tag).all(),
+ [Tag(data='some tag', foo=[TagInstance(data='iplc_case')])]
+ )
# both TagInstances were persisted
eq_(
@@ -755,11 +771,13 @@ class BackrefPropagatesForwardsArgs(_base.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('users', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('name', String(50))
)
Table('addresses', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('user_id', Integer),
Column('email', String(50))
)
@@ -791,24 +809,28 @@ class BackrefPropagatesForwardsArgs(_base.MappedTest):
])
class AmbiguousJoinInterpretedAsSelfRef(_base.MappedTest):
- """test ambiguous joins due to FKs on both sides treated as self-referential.
+ """test ambiguous joins due to FKs on both sides treated as
+ self-referential.
- this mapping is very similar to that of test/orm/inheritance/query.py
- SelfReferentialTestJoinedToBase , except that inheritance is not used
- here.
+ this mapping is very similar to that of
+ test/orm/inheritance/query.py
+ SelfReferentialTestJoinedToBase , except that inheritance is
+ not used here.
"""
@classmethod
def define_tables(cls, metadata):
subscriber_table = Table('subscriber', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('dummy', String(10)) # to appease older sqlite version
)
address_table = Table('address',
metadata,
- Column('subscriber_id', Integer, ForeignKey('subscriber.id'), primary_key=True),
+ Column('subscriber_id', Integer,
+ ForeignKey('subscriber.id'), primary_key=True),
Column('type', String(1), primary_key=True),
)
@@ -816,7 +838,8 @@ class AmbiguousJoinInterpretedAsSelfRef(_base.MappedTest):
@testing.resolve_artifact_names
def setup_mappers(cls):
subscriber_and_address = subscriber.join(address,
- and_(address.c.subscriber_id==subscriber.c.id, address.c.type.in_(['A', 'B', 'C'])))
+ and_(address.c.subscriber_id==subscriber.c.id,
+ address.c.type.in_(['A', 'B', 'C'])))
class Address(_base.ComparableEntity):
pass
@@ -918,8 +941,10 @@ class ManualBackrefTest(_fixtures.FixtureTest):
})
assert_raises_message(sa.exc.ArgumentError,
- r"reverse_property 'dingaling' on relationship User.addresses references "
- "relationship Address.dingaling, which does not reference mapper Mapper\|User\|users",
+ r"reverse_property 'dingaling' on relationship "
+ "User.addresses references "
+ "relationship Address.dingaling, which does not "
+ "reference mapper Mapper\|User\|users",
compile_mappers)
class JoinConditionErrorTest(testing.TestBase):
@@ -966,7 +991,8 @@ class JoinConditionErrorTest(testing.TestBase):
class C2(object):
pass
- mapper(C1, t1, properties={'c2':relationship(C2, primaryjoin=t1.join(t2))})
+ mapper(C1, t1, properties={'c2':relationship(C2,
+ primaryjoin=t1.join(t2))})
mapper(C2, t2)
assert_raises(sa.exc.ArgumentError, compile_mappers)
@@ -996,7 +1022,9 @@ class JoinConditionErrorTest(testing.TestBase):
assert_raises_message(
sa.exc.ArgumentError,
- "Column-based expression object expected for argument '%s'; got: '%s', type %r" % (argname, arg[0], type(arg[0])),
+ "Column-based expression object expected "
+ "for argument '%s'; got: '%s', type %r" %
+ (argname, arg[0], type(arg[0])),
compile_mappers)
@@ -1053,23 +1081,28 @@ class JoinConditionErrorTest(testing.TestBase):
clear_mappers()
class TypeMatchTest(_base.MappedTest):
- """test errors raised when trying to add items whose type is not handled by a relationship"""
+ """test errors raised when trying to add items
+ whose type is not handled by a relationship"""
@classmethod
def define_tables(cls, metadata):
Table("a", metadata,
- Column('aid', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('aid', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(30)))
Table("b", metadata,
- Column('bid', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('bid', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column("a_id", Integer, ForeignKey("a.aid")),
Column('data', String(30)))
Table("c", metadata,
- Column('cid', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('cid', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column("b_id", Integer, ForeignKey("b.bid")),
Column('data', String(30)))
Table("d", metadata,
- Column('did', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('did', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column("a_id", Integer, ForeignKey("a.aid")),
Column('data', String(30)))
@@ -1115,7 +1148,8 @@ class TypeMatchTest(_base.MappedTest):
sess.add(b1)
sess.add(c1)
assert_raises_message(sa.orm.exc.FlushError,
- "Attempting to flush an item", sess.flush)
+ "Attempting to flush an item",
+ sess.flush)
@testing.resolve_artifact_names
def test_o2m_nopoly_onflush(self):
@@ -1136,7 +1170,8 @@ class TypeMatchTest(_base.MappedTest):
sess.add(b1)
sess.add(c1)
assert_raises_message(sa.orm.exc.FlushError,
- "Attempting to flush an item", sess.flush)
+ "Attempting to flush an item",
+ sess.flush)
@testing.resolve_artifact_names
def test_m2o_nopoly_onflush(self):
@@ -1153,7 +1188,8 @@ class TypeMatchTest(_base.MappedTest):
sess.add(b1)
sess.add(d1)
assert_raises_message(sa.orm.exc.FlushError,
- "Attempting to flush an item", sess.flush)
+ "Attempting to flush an item",
+ sess.flush)
@testing.resolve_artifact_names
def test_m2o_oncascade(self):
@@ -1168,7 +1204,8 @@ class TypeMatchTest(_base.MappedTest):
d1.a = b1
sess = create_session()
assert_raises_message(AssertionError,
- "doesn't handle objects of type", sess.add, d1)
+ "doesn't handle objects of type",
+ sess.add, d1)
class TypedAssociationTable(_base.MappedTest):
@@ -1224,10 +1261,12 @@ class ViewOnlyM2MBackrefTest(_base.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table("t1", metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(40)))
Table("t2", metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(40)),
)
Table("t1t2", metadata,
@@ -1241,7 +1280,8 @@ class ViewOnlyM2MBackrefTest(_base.MappedTest):
class B(_base.ComparableEntity):pass
mapper(A, t1, properties={
- 'bs':relationship(B, secondary=t1t2, backref=backref('as_', viewonly=True))
+ 'bs':relationship(B, secondary=t1t2,
+ backref=backref('as_', viewonly=True))
})
mapper(B, t2)
@@ -1264,14 +1304,17 @@ class ViewOnlyOverlappingNames(_base.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table("t1", metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(40)))
Table("t2", metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(40)),
Column('t1id', Integer, ForeignKey('t1.id')))
Table("t3", metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(40)),
Column('t2id', Integer, ForeignKey('t2.id')))
@@ -1324,14 +1367,17 @@ class ViewOnlyUniqueNames(_base.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table("t1", metadata,
- Column('t1id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('t1id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(40)))
Table("t2", metadata,
- Column('t2id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('t2id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(40)),
Column('t1id_ref', Integer, ForeignKey('t1.t1id')))
Table("t3", metadata,
- Column('t3id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('t3id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(40)),
Column('t2id_ref', Integer, ForeignKey('t2.t2id')))
@@ -1505,10 +1551,12 @@ class ViewOnlyRepeatedLocalColumn(_base.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('foos', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(50)))
- Table('bars', metadata, Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Table('bars', metadata, Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('fid1', Integer, ForeignKey('foos.id')),
Column('fid2', Integer, ForeignKey('foos.id')),
Column('data', String(50)))
@@ -1553,14 +1601,17 @@ class ViewOnlyComplexJoin(_base.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('t1', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(50)))
Table('t2', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(50)),
Column('t1id', Integer, ForeignKey('t1.id')))
Table('t3', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(50)))
Table('t2tot3', metadata,
Column('t2id', Integer, ForeignKey('t2.id')),
@@ -1624,10 +1675,12 @@ class ExplicitLocalRemoteTest(_base.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('t1', metadata,
- Column('id', String(50), primary_key=True, test_needs_autoincrement=True),
+ Column('id', String(50), primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(50)))
Table('t2', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(50)),
Column('t1id', String(50)))
@@ -1777,19 +1830,25 @@ class InvalidRemoteSideTest(_base.MappedTest):
't1s':relationship(T1, backref='parent')
})
- assert_raises_message(sa.exc.ArgumentError, "T1.t1s and back-reference T1.parent are "
- "both of the same direction <symbol 'ONETOMANY>. Did you "
- "mean to set remote_side on the many-to-one side ?", sa.orm.compile_mappers)
+ assert_raises_message(sa.exc.ArgumentError,
+ "T1.t1s and back-reference T1.parent are "
+ "both of the same direction <symbol 'ONETOMANY>. Did you "
+ "mean to set remote_side on the many-to-one side ?",
+ sa.orm.compile_mappers)
@testing.resolve_artifact_names
def test_m2o_backref(self):
mapper(T1, t1, properties={
- 't1s':relationship(T1, backref=backref('parent', remote_side=t1.c.id), remote_side=t1.c.id)
+ 't1s':relationship(T1,
+ backref=backref('parent', remote_side=t1.c.id),
+ remote_side=t1.c.id)
})
- assert_raises_message(sa.exc.ArgumentError, "T1.t1s and back-reference T1.parent are "
- "both of the same direction <symbol 'MANYTOONE>. Did you "
- "mean to set remote_side on the many-to-one side ?", sa.orm.compile_mappers)
+ assert_raises_message(sa.exc.ArgumentError,
+ "T1.t1s and back-reference T1.parent are "
+ "both of the same direction <symbol 'MANYTOONE>. Did you "
+ "mean to set remote_side on the many-to-one side ?",
+ sa.orm.compile_mappers)
@testing.resolve_artifact_names
def test_o2m_explicit(self):
@@ -1800,20 +1859,24 @@ class InvalidRemoteSideTest(_base.MappedTest):
# can't be sure of ordering here
assert_raises_message(sa.exc.ArgumentError,
- "both of the same direction <symbol 'ONETOMANY>. Did you "
- "mean to set remote_side on the many-to-one side ?", sa.orm.compile_mappers)
+ "both of the same direction <symbol 'ONETOMANY>. Did you "
+ "mean to set remote_side on the many-to-one side ?",
+ sa.orm.compile_mappers)
@testing.resolve_artifact_names
def test_m2o_explicit(self):
mapper(T1, t1, properties={
- 't1s':relationship(T1, back_populates='parent', remote_side=t1.c.id),
- 'parent':relationship(T1, back_populates='t1s', remote_side=t1.c.id)
+ 't1s':relationship(T1, back_populates='parent',
+ remote_side=t1.c.id),
+ 'parent':relationship(T1, back_populates='t1s',
+ remote_side=t1.c.id)
})
# can't be sure of ordering here
assert_raises_message(sa.exc.ArgumentError,
- "both of the same direction <symbol 'MANYTOONE>. Did you "
- "mean to set remote_side on the many-to-one side ?", sa.orm.compile_mappers)
+ "both of the same direction <symbol 'MANYTOONE>. Did you "
+ "mean to set remote_side on the many-to-one side ?",
+ sa.orm.compile_mappers)
class InvalidRelationshipEscalationTest(_base.MappedTest):
@@ -1872,7 +1935,8 @@ class InvalidRelationshipEscalationTest(_base.MappedTest):
assert_raises_message(
sa.exc.ArgumentError,
- "Could not determine relationship direction for primaryjoin condition",
+ "Could not determine relationship direction "
+ "for primaryjoin condition",
sa.orm.compile_mappers)
@testing.resolve_artifact_names
@@ -1953,7 +2017,8 @@ class InvalidRelationshipEscalationTest(_base.MappedTest):
assert_raises_message(
sa.exc.ArgumentError,
- "Could not determine relationship direction for primaryjoin condition",
+ "Could not determine relationship direction for primaryjoin "
+ "condition",
sa.orm.compile_mappers)
@testing.resolve_artifact_names
@@ -2036,13 +2101,14 @@ class InvalidRelationshipEscalationTest(_base.MappedTest):
assert_raises_message(
sa.exc.ArgumentError,
- "Could not determine relationship direction for primaryjoin condition",
+ "Could not determine relationship direction for primaryjoin "
+ "condition",
sa.orm.compile_mappers)
sa.orm.clear_mappers()
mapper(Foo, foos_with_fks, properties={
'bars':relationship(Bar,
- primaryjoin=foos_with_fks.c.id==bars_with_fks.c.fid)})
+ primaryjoin=foos_with_fks.c.id==bars_with_fks.c.fid)})
mapper(Bar, bars_with_fks)
sa.orm.compile_mappers()
@@ -2054,7 +2120,8 @@ class InvalidRelationshipEscalationTest(_base.MappedTest):
assert_raises_message(
sa.exc.ArgumentError,
- "Could not determine relationship direction for primaryjoin condition",
+ "Could not determine relationship direction for primaryjoin "
+ "condition",
sa.orm.compile_mappers)
@@ -2067,7 +2134,8 @@ class InvalidRelationshipEscalationTest(_base.MappedTest):
assert_raises_message(
sa.exc.ArgumentError,
- "Could not determine relationship direction for primaryjoin condition",
+ "Could not determine relationship direction for primaryjoin "
+ "condition",
sa.orm.compile_mappers)
@@ -2148,9 +2216,12 @@ class InvalidRelationshipEscalationTestM2M(_base.MappedTest):
sa.orm.clear_mappers()
mapper(Foo, foos, properties={
- 'bars': relationship(Bar, secondary=foobars_with_many_columns,
- primaryjoin=foos.c.id==foobars_with_many_columns.c.fid,
- secondaryjoin=foobars_with_many_columns.c.bid==bars.c.id)})
+ 'bars': relationship(Bar,
+ secondary=foobars_with_many_columns,
+ primaryjoin=foos.c.id==
+ foobars_with_many_columns.c.fid,
+ secondaryjoin=foobars_with_many_columns.c.bid==
+ bars.c.id)})
mapper(Bar, bars)
assert_raises_message(sa.exc.SAWarning,
@@ -2188,9 +2259,12 @@ class InvalidRelationshipEscalationTestM2M(_base.MappedTest):
sa.orm.clear_mappers()
mapper(Foo, foos, properties={
- 'bars': relationship(Bar, secondary=foobars_with_many_columns,
- primaryjoin=foos.c.id==foobars_with_many_columns.c.fid,
- secondaryjoin=foobars_with_many_columns.c.bid==bars.c.id)})
+ 'bars': relationship(Bar,
+ secondary=foobars_with_many_columns,
+ primaryjoin=foos.c.id==
+ foobars_with_many_columns.c.fid,
+ secondaryjoin=foobars_with_many_columns.c.bid==
+ bars.c.id)})
mapper(Bar, bars)
sa.orm.compile_mappers()
eq_(
@@ -2214,7 +2288,8 @@ class InvalidRelationshipEscalationTestM2M(_base.MappedTest):
assert_raises_message(
sa.exc.ArgumentError,
- "Could not determine relationship direction for primaryjoin condition",
+ "Could not determine relationship direction for "
+ "primaryjoin condition",
sa.orm.compile_mappers)
sa.orm.clear_mappers()
@@ -2226,7 +2301,8 @@ class InvalidRelationshipEscalationTestM2M(_base.MappedTest):
mapper(Bar, bars)
assert_raises_message(
sa.exc.ArgumentError,
- "Could not locate any equated, locally mapped column pairs for primaryjoin condition ",
+ "Could not locate any equated, locally mapped column pairs for "
+ "primaryjoin condition ",
sa.orm.compile_mappers)
sa.orm.clear_mappers()
@@ -2279,8 +2355,71 @@ class InvalidRelationshipEscalationTestM2M(_base.MappedTest):
"Could not locate any equated, locally mapped column pairs for "
"secondaryjoin condition", sa.orm.compile_mappers)
+class ActiveHistoryFlagTest(_fixtures.FixtureTest):
+ run_inserts = None
+ run_deletes = None
+
+ def _test_attribute(self, obj, attrname, newvalue):
+ sess = Session()
+ sess.add(obj)
+ oldvalue = getattr(obj, attrname)
+ sess.commit()
+
+ # expired
+ assert attrname not in obj.__dict__
+
+ setattr(obj, attrname, newvalue)
+ eq_(
+ attributes.get_history(obj, attrname),
+ ([newvalue,], (), [oldvalue,])
+ )
+
+ @testing.resolve_artifact_names
+ def test_column_property_flag(self):
+ mapper(User, users, properties={
+ 'name':column_property(users.c.name, active_history=True)
+ })
+ u1 = User(name='jack')
+ self._test_attribute(u1, 'name', 'ed')
+
+ @testing.resolve_artifact_names
+ def test_relationship_property_flag(self):
+ mapper(Address, addresses, properties={
+ 'user':relationship(User, active_history=True)
+ })
+ mapper(User, users)
+ u1 = User(name='jack')
+ u2 = User(name='ed')
+ a1 = Address(email_address='a1', user=u1)
+ self._test_attribute(a1, 'user', u2)
+
+ @testing.resolve_artifact_names
+ def test_composite_property_flag(self):
+ # active_history is implicit for composites
+ # right now, no flag needed
+ class MyComposite(object):
+ def __init__(self, description, isopen):
+ self.description = description
+ self.isopen = isopen
+ def __composite_values__(self):
+ return [self.description, self.isopen]
+ def __eq__(self, other):
+ return isinstance(other, MyComposite) and \
+ other.description == self.description
+ mapper(Order, orders, properties={
+ 'composite':composite(
+ MyComposite,
+ orders.c.description,
+ orders.c.isopen)
+ })
+ o1 = Order(composite=MyComposite('foo', 1))
+ self._test_attribute(o1, "composite", MyComposite('bar', 1))
+
+
class RelationDeprecationTest(_base.MappedTest):
+ """test usage of the old 'relation' function."""
+
run_inserts = 'once'
run_deletes = None