summaryrefslogtreecommitdiff
path: root/lib
diff options
context:
space:
mode:
authorMike Bayer <mike_mp@zzzcomputing.com>2010-09-03 17:57:20 -0400
committerMike Bayer <mike_mp@zzzcomputing.com>2010-09-03 17:57:20 -0400
commit33b70a1f0259dd5e4acd2cb78c9a5ab6254cf9ce (patch)
tree08430231aa8f6d1d84487212ee2c43bc5df7a3cd /lib
parent47f56ac4990c7c3e6a020a837e91e39f41adf39e (diff)
parent6e83926657057c97239bef114e640f2b102be02c (diff)
downloadsqlalchemy-33b70a1f0259dd5e4acd2cb78c9a5ab6254cf9ce.tar.gz
merge from tip
Diffstat (limited to 'lib')
-rw-r--r--lib/sqlalchemy/dialects/oracle/__init__.py2
-rw-r--r--lib/sqlalchemy/dialects/oracle/base.py48
-rw-r--r--lib/sqlalchemy/dialects/oracle/cx_oracle.py5
-rw-r--r--lib/sqlalchemy/engine/base.py36
-rwxr-xr-xlib/sqlalchemy/ext/declarative.py7
-rw-r--r--lib/sqlalchemy/orm/__init__.py40
-rw-r--r--lib/sqlalchemy/orm/collections.py3
-rw-r--r--lib/sqlalchemy/orm/evaluator.py3
-rw-r--r--lib/sqlalchemy/orm/mapper.py179
-rw-r--r--lib/sqlalchemy/orm/properties.py23
-rw-r--r--lib/sqlalchemy/orm/query.py33
-rw-r--r--lib/sqlalchemy/orm/session.py40
-rw-r--r--lib/sqlalchemy/orm/state.py1
-rw-r--r--lib/sqlalchemy/orm/strategies.py29
-rw-r--r--lib/sqlalchemy/orm/sync.py2
-rw-r--r--lib/sqlalchemy/orm/unitofwork.py6
-rw-r--r--lib/sqlalchemy/sql/expression.py12
-rw-r--r--lib/sqlalchemy/test/requires.py2
-rw-r--r--lib/sqlalchemy/types.py305
-rw-r--r--lib/sqlalchemy/util.py45
20 files changed, 538 insertions, 283 deletions
diff --git a/lib/sqlalchemy/dialects/oracle/__init__.py b/lib/sqlalchemy/dialects/oracle/__init__.py
index 78d3c8fab..f6734013d 100644
--- a/lib/sqlalchemy/dialects/oracle/__init__.py
+++ b/lib/sqlalchemy/dialects/oracle/__init__.py
@@ -13,5 +13,5 @@ __all__ = (
'VARCHAR', 'NVARCHAR', 'CHAR', 'DATE', 'DATETIME', 'NUMBER',
'BLOB', 'BFILE', 'CLOB', 'NCLOB', 'TIMESTAMP', 'RAW',
'FLOAT', 'DOUBLE_PRECISION', 'LONG', 'dialect', 'INTERVAL',
-'VARCHAR2', 'NVARCHAR2'
+'VARCHAR2', 'NVARCHAR2', 'ROWID'
)
diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py
index cb37f9558..0aa348953 100644
--- a/lib/sqlalchemy/dialects/oracle/base.py
+++ b/lib/sqlalchemy/dialects/oracle/base.py
@@ -222,6 +222,16 @@ class INTERVAL(sqltypes.TypeEngine):
@property
def _type_affinity(self):
return sqltypes.Interval
+
+class ROWID(sqltypes.TypeEngine):
+ """Oracle ROWID type.
+
+ When used in a cast() or similar, generates ROWID.
+
+ """
+ __visit_name__ = 'ROWID'
+
+
class _OracleBoolean(sqltypes.Boolean):
def get_dbapi_type(self, dbapi):
@@ -336,6 +346,9 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler):
def visit_RAW(self, type_):
return "RAW(%(length)s)" % {'length' : type_.length}
+ def visit_ROWID(self, type_):
+ return "ROWID"
+
class OracleCompiler(compiler.SQLCompiler):
"""Oracle compiler modifies the lexical structure of Select
statements to work under non-ANSI configured Oracle databases, if
@@ -886,11 +899,22 @@ class OracleDialect(default.DefaultDialect):
uniqueness = dict(NONUNIQUE=False, UNIQUE=True)
oracle_sys_col = re.compile(r'SYS_NC\d+\$', re.IGNORECASE)
+
+ def upper_name_set(names):
+ return set([i.upper() for i in names])
+
+ pk_names = upper_name_set(pkeys)
+
+ def remove_if_primary_key(index):
+ # don't include the primary key index
+ if index is not None and \
+ upper_name_set(index['column_names']) == pk_names:
+ indexes.pop()
+
+ index = None
for rset in rp:
- # don't include the primary key columns
- if rset.column_name in [s.upper() for s in pkeys]:
- continue
if rset.index_name != last_index_name:
+ remove_if_primary_key(index)
index = dict(name=self.normalize_name(rset.index_name), column_names=[])
indexes.append(index)
index['unique'] = uniqueness.get(rset.uniqueness, False)
@@ -900,6 +924,7 @@ class OracleDialect(default.DefaultDialect):
if not oracle_sys_col.match(rset.column_name):
index['column_names'].append(self.normalize_name(rset.column_name))
last_index_name = rset.index_name
+ remove_if_primary_key(index)
return indexes
@reflection.cache
@@ -932,7 +957,6 @@ class OracleDialect(default.DefaultDialect):
constraint_data = rp.fetchall()
return constraint_data
- @reflection.cache
def get_primary_keys(self, connection, table_name, schema=None, **kw):
"""
@@ -943,7 +967,10 @@ class OracleDialect(default.DefaultDialect):
dblink
"""
+ return self._get_primary_keys(connection, table_name, schema, **kw)[0]
+ @reflection.cache
+ def _get_primary_keys(self, connection, table_name, schema=None, **kw):
resolve_synonyms = kw.get('oracle_resolve_synonyms', False)
dblink = kw.get('dblink', '')
info_cache = kw.get('info_cache')
@@ -953,6 +980,7 @@ class OracleDialect(default.DefaultDialect):
resolve_synonyms, dblink,
info_cache=info_cache)
pkeys = []
+ constraint_name = None
constraint_data = self._get_constraint_data(connection, table_name,
schema, dblink,
info_cache=kw.get('info_cache'))
@@ -962,8 +990,18 @@ class OracleDialect(default.DefaultDialect):
(cons_name, cons_type, local_column, remote_table, remote_column, remote_owner) = \
row[0:2] + tuple([self.normalize_name(x) for x in row[2:6]])
if cons_type == 'P':
+ if constraint_name is None:
+ constraint_name = self.normalize_name(cons_name)
pkeys.append(local_column)
- return pkeys
+ return pkeys, constraint_name
+
+ def get_pk_constraint(self, connection, table_name, schema=None, **kw):
+ cols, name = self._get_primary_keys(connection, table_name, schema=schema, **kw)
+
+ return {
+ 'constrained_columns':cols,
+ 'name':name
+ }
@reflection.cache
def get_foreign_keys(self, connection, table_name, schema=None, **kw):
diff --git a/lib/sqlalchemy/dialects/oracle/cx_oracle.py b/lib/sqlalchemy/dialects/oracle/cx_oracle.py
index 01bb66304..eb25e614e 100644
--- a/lib/sqlalchemy/dialects/oracle/cx_oracle.py
+++ b/lib/sqlalchemy/dialects/oracle/cx_oracle.py
@@ -220,6 +220,10 @@ class _OracleInterval(oracle.INTERVAL):
class _OracleRaw(oracle.RAW):
pass
+class _OracleRowid(oracle.ROWID):
+ def get_dbapi_type(self, dbapi):
+ return dbapi.ROWID
+
class OracleCompiler_cx_oracle(OracleCompiler):
def bindparam_string(self, name):
if self.preparer._bindparam_requires_quotes(name):
@@ -392,6 +396,7 @@ class OracleDialect_cx_oracle(OracleDialect):
oracle.RAW: _OracleRaw,
sqltypes.Unicode: _OracleNVarChar,
sqltypes.NVARCHAR : _OracleNVarChar,
+ oracle.ROWID: _OracleRowid,
}
diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py
index 70ad01914..a02e90aba 100644
--- a/lib/sqlalchemy/engine/base.py
+++ b/lib/sqlalchemy/engine/base.py
@@ -911,6 +911,14 @@ class Connection(Connectable):
raise exc.ResourceClosedError("This Connection is closed")
@property
+ def _connection_is_valid(self):
+ # use getattr() for is_valid to support exceptions raised in
+ # dialect initializer, where the connection is not wrapped in
+ # _ConnectionFairy
+
+ return getattr(self.__connection, 'is_valid', False)
+
+ @property
def info(self):
"""A collection of per-DB-API connection instance properties."""
@@ -954,15 +962,18 @@ class Connection(Connectable):
operations in a non-transactional state.
"""
-
+ if self.invalidated:
+ return
+
if self.closed:
raise exc.ResourceClosedError("This Connection is closed")
- if self.__connection.is_valid:
+ if self._connection_is_valid:
self.__connection.invalidate(exception)
del self.__connection
self.__invalid = True
-
+
+
def detach(self):
"""Detach the underlying DB-API connection from its connection pool.
@@ -1055,11 +1066,8 @@ class Connection(Connectable):
raise
def _rollback_impl(self):
- # use getattr() for is_valid to support exceptions raised in
- # dialect initializer,
- # where we do not yet have the pool wrappers plugged in
if not self.closed and not self.invalidated and \
- getattr(self.__connection, 'is_valid', False):
+ self._connection_is_valid:
if self._echo:
self.engine.logger.info("ROLLBACK")
try:
@@ -1085,37 +1093,37 @@ class Connection(Connectable):
if name is None:
self.__savepoint_seq += 1
name = 'sa_savepoint_%s' % self.__savepoint_seq
- if self.__connection.is_valid:
+ if self._connection_is_valid:
self.engine.dialect.do_savepoint(self, name)
return name
def _rollback_to_savepoint_impl(self, name, context):
- if self.__connection.is_valid:
+ if self._connection_is_valid:
self.engine.dialect.do_rollback_to_savepoint(self, name)
self.__transaction = context
def _release_savepoint_impl(self, name, context):
- if self.__connection.is_valid:
+ if self._connection_is_valid:
self.engine.dialect.do_release_savepoint(self, name)
self.__transaction = context
def _begin_twophase_impl(self, xid):
- if self.__connection.is_valid:
+ if self._connection_is_valid:
self.engine.dialect.do_begin_twophase(self, xid)
def _prepare_twophase_impl(self, xid):
- if self.__connection.is_valid:
+ if self._connection_is_valid:
assert isinstance(self.__transaction, TwoPhaseTransaction)
self.engine.dialect.do_prepare_twophase(self, xid)
def _rollback_twophase_impl(self, xid, is_prepared):
- if self.__connection.is_valid:
+ if self._connection_is_valid:
assert isinstance(self.__transaction, TwoPhaseTransaction)
self.engine.dialect.do_rollback_twophase(self, xid, is_prepared)
self.__transaction = None
def _commit_twophase_impl(self, xid, is_prepared):
- if self.__connection.is_valid:
+ if self._connection_is_valid:
assert isinstance(self.__transaction, TwoPhaseTransaction)
self.engine.dialect.do_commit_twophase(self, xid, is_prepared)
self.__transaction = None
diff --git a/lib/sqlalchemy/ext/declarative.py b/lib/sqlalchemy/ext/declarative.py
index e40ba3ec4..b1b083e80 100755
--- a/lib/sqlalchemy/ext/declarative.py
+++ b/lib/sqlalchemy/ext/declarative.py
@@ -251,7 +251,8 @@ Similarly, :func:`comparable_using` is a front end for the
Defining SQL Expressions
========================
-The usage of :func:`.column_property` with Declarative is
+The usage of :func:`.column_property` with Declarative to define
+load-time, mapped SQL expressions is
pretty much the same as that described in
:ref:`mapper_sql_expressions`. Local columns within the same
class declaration can be referenced directly::
@@ -364,7 +365,9 @@ and simply pass it to declarative classes::
Some configuration schemes may find it more appropriate to use ``__table__``,
such as those which already take advantage of the data-driven nature of
-:class:`.Table` to customize and/or automate schema definition.
+:class:`.Table` to customize and/or automate schema definition. See
+the wiki example `NamingConventions <http://www.sqlalchemy.org/trac/wiki/UsageRecipes/NamingConventions>`_
+for one such example.
Mapper Configuration
====================
diff --git a/lib/sqlalchemy/orm/__init__.py b/lib/sqlalchemy/orm/__init__.py
index c74fabacd..17d967db4 100644
--- a/lib/sqlalchemy/orm/__init__.py
+++ b/lib/sqlalchemy/orm/__init__.py
@@ -51,7 +51,7 @@ from sqlalchemy.orm.mapper import reconstructor, validates
from sqlalchemy.orm import strategies
from sqlalchemy.orm.query import AliasOption, Query
from sqlalchemy.sql import util as sql_util
-from sqlalchemy.orm.session import Session as _Session
+from sqlalchemy.orm.session import Session
from sqlalchemy.orm.session import object_session, sessionmaker, \
make_transient
from sqlalchemy.orm.scoping import ScopedSession
@@ -66,6 +66,7 @@ __all__ = (
'Validator',
'PropComparator',
'Query',
+ 'Session',
'aliased',
'backref',
'class_mapper',
@@ -115,8 +116,7 @@ def scoped_session(session_factory, scopefunc=None):
:class:`~sqlalchemy.orm.scoping.ScopedSession`.
:param session_factory: a callable function that produces
- :class:`Session` instances, such as :func:`sessionmaker` or
- :func:`create_session`.
+ :class:`Session` instances, such as :func:`sessionmaker`.
:param scopefunc: optional, TODO
@@ -141,7 +141,12 @@ def scoped_session(session_factory, scopefunc=None):
return ScopedSession(session_factory, scopefunc=scopefunc)
def create_session(bind=None, **kwargs):
- """Create a new :class:`~sqlalchemy.orm.session.Session`.
+ """Create a new :class:`.Session`
+ with no automation enabled by default.
+
+ This function is used primarily for testing. The usual
+ route to :class:`.Session` creation is via its constructor
+ or the :func:`.sessionmaker` function.
:param bind: optional, a single Connectable to use for all
database access in the created
@@ -169,7 +174,7 @@ def create_session(bind=None, **kwargs):
kwargs.setdefault('autoflush', False)
kwargs.setdefault('autocommit', True)
kwargs.setdefault('expire_on_commit', False)
- return _Session(bind=bind, **kwargs)
+ return Session(bind=bind, **kwargs)
def relationship(argument, secondary=None, **kwargs):
"""Provide a relationship of a primary Mapper to a secondary Mapper.
@@ -666,22 +671,27 @@ def mapper(class_, local_table=None, *args, **params):
:param concrete: If True, indicates this mapper should use concrete
table inheritance with its parent mapper.
- :param exclude_properties: A list of properties not to map. Columns
- present in the mapped table and present in this list will not be
- automatically converted into properties. Note that neither this
- option nor include_properties will allow an end-run around Python
- inheritance. If mapped class ``B`` inherits from mapped class
- ``A``, no combination of includes or excludes will allow ``B`` to
- have fewer properties than its superclass, ``A``.
+ :param exclude_properties: A list or set of string column names to
+ be excluded from mapping. As of SQLAlchemy 0.6.4, this collection
+ may also include :class:`.Column` objects. Columns named or present
+ in this list will not be automatically mapped. Note that neither
+ this option nor include_properties will allow one to circumvent plain
+ Python inheritance - if mapped class ``B`` inherits from mapped
+ class ``A``, no combination of includes or excludes will allow ``B``
+ to have fewer properties than its superclass, ``A``.
:param extension: A :class:`.MapperExtension` instance or
list of :class:`~sqlalchemy.orm.interfaces.MapperExtension`
instances which will be applied to all operations by this
:class:`~sqlalchemy.orm.mapper.Mapper`.
- :param include_properties: An inclusive list of properties to map.
- Columns present in the mapped table but not present in this list
- will not be automatically converted into properties.
+ :param include_properties: An inclusive list or set of string column
+ names to map. As of SQLAlchemy 0.6.4, this collection may also
+ include :class:`.Column` objects in order to disambiguate between
+ same-named columns in a selectable (such as a
+ :func:`~.expression.join()`). If this list is not ``None``, columns
+ present in the mapped table but not named or present in this list
+ will not be automatically mapped. See also "exclude_properties".
:param inherits: Another :class:`~sqlalchemy.orm.Mapper` for which
this :class:`~sqlalchemy.orm.Mapper` will have an inheritance
diff --git a/lib/sqlalchemy/orm/collections.py b/lib/sqlalchemy/orm/collections.py
index b5c4353b3..a9ad34239 100644
--- a/lib/sqlalchemy/orm/collections.py
+++ b/lib/sqlalchemy/orm/collections.py
@@ -129,7 +129,8 @@ def column_mapped_collection(mapping_spec):
from sqlalchemy.orm.util import _state_mapper
from sqlalchemy.orm.attributes import instance_state
- cols = [expression._only_column_elements(q) for q in util.to_list(mapping_spec)]
+ cols = [expression._only_column_elements(q, "mapping_spec")
+ for q in util.to_list(mapping_spec)]
if len(cols) == 1:
def keyfunc(value):
state = instance_state(value)
diff --git a/lib/sqlalchemy/orm/evaluator.py b/lib/sqlalchemy/orm/evaluator.py
index 3ee70782d..e3cbffe98 100644
--- a/lib/sqlalchemy/orm/evaluator.py
+++ b/lib/sqlalchemy/orm/evaluator.py
@@ -35,7 +35,8 @@ class EvaluatorCompiler(object):
def visit_column(self, clause):
if 'parentmapper' in clause._annotations:
- key = clause._annotations['parentmapper']._get_col_to_prop(clause).key
+ key = clause._annotations['parentmapper'].\
+ _columntoproperty[clause].key
else:
key = clause.key
get_corresponding_attr = operator.attrgetter(key)
diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py
index ddb08039a..7c64f20db 100644
--- a/lib/sqlalchemy/orm/mapper.py
+++ b/lib/sqlalchemy/orm/mapper.py
@@ -43,6 +43,7 @@ _new_mappers = False
_already_compiling = False
_none_set = frozenset([None])
+_memoized_compiled_property = util.group_expirable_memoized_property()
# a list of MapperExtensions that will be installed in all mappers by default
global_extensions = []
@@ -192,8 +193,14 @@ class Mapper(object):
else:
self.polymorphic_map = _polymorphic_map
- self.include_properties = include_properties
- self.exclude_properties = exclude_properties
+ if include_properties:
+ self.include_properties = util.to_set(include_properties)
+ else:
+ self.include_properties = None
+ if exclude_properties:
+ self.exclude_properties = util.to_set(exclude_properties)
+ else:
+ self.exclude_properties = None
self.compiled = False
@@ -210,6 +217,7 @@ class Mapper(object):
global _new_mappers
_new_mappers = True
self._log("constructed")
+ self._expire_memoizations()
finally:
_COMPILE_MUTEX.release()
@@ -274,11 +282,6 @@ class Mapper(object):
self.version_id_col = self.inherits.version_id_col
self.version_id_generator = self.inherits.version_id_generator
- for mapper in self.iterate_to_root():
- util.reset_memoized(mapper, '_equivalent_columns')
- util.reset_memoized(mapper, '_sorted_tables')
- util.reset_memoized(mapper, '_compiled_cache')
-
if self.order_by is False and \
not self.concrete and \
self.inherits.order_by is not False:
@@ -470,7 +473,7 @@ class Mapper(object):
for col in self._columntoproperty
if not hasattr(col, 'table') or
col.table not in self._cols_by_table)
-
+
# if explicit PK argument sent, add those columns to the
# primary key mappings
if self.primary_key_argument:
@@ -478,13 +481,14 @@ class Mapper(object):
if k.table not in self._pks_by_table:
self._pks_by_table[k.table] = util.OrderedSet()
self._pks_by_table[k.table].add(k)
-
- if self.mapped_table not in self._pks_by_table or \
- len(self._pks_by_table[self.mapped_table]) == 0:
- raise sa_exc.ArgumentError(
- "Mapper %s could not assemble any primary "
- "key columns for mapped table '%s'" %
- (self, self.mapped_table.description))
+
+ # otherwise, see that we got a full PK for the mapped table
+ elif self.mapped_table not in self._pks_by_table or \
+ len(self._pks_by_table[self.mapped_table]) == 0:
+ raise sa_exc.ArgumentError(
+ "Mapper %s could not assemble any primary "
+ "key columns for mapped table '%s'" %
+ (self, self.mapped_table.description))
if self.inherits and \
not self.concrete and \
@@ -525,7 +529,7 @@ class Mapper(object):
# table columns mapped to lists of MapperProperty objects
# using a list allows a single column to be defined as
# populating multiple object attributes
- self._columntoproperty = util.column_dict()
+ self._columntoproperty = _ColumnMapping(self)
# load custom properties
if self._init_properties:
@@ -536,7 +540,7 @@ class Mapper(object):
if self.inherits:
for key, prop in self.inherits._props.iteritems():
if key not in self._props and \
- not self._should_exclude(key, key, local=False):
+ not self._should_exclude(key, key, local=False, column=None):
self._adapt_inherited_property(key, prop, False)
# create properties for each column in the mapped table,
@@ -549,7 +553,8 @@ class Mapper(object):
if self._should_exclude(
column.key, column_key,
- local=self.local_table.c.contains_column(column)
+ local=self.local_table.c.contains_column(column),
+ column=column
):
continue
@@ -582,7 +587,7 @@ class Mapper(object):
% col.description)
else:
instrument = True
- if self._should_exclude(col.key, col.key, local=False):
+ if self._should_exclude(col.key, col.key, local=False, column=col):
raise sa_exc.InvalidRequestError(
"Cannot exclude or override the discriminator column %r" %
col.key)
@@ -624,8 +629,18 @@ class Mapper(object):
# existing ColumnProperty from an inheriting mapper.
# make a copy and append our column to it
prop = prop.copy()
+ else:
+ util.warn(
+ "Implicitly combining column %s with column "
+ "%s under attribute '%s'. This usage will be "
+ "prohibited in 0.7. Please configure one "
+ "or more attributes for these same-named columns "
+ "explicitly."
+ % (prop.columns[-1], column, key))
+
prop.columns.append(column)
self._log("appending to existing ColumnProperty %s" % (key))
+
elif prop is None or isinstance(prop, ConcreteInheritedProperty):
mapped_column = []
for c in columns:
@@ -790,6 +805,7 @@ class Mapper(object):
self._compile_failed = exc
raise
finally:
+ self._expire_memoizations()
_COMPILE_MUTEX.release()
def _post_configure_properties(self):
@@ -834,7 +850,11 @@ class Mapper(object):
"""
self._init_properties[key] = prop
self._configure_property(key, prop, init=self.compiled)
+ self._expire_memoizations()
+ def _expire_memoizations(self):
+ for mapper in self.iterate_to_root():
+ _memoized_compiled_property.expire_instance(mapper)
def _log(self, msg, *args):
self.logger.info(
@@ -927,7 +947,7 @@ class Mapper(object):
"""
if spec == '*':
- mappers = list(self.polymorphic_iterator())
+ mappers = list(self.self_and_descendants)
elif spec:
mappers = [_class_to_mapper(m) for m in util.to_list(spec)]
for m in mappers:
@@ -966,7 +986,7 @@ class Mapper(object):
return from_obj
- @property
+ @_memoized_compiled_property
def _single_table_criterion(self):
if self.single and \
self.inherits and \
@@ -974,18 +994,17 @@ class Mapper(object):
self.polymorphic_identity is not None:
return self.polymorphic_on.in_(
m.polymorphic_identity
- for m in self.polymorphic_iterator())
+ for m in self.self_and_descendants)
else:
return None
-
-
- @util.memoized_property
+
+ @_memoized_compiled_property
def _with_polymorphic_mappers(self):
if not self.with_polymorphic:
return [self]
return self._mappers_from_spec(*self.with_polymorphic)
- @util.memoized_property
+ @_memoized_compiled_property
def _with_polymorphic_selectable(self):
if not self.with_polymorphic:
return self.mapped_table
@@ -1010,6 +1029,11 @@ class Mapper(object):
else:
return mappers, self._selectable_from_mappers(mappers)
+ @_memoized_compiled_property
+ def _polymorphic_properties(self):
+ return tuple(self._iterate_polymorphic_properties(
+ self._with_polymorphic_mappers))
+
def _iterate_polymorphic_properties(self, mappers=None):
"""Return an iterator of MapperProperty objects which will render into
a SELECT."""
@@ -1041,7 +1065,7 @@ class Mapper(object):
"provided by the get_property() and iterate_properties "
"accessors.")
- @util.memoized_property
+ @_memoized_compiled_property
def _get_clause(self):
"""create a "get clause" based on the primary key. this is used
by query.get() and many-to-one lazyloads to load this item
@@ -1053,7 +1077,7 @@ class Mapper(object):
return sql.and_(*[k==v for (k, v) in params]), \
util.column_dict(params)
- @util.memoized_property
+ @_memoized_compiled_property
def _equivalent_columns(self):
"""Create a map of all *equivalent* columns, based on
the determination of column pairs that are equated to
@@ -1085,7 +1109,7 @@ class Mapper(object):
result[binary.right].add(binary.left)
else:
result[binary.right] = util.column_set((binary.left,))
- for mapper in self.base_mapper.polymorphic_iterator():
+ for mapper in self.base_mapper.self_and_descendants:
if mapper.inherit_condition is not None:
visitors.traverse(
mapper.inherit_condition, {},
@@ -1098,7 +1122,7 @@ class Mapper(object):
(MapperProperty, attributes.InstrumentedAttribute)) and \
hasattr(obj, '__get__')
- def _should_exclude(self, name, assigned_name, local):
+ def _should_exclude(self, name, assigned_name, local, column):
"""determine whether a particular property should be implicitly
present on the class.
@@ -1120,13 +1144,17 @@ class Mapper(object):
getattr(self.class_, assigned_name)):
return True
- if (self.include_properties is not None and
- name not in self.include_properties):
+ if self.include_properties is not None and \
+ name not in self.include_properties and \
+ (column is None or column not in self.include_properties):
self._log("not including property %s" % (name))
return True
- if (self.exclude_properties is not None and
- name in self.exclude_properties):
+ if self.exclude_properties is not None and \
+ (
+ name in self.exclude_properties or \
+ (column is not None and column in self.exclude_properties)
+ ):
self._log("excluding property %s" % (name))
return True
@@ -1159,6 +1187,22 @@ class Mapper(object):
yield m
m = m.inherits
+ @_memoized_compiled_property
+ def self_and_descendants(self):
+ """The collection including this mapper and all descendant mappers.
+
+ This includes not just the immediately inheriting mappers but
+ all their inheriting mappers as well.
+
+ """
+ descendants = []
+ stack = deque([self])
+ while stack:
+ item = stack.popleft()
+ descendants.append(item)
+ stack.extend(item._inheriting_mappers)
+ return tuple(descendants)
+
def polymorphic_iterator(self):
"""Iterate through the collection including this mapper and
all descendant mappers.
@@ -1168,14 +1212,9 @@ class Mapper(object):
To iterate through an entire hierarchy, use
``mapper.base_mapper.polymorphic_iterator()``.
-
+
"""
- stack = deque([self])
- while stack:
- item = stack.popleft()
- yield item
- stack.extend(item._inheriting_mappers)
-
+ return iter(self.self_and_descendants)
def primary_mapper(self):
"""Return the primary mapper corresponding to this mapper's class key
@@ -1239,33 +1278,15 @@ class Mapper(object):
def _primary_key_from_state(self, state):
dict_ = state.dict
- return [
- self._get_state_attr_by_column(state, dict_, column) for
- column in self.primary_key]
-
- def _get_col_to_prop(self, column):
- try:
- return self._columntoproperty[column]
- except KeyError:
- prop = self._props.get(column.key, None)
- if prop:
- raise orm_exc.UnmappedColumnError(
- "Column '%s.%s' is not available, due to "
- "conflicting property '%s':%r" %
- (column.table.name, column.name,
- column.key, prop))
- else:
- raise orm_exc.UnmappedColumnError(
- "No column %s is configured on mapper %s..." %
- (column, self))
+ return [self._get_state_attr_by_column(state, dict_, column) for
+ column in self.primary_key]
# TODO: improve names?
def _get_state_attr_by_column(self, state, dict_, column):
- return self._get_col_to_prop(column)._getattr(state, dict_, column)
+ return self._columntoproperty[column]._getattr(state, dict_, column)
def _set_state_attr_by_column(self, state, dict_, column, value):
- return self._get_col_to_prop(column).\
- _setattr(state, dict_, value, column)
+ return self._columntoproperty[column]._setattr(state, dict_, value, column)
def _get_committed_attr_by_column(self, obj, column):
state = attributes.instance_state(obj)
@@ -1274,9 +1295,8 @@ class Mapper(object):
def _get_committed_state_attr_by_column(self, state, dict_, column,
passive=False):
- return self._get_col_to_prop(column).\
- _getcommitted(state, dict_,
- column, passive=passive)
+ return self._columntoproperty[column]._getcommitted(
+ state, dict_, column, passive=passive)
def _optimized_get_statement(self, state, attribute_names):
"""assemble a WHERE clause which retrieves a given state by primary
@@ -1386,14 +1406,14 @@ class Mapper(object):
except StopIteration:
visitables.pop()
- @util.memoized_property
+ @_memoized_compiled_property
def _compiled_cache(self):
return util.LRUCache(self._compiled_cache_size)
- @util.memoized_property
+ @_memoized_compiled_property
def _sorted_tables(self):
table_to_mapper = {}
- for mapper in self.base_mapper.polymorphic_iterator():
+ for mapper in self.base_mapper.self_and_descendants:
for t in mapper.tables:
table_to_mapper[t] = mapper
@@ -2410,7 +2430,8 @@ def _load_scalar_attributes(state, attribute_names):
# this codepath is rare - only valid when inside a flush, and the
# object is becoming persistent but hasn't yet been assigned an identity_key.
# check here to ensure we have the attrs we need.
- pk_attrs = [mapper._get_col_to_prop(col).key for col in mapper.primary_key]
+ pk_attrs = [mapper._columntoproperty[col].key
+ for col in mapper.primary_key]
if state.expired_attributes.intersection(pk_attrs):
raise sa_exc.InvalidRequestError("Instance %s cannot be refreshed - it's not "
" persistent and does not "
@@ -2437,3 +2458,21 @@ def _load_scalar_attributes(state, attribute_names):
raise orm_exc.ObjectDeletedError(
"Instance '%s' has been deleted." %
state_str(state))
+
+
+class _ColumnMapping(util.py25_dict):
+ """Error reporting helper for mapper._columntoproperty."""
+
+ def __init__(self, mapper):
+ self.mapper = mapper
+
+ def __missing__(self, column):
+ prop = self.mapper._props.get(column)
+ if prop:
+ raise orm_exc.UnmappedColumnError(
+ "Column '%s.%s' is not available, due to "
+ "conflicting property '%s':%r" % (
+ column.table.name, column.name, column.key, prop))
+ raise orm_exc.UnmappedColumnError(
+ "No column %s is configured on mapper %s..." %
+ (column, self.mapper))
diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py
index 5788c30f9..263b611a5 100644
--- a/lib/sqlalchemy/orm/properties.py
+++ b/lib/sqlalchemy/orm/properties.py
@@ -60,7 +60,17 @@ class ColumnProperty(StrategizedProperty):
self.__class__.Comparator)
self.descriptor = kwargs.pop('descriptor', None)
self.extension = kwargs.pop('extension', None)
- self.doc = kwargs.pop('doc', getattr(columns[0], 'doc', None))
+
+ if 'doc' in kwargs:
+ self.doc = kwargs.pop('doc')
+ else:
+ for col in reversed(self.columns):
+ doc = getattr(col, 'doc', None)
+ if doc is not None:
+ self.doc = doc
+ break
+ else:
+ self.doc = None
if kwargs:
raise TypeError(
@@ -926,16 +936,17 @@ class RelationshipProperty(StrategizedProperty):
for attr in 'primaryjoin', 'secondaryjoin':
val = getattr(self, attr)
if val is not None:
- util.assert_arg_type(val, sql.ColumnElement, attr)
- setattr(self, attr, _orm_deannotate(val))
+ setattr(self, attr, _orm_deannotate(
+ expression._only_column_elements(val, attr))
+ )
if self.order_by is not False and self.order_by is not None:
- self.order_by = [expression._literal_as_column(x) for x in
+ self.order_by = [expression._only_column_elements(x, "order_by") for x in
util.to_list(self.order_by)]
self._user_defined_foreign_keys = \
- util.column_set(expression._literal_as_column(x) for x in
+ util.column_set(expression._only_column_elements(x, "foreign_keys") for x in
util.to_column_set(self._user_defined_foreign_keys))
self.remote_side = \
- util.column_set(expression._literal_as_column(x) for x in
+ util.column_set(expression._only_column_elements(x, "remote_side") for x in
util.to_column_set(self.remote_side))
if not self.parent.concrete:
for inheriting in self.parent.iterate_to_root():
diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py
index c33687484..fdc426a07 100644
--- a/lib/sqlalchemy/orm/query.py
+++ b/lib/sqlalchemy/orm/query.py
@@ -6,11 +6,11 @@
"""The Query class and support.
-Defines the :class:`~sqlalchemy.orm.query.Query` class, the central
+Defines the :class:`.Query` class, the central
construct used by the ORM to construct database queries.
-The ``Query`` class should not be confused with the
-:class:`~sqlalchemy.sql.expression.Select` class, which defines database
+The :class:`.Query` class should not be confused with the
+:class:`.Select` class, which defines database
SELECT operations at the SQL (non-ORM) level. ``Query`` differs from
``Select`` in that it returns ORM-mapped objects and interacts with an
ORM session, whereas the ``Select`` construct interacts directly with the
@@ -596,6 +596,25 @@ class Query(object):
@_generative()
def correlate(self, *args):
+ """Return a :class:`.Query` construct which will correlate the given
+ FROM clauses to that of an enclosing :class:`.Query` or
+ :func:`~.expression.select`.
+
+ The method here accepts mapped classes, :func:`.aliased` constructs,
+ and :func:`.mapper` constructs as arguments, which are resolved into
+ expression constructs, in addition to appropriate expression
+ constructs.
+
+ The correlation arguments are ultimately passed to
+ :meth:`.Select.correlate` after coercion to expression constructs.
+
+ The correlation arguments take effect in such cases
+ as when :meth:`.Query.from_self` is used, or when
+ a subquery as returned by :meth:`.Query.subquery` is
+ embedded in another :func:`~.expression.select` construct.
+
+ """
+
self._correlate = self._correlate.union(
_orm_selectable(s)
for s in args)
@@ -2548,8 +2567,12 @@ class _MapperEntity(_QueryEntity):
)
)
- for value in self.mapper._iterate_polymorphic_properties(
- self._with_polymorphic):
+ if self._with_polymorphic:
+ poly_properties = self.mapper._iterate_polymorphic_properties(
+ self._with_polymorphic)
+ else:
+ poly_properties = self.mapper._polymorphic_properties
+ for value in poly_properties:
if query._only_load_props and \
value.key not in query._only_load_props:
continue
diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py
index c67dc5553..4727de218 100644
--- a/lib/sqlalchemy/orm/session.py
+++ b/lib/sqlalchemy/orm/session.py
@@ -278,8 +278,11 @@ class SessionTransaction(object):
for s in set(self._new).union(self.session._new):
self.session._expunge_state(s)
-
+
for s in set(self._deleted).union(self.session._deleted):
+ if s.deleted:
+ # assert s in self._deleted
+ del s.deleted
self.session._update_impl(s)
assert not self.session._deleted
@@ -494,8 +497,8 @@ class Session(object):
issue any SQL in order to load collections or attributes which are not
yet loaded, or were marked as "expired".
- The session methods which control instance state include ``add()``,
- ``delete()``, ``merge()``, and ``expunge()``.
+ The session methods which control instance state include :meth:`.Session.add`,
+ :meth:`.Session.delete`, :meth:`.Session.merge`, and :meth:`.Session.expunge`.
The Session object is generally **not** threadsafe. A session which is
set to ``autocommit`` and is only read from may be used by concurrent
@@ -1102,6 +1105,7 @@ class Session(object):
self.identity_map.discard(state)
self._deleted.pop(state, None)
+ state.deleted = True
def _save_without_cascade(self, instance):
"""Used by scoping.py to save on init without cascade."""
@@ -1309,7 +1313,13 @@ class Session(object):
raise sa_exc.InvalidRequestError(
"Instance '%s' is not persisted" %
mapperutil.state_str(state))
-
+
+ if state.deleted:
+ raise sa_exc.InvalidRequestError(
+ "Instance '%s' has been deleted. Use the make_transient() "
+ "function to send this object back to the transient state." %
+ mapperutil.state_str(state)
+ )
self._attach(state)
self._deleted.pop(state, None)
self.identity_map.add(state)
@@ -1655,7 +1665,9 @@ def make_transient(instance):
This will remove its association with any
session and additionally will remove its "identity key",
such that it's as though the object were newly constructed,
- except retaining its values.
+ except retaining its values. It also resets the
+ "deleted" flag on the state if this object
+ had been explicitly deleted by its session.
Attributes which were "expired" or deferred at the
instance level are reverted to undefined, and
@@ -1670,13 +1682,23 @@ def make_transient(instance):
# remove expired state and
# deferred callables
state.callables.clear()
- del state.key
-
+ if state.key:
+ del state.key
+ if state.deleted:
+ del state.deleted
def object_session(instance):
- """Return the ``Session`` to which instance belongs, or None."""
+ """Return the ``Session`` to which instance belongs.
+
+ If the instance is not a mapped instance, an error is raised.
- return _state_session(attributes.instance_state(instance))
+ """
+
+ try:
+ return _state_session(attributes.instance_state(instance))
+ except exc.NO_STATE:
+ raise exc.UnmappedInstanceError(instance)
+
def _state_session(state):
if state.session_id:
diff --git a/lib/sqlalchemy/orm/state.py b/lib/sqlalchemy/orm/state.py
index a02f2dafd..a5f1ed340 100644
--- a/lib/sqlalchemy/orm/state.py
+++ b/lib/sqlalchemy/orm/state.py
@@ -22,6 +22,7 @@ class InstanceState(object):
_strong_obj = None
modified = False
expired = False
+ deleted = False
def __init__(self, obj, manager):
self.class_ = obj.__class__
diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py
index 96e6ff627..3778bb2f9 100644
--- a/lib/sqlalchemy/orm/strategies.py
+++ b/lib/sqlalchemy/orm/strategies.py
@@ -49,7 +49,7 @@ def _register_attribute(strategy, mapper, useobject,
attribute_ext.append(sessionlib.UOWEventHandler(prop.key))
- for m in mapper.polymorphic_iterator():
+ for m in mapper.self_and_descendants:
if prop is m._props.get(prop.key):
attributes.register_attribute_impl(
@@ -119,17 +119,20 @@ class ColumnLoader(LoaderStrategy):
)
def create_row_processor(self, selectcontext, path, mapper, row, adapter):
- key, col = self.key, self.columns[0]
- if adapter:
- col = adapter.columns[col]
-
- if col is not None and col in row:
- def new_execute(state, dict_, row):
- dict_[key] = row[col]
+ key = self.key
+ # look through list of columns represented here
+ # to see which, if any, is present in the row.
+ for col in self.columns:
+ if adapter:
+ col = adapter.columns[col]
+ if col is not None and col in row:
+ def new_execute(state, dict_, row):
+ dict_[key] = row[col]
+ return new_execute, None
else:
def new_execute(state, dict_, row):
state.expire_attribute_pre_commit(dict_, key)
- return new_execute, None
+ return new_execute, None
log.class_logger(ColumnLoader)
@@ -694,7 +697,7 @@ class SubqueryLoader(AbstractRelationshipLoader):
leftmost_cols, remote_cols = self._local_remote_columns(leftmost_prop)
leftmost_attr = [
- leftmost_mapper._get_col_to_prop(c).class_attribute
+ leftmost_mapper._columntoproperty[c].class_attribute
for c in leftmost_cols
]
@@ -741,7 +744,7 @@ class SubqueryLoader(AbstractRelationshipLoader):
self._local_remote_columns(self.parent_property)
local_attr = [
- getattr(parent_alias, self.parent._get_col_to_prop(c).key)
+ getattr(parent_alias, self.parent._columntoproperty[c].key)
for c in local_cols
]
q = q.order_by(*local_attr)
@@ -823,7 +826,7 @@ class SubqueryLoader(AbstractRelationshipLoader):
local_cols, remote_cols = self._local_remote_columns(self.parent_property)
remote_attr = [
- self.mapper._get_col_to_prop(c).key
+ self.mapper._columntoproperty[c].key
for c in remote_cols]
q = context.attributes[('subquery', path)]
@@ -941,7 +944,7 @@ class EagerLoader(AbstractRelationshipLoader):
("eager_row_processor", reduced_path)
] = clauses
- for value in self.mapper._iterate_polymorphic_properties():
+ for value in self.mapper._polymorphic_properties:
value.setup(
context,
entity,
diff --git a/lib/sqlalchemy/orm/sync.py b/lib/sqlalchemy/orm/sync.py
index 3b2a291bd..05298767d 100644
--- a/lib/sqlalchemy/orm/sync.py
+++ b/lib/sqlalchemy/orm/sync.py
@@ -71,7 +71,7 @@ def source_modified(uowcommit, source, source_mapper, synchronize_pairs):
"""
for l, r in synchronize_pairs:
try:
- prop = source_mapper._get_col_to_prop(l)
+ prop = source_mapper._columntoproperty[l]
except exc.UnmappedColumnError:
_raise_col_to_prop(False, source_mapper, l, None, r)
history = uowcommit.get_attribute_history(source, prop.key, passive=True)
diff --git a/lib/sqlalchemy/orm/unitofwork.py b/lib/sqlalchemy/orm/unitofwork.py
index e10891924..830ac3c0c 100644
--- a/lib/sqlalchemy/orm/unitofwork.py
+++ b/lib/sqlalchemy/orm/unitofwork.py
@@ -219,7 +219,7 @@ class UOWTransaction(object):
def states_for_mapper_hierarchy(self, mapper, isdelete, listonly):
checktup = (isdelete, listonly)
- for mapper in mapper.base_mapper.polymorphic_iterator():
+ for mapper in mapper.base_mapper.self_and_descendants:
for state in self.mappers[mapper]:
if self.states[state] == checktup:
yield state
@@ -318,11 +318,11 @@ class IterateMappersMixin(object):
def _mappers(self, uow):
if self.fromparent:
return iter(
- m for m in self.dependency_processor.parent.polymorphic_iterator()
+ m for m in self.dependency_processor.parent.self_and_descendants
if uow._mapper_for_dep[(m, self.dependency_processor)]
)
else:
- return self.dependency_processor.mapper.polymorphic_iterator()
+ return self.dependency_processor.mapper.self_and_descendants
class Preprocess(IterateMappersMixin):
def __init__(self, dependency_processor, fromparent):
diff --git a/lib/sqlalchemy/sql/expression.py b/lib/sqlalchemy/sql/expression.py
index a7f5d396a..d184816ab 100644
--- a/lib/sqlalchemy/sql/expression.py
+++ b/lib/sqlalchemy/sql/expression.py
@@ -1037,12 +1037,12 @@ def _no_literals(element):
else:
return element
-def _only_column_elements(element):
+def _only_column_elements(element, name):
if hasattr(element, '__clause_element__'):
element = element.__clause_element__()
if not isinstance(element, ColumnElement):
- raise exc.ArgumentError("Column-based expression object expected; "
- "got: %r" % element)
+ raise exc.ArgumentError("Column-based expression object expected for argument '%s'; "
+ "got: '%s', type %s" % (name, element, type(element)))
return element
def _corresponding_column_or_error(fromclause, column,
@@ -4045,15 +4045,15 @@ class Select(_SelectBaseMixin, FromClause):
"""return a new select() construct which will correlate the given FROM
clauses to that of an enclosing select(), if a match is found.
- By "match", the given fromclause must be present in this select's
+ By "match", the given fromclause must be present in this select's
list of FROM objects and also present in an enclosing select's list of
FROM objects.
- Calling this method turns off the select's default behavior of
+ Calling this method turns off the select's default behavior of
"auto-correlation". Normally, select() auto-correlates all of its FROM
clauses to those of an embedded select when compiled.
- If the fromclause is None, correlation is disabled for the returned
+ If the fromclause is None, correlation is disabled for the returned
select().
"""
diff --git a/lib/sqlalchemy/test/requires.py b/lib/sqlalchemy/test/requires.py
index fefb00330..501f0e24d 100644
--- a/lib/sqlalchemy/test/requires.py
+++ b/lib/sqlalchemy/test/requires.py
@@ -257,7 +257,7 @@ def reflects_pk_names(fn):
"""Target driver reflects the name of primary key constraints."""
return _chain_decorators_on(
fn,
- fails_on_everything_except('postgresql')
+ fails_on_everything_except('postgresql', 'oracle')
)
def python2(fn):
diff --git a/lib/sqlalchemy/types.py b/lib/sqlalchemy/types.py
index 777353714..f00cc2e3c 100644
--- a/lib/sqlalchemy/types.py
+++ b/lib/sqlalchemy/types.py
@@ -16,9 +16,9 @@ __all__ = [ 'TypeEngine', 'TypeDecorator', 'AbstractType', 'UserDefinedType',
'FLOAT', 'NUMERIC', 'DECIMAL', 'TIMESTAMP', 'DATETIME', 'CLOB',
'BLOB', 'BOOLEAN', 'SMALLINT', 'INTEGER', 'DATE', 'TIME',
'String', 'Integer', 'SmallInteger', 'BigInteger', 'Numeric',
- 'Float', 'DateTime', 'Date', 'Time', 'LargeBinary', 'Binary', 'Boolean',
- 'Unicode', 'MutableType', 'Concatenable', 'UnicodeText',
- 'PickleType', 'Interval', 'type_map', 'Enum' ]
+ 'Float', 'DateTime', 'Date', 'Time', 'LargeBinary', 'Binary',
+ 'Boolean', 'Unicode', 'MutableType', 'Concatenable',
+ 'UnicodeText','PickleType', 'Interval', 'type_map', 'Enum' ]
import inspect
import datetime as dt
@@ -35,18 +35,13 @@ from sqlalchemy import util
from sqlalchemy import processors
import collections
+DefaultDialect = None
NoneType = type(None)
if util.jython:
import array
class AbstractType(Visitable):
- def __init__(self, *args, **kwargs):
- pass
-
- def compile(self, dialect):
- return dialect.type_compiler.process(self)
-
def copy_value(self, value):
return value
@@ -107,7 +102,8 @@ class AbstractType(Visitable):
@util.memoized_property
def _type_affinity(self):
- """Return a rudimental 'affinity' value expressing the general class of type."""
+ """Return a rudimental 'affinity' value expressing the general class
+ of type."""
typ = None
for t in self.__class__.__mro__:
@@ -120,7 +116,8 @@ class AbstractType(Visitable):
def _coerce_compared_value(self, op, value):
_coerced_type = type_map.get(type(value), NULLTYPE)
- if _coerced_type is NULLTYPE or _coerced_type._type_affinity is self._type_affinity:
+ if _coerced_type is NULLTYPE or _coerced_type._type_affinity \
+ is self._type_affinity:
return self
else:
return _coerced_type
@@ -128,6 +125,30 @@ class AbstractType(Visitable):
def _compare_type_affinity(self, other):
return self._type_affinity is other._type_affinity
+ def compile(self, dialect=None):
+ # arg, return value is inconsistent with
+ # ClauseElement.compile()....this is a mistake.
+
+ if not dialect:
+ global DefaultDialect
+ if DefaultDialect is None:
+ from sqlalchemy.engine.default import DefaultDialect
+ dialect = DefaultDialect()
+
+ return dialect.type_compiler.process(self)
+
+ def __str__(self):
+ # Py3K
+ #return unicode(self.compile())
+ # Py2K
+ return unicode(self.compile()).encode('ascii', 'backslashreplace')
+ # end Py2K
+
+ def __init__(self, *args, **kwargs):
+ # supports getargspec of the __init__ method
+ # used by generic __repr__
+ pass
+
def __repr__(self):
return "%s(%s)" % (
self.__class__.__name__,
@@ -142,12 +163,12 @@ class TypeEngine(AbstractType):
return {}
def dialect_impl(self, dialect, **kwargs):
- key = (dialect.__class__, dialect.server_version_info)
-
+ key = dialect.__class__, dialect.server_version_info
try:
return self._impl_dict[key]
except KeyError:
- return self._impl_dict.setdefault(key, dialect.type_descriptor(self))
+ return self._impl_dict.setdefault(key,
+ dialect.type_descriptor(self))
def __getstate__(self):
d = self.__dict__.copy()
@@ -268,11 +289,11 @@ class TypeDecorator(AbstractType):
given; in this case, the "impl" variable can reference
``TypeEngine`` as a placeholder.
- Types that receive a Python type that isn't similar to the
- ultimate type used may want to define the :meth:`TypeDecorator.coerce_compared_value`
- method. This is used to give the expression system a hint
- when coercing Python objects into bind parameters within expressions.
- Consider this expression::
+ Types that receive a Python type that isn't similar to the ultimate type
+ used may want to define the :meth:`TypeDecorator.coerce_compared_value`
+ method. This is used to give the expression system a hint when coercing
+ Python objects into bind parameters within expressions. Consider this
+ expression::
mytable.c.somecol + datetime.date(2009, 5, 15)
@@ -282,10 +303,10 @@ class TypeDecorator(AbstractType):
The expression system does the right thing by not attempting to
coerce the "date()" value into an integer-oriented bind parameter.
- However, in the case of ``TypeDecorator``, we are usually changing
- an incoming Python type to something new - ``TypeDecorator`` by
- default will "coerce" the non-typed side to be the same type as itself.
- Such as below, we define an "epoch" type that stores a date value as an integer::
+ However, in the case of ``TypeDecorator``, we are usually changing an
+ incoming Python type to something new - ``TypeDecorator`` by default will
+ "coerce" the non-typed side to be the same type as itself. Such as below,
+ we define an "epoch" type that stores a date value as an integer::
class MyEpochType(types.TypeDecorator):
impl = types.Integer
@@ -301,10 +322,11 @@ class TypeDecorator(AbstractType):
Our expression of ``somecol + date`` with the above type will coerce the
"date" on the right side to also be treated as ``MyEpochType``.
- This behavior can be overridden via the :meth:`~TypeDecorator.coerce_compared_value`
- method, which returns a type that should be used for the value of the expression.
- Below we set it such that an integer value will be treated as an ``Integer``,
- and any other value is assumed to be a date and will be treated as a ``MyEpochType``::
+ This behavior can be overridden via the
+ :meth:`~TypeDecorator.coerce_compared_value` method, which returns a type
+ that should be used for the value of the expression. Below we set it such
+ that an integer value will be treated as an ``Integer``, and any other
+ value is assumed to be a date and will be treated as a ``MyEpochType``::
def coerce_compared_value(self, op, value):
if isinstance(value, int):
@@ -318,8 +340,10 @@ class TypeDecorator(AbstractType):
def __init__(self, *args, **kwargs):
if not hasattr(self.__class__, 'impl'):
- raise AssertionError("TypeDecorator implementations require a class-level "
- "variable 'impl' which refers to the class of type being decorated")
+ raise AssertionError("TypeDecorator implementations "
+ "require a class-level variable "
+ "'impl' which refers to the class of "
+ "type being decorated")
self.impl = self.__class__.impl(*args, **kwargs)
def adapt(self, cls):
@@ -346,8 +370,10 @@ class TypeDecorator(AbstractType):
typedesc = self.load_dialect_impl(dialect)
tt = self.copy()
if not isinstance(tt, self.__class__):
- raise AssertionError("Type object %s does not properly implement the copy() "
- "method, it must return an object of type %s" % (self, self.__class__))
+ raise AssertionError('Type object %s does not properly '
+ 'implement the copy() method, it must '
+ 'return an object of type %s' % (self,
+ self.__class__))
tt.impl = typedesc
self._impl_dict[key] = tt
return tt
@@ -376,7 +402,8 @@ class TypeDecorator(AbstractType):
return dialect.type_descriptor(self.impl)
def __getattr__(self, key):
- """Proxy all other undefined accessors to the underlying implementation."""
+ """Proxy all other undefined accessors to the underlying
+ implementation."""
return getattr(self.impl, key)
@@ -387,29 +414,36 @@ class TypeDecorator(AbstractType):
raise NotImplementedError()
def bind_processor(self, dialect):
- if self.__class__.process_bind_param.func_code is not TypeDecorator.process_bind_param.func_code:
+ if self.__class__.process_bind_param.func_code \
+ is not TypeDecorator.process_bind_param.func_code:
process_param = self.process_bind_param
impl_processor = self.impl.bind_processor(dialect)
if impl_processor:
def process(value):
return impl_processor(process_param(value, dialect))
+
else:
def process(value):
return process_param(value, dialect)
+
return process
else:
return self.impl.bind_processor(dialect)
def result_processor(self, dialect, coltype):
- if self.__class__.process_result_value.func_code is not TypeDecorator.process_result_value.func_code:
+ if self.__class__.process_result_value.func_code \
+ is not TypeDecorator.process_result_value.func_code:
process_value = self.process_result_value
- impl_processor = self.impl.result_processor(dialect, coltype)
+ impl_processor = self.impl.result_processor(dialect,
+ coltype)
if impl_processor:
def process(value):
return process_value(impl_processor(value), dialect)
+
else:
def process(value):
return process_value(value, dialect)
+
return process
else:
return self.impl.result_processor(dialect, coltype)
@@ -593,10 +627,12 @@ class NullType(TypeEngine):
NullTypeEngine = NullType
class Concatenable(object):
- """A mixin that marks a type as supporting 'concatenation', typically strings."""
+ """A mixin that marks a type as supporting 'concatenation',
+ typically strings."""
def _adapt_expression(self, op, othertype):
- if op is operators.add and issubclass(othertype._type_affinity, (Concatenable, NullType)):
+ if op is operators.add and issubclass(othertype._type_affinity,
+ (Concatenable, NullType)):
return operators.concat_op, self
else:
return op, self
@@ -604,8 +640,9 @@ class Concatenable(object):
class _DateAffinity(object):
"""Mixin date/time specific expression adaptations.
- Rules are implemented within Date,Time,Interval,DateTime, Numeric, Integer.
- Based on http://www.postgresql.org/docs/current/static/functions-datetime.html.
+ Rules are implemented within Date,Time,Interval,DateTime, Numeric,
+ Integer. Based on
+ http://www.postgresql.org/docs/current/static/functions-datetime.html.
"""
@@ -673,11 +710,11 @@ class String(Concatenable, TypeEngine):
set convert_unicode='force'. This will incur significant
performance overhead when fetching unicode result columns.
- :param assert_unicode: Deprecated. A warning is raised in all cases when a non-Unicode
- object is passed when SQLAlchemy would coerce into an encoding
- (note: but **not** when the DBAPI handles unicode objects natively).
- To suppress or raise this warning to an
- error, use the Python warnings filter documented at:
+ :param assert_unicode: Deprecated. A warning is raised in all cases
+ when a non-Unicode object is passed when SQLAlchemy would coerce
+ into an encoding (note: but **not** when the DBAPI handles unicode
+ objects natively). To suppress or raise this warning to an error,
+ use the Python warnings filter documented at:
http://docs.python.org/library/warnings.html
:param unicode_error: Optional, a method to use to handle Unicode
@@ -699,12 +736,14 @@ class String(Concatenable, TypeEngine):
"when unicode_error is set.")
if assert_unicode:
- util.warn_deprecated("assert_unicode is deprecated. "
- "SQLAlchemy emits a warning in all cases where it "
- "would otherwise like to encode a Python unicode object "
- "into a specific encoding but a plain bytestring is received. "
- "This does *not* apply to DBAPIs that coerce Unicode natively."
- )
+ util.warn_deprecated('assert_unicode is deprecated. '
+ 'SQLAlchemy emits a warning in all '
+ 'cases where it would otherwise like '
+ 'to encode a Python unicode object '
+ 'into a specific encoding but a plain '
+ 'bytestring is received. This does '
+ '*not* apply to DBAPIs that coerce '
+ 'Unicode natively.')
self.length = length
self.convert_unicode = convert_unicode
self.unicode_error = unicode_error
@@ -720,7 +759,8 @@ class String(Concatenable, TypeEngine):
def bind_processor(self, dialect):
if self.convert_unicode or dialect.convert_unicode:
- if dialect.supports_unicode_binds and self.convert_unicode != 'force':
+ if dialect.supports_unicode_binds and \
+ self.convert_unicode != 'force':
if self._warn_on_bytestring:
def process(value):
# Py3K
@@ -948,7 +988,8 @@ class Numeric(_DateAffinity, TypeEngine):
"""
Construct a Numeric.
- :param precision: the numeric precision for use in DDL ``CREATE TABLE``.
+ :param precision: the numeric precision for use in DDL ``CREATE
+ TABLE``.
:param scale: the numeric scale for use in DDL ``CREATE TABLE``.
@@ -1000,18 +1041,21 @@ class Numeric(_DateAffinity, TypeEngine):
# we're a "numeric", DBAPI will give us Decimal directly
return None
else:
- util.warn("Dialect %s+%s does *not* support Decimal objects natively, "
- "and SQLAlchemy must convert from floating point - "
- "rounding errors and other issues may occur. "
- "Please consider storing Decimal numbers as strings or "
- "integers on this platform for lossless storage." %
- (dialect.name, dialect.driver))
+ util.warn('Dialect %s+%s does *not* support Decimal '
+ 'objects natively, and SQLAlchemy must '
+ 'convert from floating point - rounding '
+ 'errors and other issues may occur. Please '
+ 'consider storing Decimal numbers as strings '
+ 'or integers on this platform for lossless '
+ 'storage.' % (dialect.name, dialect.driver))
# we're a "numeric", DBAPI returns floats, convert.
if self.scale is not None:
- return processors.to_decimal_processor_factory(_python_Decimal, self.scale)
+ return processors.to_decimal_processor_factory(
+ _python_Decimal, self.scale)
else:
- return processors.to_decimal_processor_factory(_python_Decimal)
+ return processors.to_decimal_processor_factory(
+ _python_Decimal)
else:
if dialect.supports_native_decimal:
return processors.to_float
@@ -1060,7 +1104,8 @@ class Float(Numeric):
"""
Construct a Float.
- :param precision: the numeric precision for use in DDL ``CREATE TABLE``.
+ :param precision: the numeric precision for use in DDL ``CREATE
+ TABLE``.
:param asdecimal: the same flag as that of :class:`Numeric`, but
defaults to ``False``. Note that setting this flag to ``True``
@@ -1277,7 +1322,8 @@ class Binary(LargeBinary):
"""Deprecated. Renamed to LargeBinary."""
def __init__(self, *arg, **kw):
- util.warn_deprecated("The Binary type has been renamed to LargeBinary.")
+ util.warn_deprecated('The Binary type has been renamed to '
+ 'LargeBinary.')
LargeBinary.__init__(self, *arg, **kw)
class SchemaType(object):
@@ -1295,36 +1341,26 @@ class SchemaType(object):
self.schema = kw.pop('schema', None)
self.metadata = kw.pop('metadata', None)
if self.metadata:
- self.metadata.append_ddl_listener(
- 'before-create',
- util.portable_instancemethod(self._on_metadata_create)
- )
- self.metadata.append_ddl_listener(
- 'after-drop',
- util.portable_instancemethod(self._on_metadata_drop)
- )
+ self.metadata.append_ddl_listener('before-create',
+ util.portable_instancemethod(self._on_metadata_create))
+ self.metadata.append_ddl_listener('after-drop',
+ util.portable_instancemethod(self._on_metadata_drop))
def _set_parent(self, column):
column._on_table_attach(util.portable_instancemethod(self._set_table))
def _set_table(self, table, column):
- table.append_ddl_listener(
- 'before-create',
- util.portable_instancemethod(self._on_table_create)
- )
- table.append_ddl_listener(
- 'after-drop',
- util.portable_instancemethod(self._on_table_drop)
- )
+ table.append_ddl_listener('before-create',
+ util.portable_instancemethod(
+ self._on_table_create))
+ table.append_ddl_listener('after-drop',
+ util.portable_instancemethod(
+ self._on_table_drop))
if self.metadata is None:
- table.metadata.append_ddl_listener(
- 'before-create',
- util.portable_instancemethod(self._on_metadata_create)
- )
- table.metadata.append_ddl_listener(
- 'after-drop',
- util.portable_instancemethod(self._on_metadata_drop)
- )
+ table.metadata.append_ddl_listener('before-create',
+ util.portable_instancemethod(self._on_metadata_create))
+ table.metadata.append_ddl_listener('after-drop',
+ util.portable_instancemethod(self._on_metadata_drop))
@property
def bind(self):
@@ -1386,40 +1422,42 @@ class Enum(String, SchemaType):
Keyword arguments which don't apply to a specific backend are ignored
by that backend.
- :param \*enums: string or unicode enumeration labels. If unicode labels
- are present, the `convert_unicode` flag is auto-enabled.
-
- :param convert_unicode: Enable unicode-aware bind parameter and result-set
- processing for this Enum's data. This is set automatically based on
- the presence of unicode label strings.
-
- :param metadata: Associate this type directly with a ``MetaData`` object.
- For types that exist on the target database as an independent schema
- construct (Postgresql), this type will be created and dropped within
- ``create_all()`` and ``drop_all()`` operations. If the type is not
- associated with any ``MetaData`` object, it will associate itself with
- each ``Table`` in which it is used, and will be created when any of
- those individual tables are created, after a check is performed for
- it's existence. The type is only dropped when ``drop_all()`` is called
- for that ``Table`` object's metadata, however.
-
- :param name: The name of this type. This is required for Postgresql and
- any future supported database which requires an explicitly named type,
- or an explicitly named constraint in order to generate the type and/or
- a table that uses it.
-
- :param native_enum: Use the database's native ENUM type when available.
- Defaults to True. When False, uses VARCHAR + check constraint
- for all backends.
-
- :param schema: Schemaname of this type. For types that exist on the target
- database as an independent schema construct (Postgresql), this
- parameter specifies the named schema in which the type is present.
-
- :param quote: Force quoting to be on or off on the type's name. If left as
- the default of `None`, the usual schema-level "case
- sensitive"/"reserved name" rules are used to determine if this type's
- name should be quoted.
+ :param \*enums: string or unicode enumeration labels. If unicode
+ labels are present, the `convert_unicode` flag is auto-enabled.
+
+ :param convert_unicode: Enable unicode-aware bind parameter and
+ result-set processing for this Enum's data. This is set
+ automatically based on the presence of unicode label strings.
+
+ :param metadata: Associate this type directly with a ``MetaData``
+ object. For types that exist on the target database as an
+ independent schema construct (Postgresql), this type will be
+ created and dropped within ``create_all()`` and ``drop_all()``
+ operations. If the type is not associated with any ``MetaData``
+ object, it will associate itself with each ``Table`` in which it is
+ used, and will be created when any of those individual tables are
+ created, after a check is performed for its existence. The type is
+ only dropped when ``drop_all()`` is called for that ``Table``
+ object's metadata, however.
+
+ :param name: The name of this type. This is required for Postgresql
+ and any future supported database which requires an explicitly
+ named type, or an explicitly named constraint in order to generate
+ the type and/or a table that uses it.
+
+ :param native_enum: Use the database's native ENUM type when
+ available. Defaults to True. When False, uses VARCHAR + check
+ constraint for all backends.
+
+ :param schema: Schema name of this type. For types that exist on the
+ target database as an independent schema construct (Postgresql),
+ this parameter specifies the named schema in which the type is
+ present.
+
+ :param quote: Force quoting to be on or off on the type's name. If
+ left as the default of `None`, the usual schema-level "case
+ sensitive"/"reserved name" rules are used to determine if this
+ type's name should be quoted.
"""
self.enums = enums
@@ -1455,7 +1493,8 @@ class Enum(String, SchemaType):
e = schema.CheckConstraint(
column.in_(self.enums),
name=self.name,
- _create_rule=util.portable_instancemethod(self._should_create_constraint)
+ _create_rule=util.portable_instancemethod(
+ self._should_create_constraint)
)
table.append_constraint(e)
@@ -1487,7 +1526,8 @@ class PickleType(MutableType, TypeDecorator):
impl = LargeBinary
- def __init__(self, protocol=pickle.HIGHEST_PROTOCOL, pickler=None, mutable=True, comparator=None):
+ def __init__(self, protocol=pickle.HIGHEST_PROTOCOL,
+ pickler=None, mutable=True, comparator=None):
"""
Construct a PickleType.
@@ -1548,7 +1588,8 @@ class PickleType(MutableType, TypeDecorator):
def copy_value(self, value):
if self.mutable:
- return self.pickler.loads(self.pickler.dumps(value, self.protocol))
+ return self.pickler.loads(
+ self.pickler.dumps(value, self.protocol))
else:
return value
@@ -1596,7 +1637,8 @@ class Boolean(TypeEngine, SchemaType):
e = schema.CheckConstraint(
column.in_([0, 1]),
name=self.name,
- _create_rule=util.portable_instancemethod(self._should_create_constraint)
+ _create_rule=util.portable_instancemethod(
+ self._should_create_constraint)
)
table.append_constraint(e)
@@ -1614,12 +1656,12 @@ class Interval(_DateAffinity, TypeDecorator):
value is stored as a date which is relative to the "epoch"
(Jan. 1, 1970).
- Note that the ``Interval`` type does not currently provide
- date arithmetic operations on platforms which do not support
- interval types natively. Such operations usually require
- transformation of both sides of the expression (such as, conversion
- of both sides into integer epoch values first) which currently
- is a manual procedure (such as via :attr:`~sqlalchemy.sql.expression.func`).
+ Note that the ``Interval`` type does not currently provide date arithmetic
+ operations on platforms which do not support interval types natively. Such
+ operations usually require transformation of both sides of the expression
+ (such as, conversion of both sides into integer epoch values first) which
+ currently is a manual procedure (such as via
+ :attr:`~sqlalchemy.sql.expression.func`).
"""
@@ -1842,6 +1884,9 @@ STRINGTYPE = String()
# using VARCHAR/NCHAR so that we dont get the genericized "String"
# type which usually resolves to TEXT/CLOB
+# NOTE: this dict is not meant to be public and will be underscored
+# in 0.7, see [ticket:1870].
+
type_map = {
str: String(),
# Py3K
diff --git a/lib/sqlalchemy/util.py b/lib/sqlalchemy/util.py
index d13a2685c..d5227b447 100644
--- a/lib/sqlalchemy/util.py
+++ b/lib/sqlalchemy/util.py
@@ -176,6 +176,32 @@ class frozendict(dict):
def __repr__(self):
return "frozendict(%s)" % dict.__repr__(self)
+
+# find or create a dict implementation that supports __missing__
+class _probe(dict):
+ def __missing__(self, key):
+ return 1
+
+try:
+ try:
+ _probe()['missing']
+ py25_dict = dict
+ except KeyError:
+ class py25_dict(dict):
+ def __getitem__(self, key):
+ try:
+ return dict.__getitem__(self, key)
+ except KeyError:
+ try:
+ missing = self.__missing__
+ except AttributeError:
+ raise KeyError(key)
+ else:
+ return missing(key)
+finally:
+ del _probe
+
+
def to_list(x, default=None):
if x is None:
return default
@@ -1437,6 +1463,7 @@ def function_named(fn, name):
fn.func_defaults, fn.func_closure)
return fn
+
class memoized_property(object):
"""A read-only @property that is only evaluated once."""
def __init__(self, fget, doc=None):
@@ -1481,6 +1508,24 @@ class memoized_instancemethod(object):
def reset_memoized(instance, name):
instance.__dict__.pop(name, None)
+
+class group_expirable_memoized_property(object):
+ """A family of @memoized_properties that can be expired in tandem."""
+
+ def __init__(self):
+ self.attributes = []
+
+ def expire_instance(self, instance):
+ """Expire all memoized properties for *instance*."""
+ stash = instance.__dict__
+ for attribute in self.attributes:
+ stash.pop(attribute, None)
+
+ def __call__(self, fn):
+ self.attributes.append(fn.__name__)
+ return memoized_property(fn)
+
+
class WeakIdentityMapping(weakref.WeakKeyDictionary):
"""A WeakKeyDictionary with an object identity index.