Diffstat (limited to 'lib/sqlalchemy/ext')
-rw-r--r--  lib/sqlalchemy/ext/__init__.py                  1
-rw-r--r--  lib/sqlalchemy/ext/associationproxy.py         83
-rw-r--r--  lib/sqlalchemy/ext/compiler.py                110
-rw-r--r--  lib/sqlalchemy/ext/declarative/__init__.py     80
-rw-r--r--  lib/sqlalchemy/ext/declarative/api.py          14
-rw-r--r--  lib/sqlalchemy/ext/declarative/base.py         20
-rw-r--r--  lib/sqlalchemy/ext/declarative/clsregistry.py  12
-rw-r--r--  lib/sqlalchemy/ext/horizontal_shard.py         34
-rw-r--r--  lib/sqlalchemy/ext/hybrid.py                   16
-rw-r--r--  lib/sqlalchemy/ext/instrumentation.py          26
-rw-r--r--  lib/sqlalchemy/ext/mutable.py                 101
-rw-r--r--  lib/sqlalchemy/ext/orderinglist.py             60
-rw-r--r--  lib/sqlalchemy/ext/serializer.py               23
13 files changed, 339 insertions, 241 deletions
diff --git a/lib/sqlalchemy/ext/__init__.py b/lib/sqlalchemy/ext/__init__.py
index 7558ac268..bb99e60fc 100644
--- a/lib/sqlalchemy/ext/__init__.py
+++ b/lib/sqlalchemy/ext/__init__.py
@@ -3,4 +3,3 @@
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
diff --git a/lib/sqlalchemy/ext/associationproxy.py b/lib/sqlalchemy/ext/associationproxy.py
index 27c76eb6b..f6c0764e4 100644
--- a/lib/sqlalchemy/ext/associationproxy.py
+++ b/lib/sqlalchemy/ext/associationproxy.py
@@ -27,24 +27,25 @@ def association_proxy(target_collection, attr, **kw):
The returned value is an instance of :class:`.AssociationProxy`.
- Implements a Python property representing a relationship as a collection of
- simpler values, or a scalar value. The proxied property will mimic the collection type of
- the target (list, dict or set), or, in the case of a one to one relationship,
- a simple scalar value.
+ Implements a Python property representing a relationship as a collection
+ of simpler values, or a scalar value. The proxied property will mimic
+ the collection type of the target (list, dict or set), or, in the case of
+ a one to one relationship, a simple scalar value.
:param target_collection: Name of the attribute we'll proxy to.
This attribute is typically mapped by
:func:`~sqlalchemy.orm.relationship` to link to a target collection, but
can also be a many-to-one or non-scalar relationship.
- :param attr: Attribute on the associated instance or instances we'll proxy for.
+ :param attr: Attribute on the associated instance or instances we'll
+ proxy for.
For example, given a target collection of [obj1, obj2], a list created
by this proxy property would look like [getattr(obj1, *attr*),
getattr(obj2, *attr*)]
- If the relationship is one-to-one or otherwise uselist=False, then simply:
- getattr(obj, *attr*)
+ If the relationship is one-to-one or otherwise uselist=False, then
+ simply: getattr(obj, *attr*)
:param creator: optional.
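For reference, a minimal sketch of the mapping pattern these parameters
describe; the ``User``/``Keyword`` names are illustrative only, and appending
a plain string to ``User.keywords`` invokes the default creator, i.e.
``Keyword(value)``::

    from sqlalchemy import Column, ForeignKey, Integer, String
    from sqlalchemy.ext.associationproxy import association_proxy
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import relationship

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)

        # proxy the 'keyword' attribute of each Keyword in 'kws'
        keywords = association_proxy('kws', 'keyword')
        kws = relationship("Keyword")

    class Keyword(Base):
        __tablename__ = 'keyword'
        id = Column(Integer, primary_key=True)
        user_id = Column(Integer, ForeignKey('user.id'))
        keyword = Column(String(64))

        def __init__(self, keyword):
            self.keyword = keyword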
@@ -89,34 +90,36 @@ class AssociationProxy(object):
:param target_collection: Name of the collection we'll proxy to,
usually created with :func:`.relationship`.
- :param attr: Attribute on the collected instances we'll proxy for. For example,
- given a target collection of [obj1, obj2], a list created by this
- proxy property would look like [getattr(obj1, attr), getattr(obj2,
- attr)]
+ :param attr: Attribute on the collected instances we'll proxy
+ for. For example, given a target collection of [obj1, obj2], a
+ list created by this proxy property would look like
+ [getattr(obj1, attr), getattr(obj2, attr)]
- :param creator: Optional. When new items are added to this proxied collection, new
- instances of the class collected by the target collection will be
- created. For list and set collections, the target class constructor
- will be called with the 'value' for the new instance. For dict
- types, two arguments are passed: key and value.
+ :param creator: Optional. When new items are added to this proxied
+ collection, new instances of the class collected by the target
+ collection will be created. For list and set collections, the
+ target class constructor will be called with the 'value' for the
+ new instance. For dict types, two arguments are passed:
+ key and value.
If you want to construct instances differently, supply a 'creator'
function that takes arguments as above and returns instances.
- :param getset_factory: Optional. Proxied attribute access is automatically handled by
- routines that get and set values based on the `attr` argument for
- this proxy.
+ :param getset_factory: Optional. Proxied attribute access is
+ automatically handled by routines that get and set values based on
+ the `attr` argument for this proxy.
If you would like to customize this behavior, you may supply a
`getset_factory` callable that produces a tuple of `getter` and
`setter` functions. The factory is called with two arguments, the
abstract type of the underlying collection and this proxy instance.
- :param proxy_factory: Optional. The type of collection to emulate is determined by
- sniffing the target collection. If your collection type can't be
- determined by duck typing or you'd like to use a different
- collection implementation, you may supply a factory function to
- produce those collections. Only applicable to non-scalar relationships.
+ :param proxy_factory: Optional. The type of collection to emulate is
+ determined by sniffing the target collection. If your collection
+ type can't be determined by duck typing or you'd like to use a
+ different collection implementation, you may supply a factory
+ function to produce those collections. Only applicable to
+ non-scalar relationships.
:param proxy_bulk_set: Optional, use with proxy_factory. See
the _set() method for details.
@@ -279,7 +282,8 @@ class AssociationProxy(object):
self.collection_class = util.duck_type_collection(lazy_collection())
if self.proxy_factory:
- return self.proxy_factory(lazy_collection, creator, self.value_attr, self)
+ return self.proxy_factory(
+ lazy_collection, creator, self.value_attr, self)
if self.getset_factory:
getter, setter = self.getset_factory(self.collection_class, self)
@@ -287,11 +291,14 @@ class AssociationProxy(object):
getter, setter = self._default_getset(self.collection_class)
if self.collection_class is list:
- return _AssociationList(lazy_collection, creator, getter, setter, self)
+ return _AssociationList(
+ lazy_collection, creator, getter, setter, self)
elif self.collection_class is dict:
- return _AssociationDict(lazy_collection, creator, getter, setter, self)
+ return _AssociationDict(
+ lazy_collection, creator, getter, setter, self)
elif self.collection_class is set:
- return _AssociationSet(lazy_collection, creator, getter, setter, self)
+ return _AssociationSet(
+ lazy_collection, creator, getter, setter, self)
else:
raise exc.ArgumentError(
'could not guess which interface to use for '
@@ -340,9 +347,11 @@ class AssociationProxy(object):
"""
if self._value_is_scalar:
- value_expr = getattr(self.target_class, self.value_attr).has(criterion, **kwargs)
+ value_expr = getattr(
+ self.target_class, self.value_attr).has(criterion, **kwargs)
else:
- value_expr = getattr(self.target_class, self.value_attr).any(criterion, **kwargs)
+ value_expr = getattr(
+ self.target_class, self.value_attr).any(criterion, **kwargs)
# check _value_is_scalar here, otherwise
# we're scalar->scalar - call .any() so that
@@ -409,12 +418,13 @@ class _lazy_collection(object):
return getattr(obj, self.target)
def __getstate__(self):
- return {'obj':self.ref(), 'target':self.target}
+ return {'obj': self.ref(), 'target': self.target}
def __setstate__(self, state):
self.ref = weakref.ref(state['obj'])
self.target = state['target']
+
class _AssociationCollection(object):
def __init__(self, lazy_collection, creator, getter, setter, parent):
"""Constructs an _AssociationCollection.
@@ -456,13 +466,14 @@ class _AssociationCollection(object):
return bool(self.col)
def __getstate__(self):
- return {'parent':self.parent, 'lazy_collection':self.lazy_collection}
+ return {'parent': self.parent, 'lazy_collection': self.lazy_collection}
def __setstate__(self, state):
self.parent = state['parent']
self.lazy_collection = state['lazy_collection']
self.parent._inflate(self)
+
class _AssociationList(_AssociationCollection):
"""Generic, converting, list-to-list proxy."""
@@ -652,6 +663,8 @@ class _AssociationList(_AssociationCollection):
_NotProvided = util.symbol('_NotProvided')
+
+
class _AssociationDict(_AssociationCollection):
"""Generic, converting, dict-to-dict proxy."""
@@ -734,7 +747,7 @@ class _AssociationDict(_AssociationCollection):
return self.col.iterkeys()
def values(self):
- return [ self._get(member) for member in self.col.values() ]
+ return [self._get(member) for member in self.col.values()]
def itervalues(self):
for key in self.col:
@@ -766,8 +779,8 @@ class _AssociationDict(_AssociationCollection):
len(a))
elif len(a) == 1:
seq_or_map = a[0]
- # discern dict from sequence - took the advice
- # from http://www.voidspace.org.uk/python/articles/duck_typing.shtml
+ # discern dict from sequence - took the advice from
+ # http://www.voidspace.org.uk/python/articles/duck_typing.shtml
# still not perfect :(
if hasattr(seq_or_map, 'keys'):
for item in seq_or_map:
diff --git a/lib/sqlalchemy/ext/compiler.py b/lib/sqlalchemy/ext/compiler.py
index e3e668364..93984d0d1 100644
--- a/lib/sqlalchemy/ext/compiler.py
+++ b/lib/sqlalchemy/ext/compiler.py
@@ -9,8 +9,9 @@
Synopsis
========
-Usage involves the creation of one or more :class:`~sqlalchemy.sql.expression.ClauseElement`
-subclasses and one or more callables defining its compilation::
+Usage involves the creation of one or more
+:class:`~sqlalchemy.sql.expression.ClauseElement` subclasses and one or
+more callables defining its compilation::
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.sql.expression import ColumnClause
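The body of this synopsis is elided by the diff context; a minimal sketch of
the pairing it describes, where ``MyColumn`` and the bracketed rendering are
illustrative::

    class MyColumn(ColumnClause):
        pass

    @compiles(MyColumn)
    def compile_mycolumn(element, compiler, **kw):
        # render the column name wrapped in brackets
        return "[%s]" % element.name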
@@ -58,7 +59,8 @@ invoked for the dialect in use::
def visit_alter_column(element, compiler, **kw):
return "ALTER TABLE %s ALTER COLUMN %s ..." % (element.table.name, element.column.name)
-The second ``visit_alter_table`` will be invoked when any ``postgresql`` dialect is used.
+The second ``visit_alter_column`` will be invoked when any ``postgresql``
+dialect is used.
Compiling sub-elements of a custom expression construct
=======================================================
@@ -99,10 +101,11 @@ Produces::
Cross Compiling between SQL and DDL compilers
---------------------------------------------
-SQL and DDL constructs are each compiled using different base compilers - ``SQLCompiler``
-and ``DDLCompiler``. A common need is to access the compilation rules of SQL expressions
-from within a DDL expression. The ``DDLCompiler`` includes an accessor ``sql_compiler`` for this reason, such as below where we generate a CHECK
-constraint that embeds a SQL expression::
+SQL and DDL constructs are each compiled using different base compilers -
+``SQLCompiler`` and ``DDLCompiler``. A common need is to access the
+compilation rules of SQL expressions from within a DDL expression. The
+``DDLCompiler`` includes an accessor ``sql_compiler`` for this reason, such as
+below where we generate a CHECK constraint that embeds a SQL expression::
@compiles(MyConstraint)
def compile_my_constraint(constraint, ddlcompiler, **kw):
@@ -116,20 +119,22 @@ constraint that embeds a SQL expression::
Enabling Autocommit on a Construct
==================================
-Recall from the section :ref:`autocommit` that the :class:`.Engine`, when asked to execute
-a construct in the absence of a user-defined transaction, detects if the given
-construct represents DML or DDL, that is, a data modification or data definition statement, which
-requires (or may require, in the case of DDL) that the transaction generated by the DBAPI be committed
-(recall that DBAPI always has a transaction going on regardless of what SQLAlchemy does). Checking
-for this is actually accomplished
-by checking for the "autocommit" execution option on the construct. When building a construct like
-an INSERT derivation, a new DDL type, or perhaps a stored procedure that alters data, the "autocommit"
-option needs to be set in order for the statement to function with "connectionless" execution
+Recall from the section :ref:`autocommit` that the :class:`.Engine`, when
+asked to execute a construct in the absence of a user-defined transaction,
+detects if the given construct represents DML or DDL, that is, a data
+modification or data definition statement, which requires (or may require,
+in the case of DDL) that the transaction generated by the DBAPI be committed
+(recall that DBAPI always has a transaction going on regardless of what
+SQLAlchemy does). Checking for this is actually accomplished by checking for
+the "autocommit" execution option on the construct. When building a
+construct like an INSERT derivation, a new DDL type, or perhaps a stored
+procedure that alters data, the "autocommit" option needs to be set in order
+for the statement to function with "connectionless" execution
(as described in :ref:`dbengine_implicit`).
-Currently a quick way to do this is to subclass :class:`.Executable`, then add the "autocommit" flag
-to the ``_execution_options`` dictionary (note this is a "frozen" dictionary which supplies a generative
-``union()`` method)::
+Currently a quick way to do this is to subclass :class:`.Executable`, then
+add the "autocommit" flag to the ``_execution_options`` dictionary (note this
+is a "frozen" dictionary which supplies a generative ``union()`` method)::
from sqlalchemy.sql.expression import Executable, ClauseElement
@@ -137,8 +142,9 @@ to the ``_execution_options`` dictionary (note this is a "frozen" dictionary whi
_execution_options = \\
Executable._execution_options.union({'autocommit': True})
-More succinctly, if the construct is truly similar to an INSERT, UPDATE, or DELETE, :class:`.UpdateBase`
-can be used, which already is a subclass of :class:`.Executable`, :class:`.ClauseElement` and includes the
+More succinctly, if the construct is truly similar to an INSERT, UPDATE, or
+DELETE, :class:`.UpdateBase` can be used, which already is a subclass
+of :class:`.Executable`, :class:`.ClauseElement` and includes the
``autocommit`` flag::
from sqlalchemy.sql.expression import UpdateBase
@@ -150,7 +156,8 @@ can be used, which already is a subclass of :class:`.Executable`, :class:`.Claus
-DDL elements that subclass :class:`.DDLElement` already have the "autocommit" flag turned on.
+DDL elements that subclass :class:`.DDLElement` already have the
+"autocommit" flag turned on.
@@ -158,13 +165,16 @@ DDL elements that subclass :class:`.DDLElement` already have the "autocommit" fl
Changing the default compilation of existing constructs
=======================================================
-The compiler extension applies just as well to the existing constructs. When overriding
-the compilation of a built in SQL construct, the @compiles decorator is invoked upon
-the appropriate class (be sure to use the class, i.e. ``Insert`` or ``Select``, instead of the creation function such as ``insert()`` or ``select()``).
+The compiler extension applies just as well to the existing constructs. When
+overriding the compilation of a built in SQL construct, the @compiles
+decorator is invoked upon the appropriate class (be sure to use the class,
+i.e. ``Insert`` or ``Select``, instead of the creation function such
+as ``insert()`` or ``select()``).
-Within the new compilation function, to get at the "original" compilation routine,
-use the appropriate visit_XXX method - this because compiler.process() will call upon the
-overriding routine and cause an endless loop. Such as, to add "prefix" to all insert statements::
+Within the new compilation function, to get at the "original" compilation
+routine, use the appropriate visit_XXX method - this is
+because compiler.process() will call upon the overriding routine and cause
+an endless loop. For example, to add "prefix" to all insert statements::
from sqlalchemy.sql.expression import Insert
@@ -172,14 +182,16 @@ overriding routine and cause an endless loop. Such as, to add "prefix" to all
def prefix_inserts(insert, compiler, **kw):
return compiler.visit_insert(insert.prefix_with("some prefix"), **kw)
-The above compiler will prefix all INSERT statements with "some prefix" when compiled.
+The above compiler will prefix all INSERT statements with "some prefix" when
+compiled.
.. _type_compilation_extension:
Changing Compilation of Types
=============================
-``compiler`` works for types, too, such as below where we implement the MS-SQL specific 'max' keyword for ``String``/``VARCHAR``::
+``compiler`` works for types, too, such as below where we implement the
+MS-SQL specific 'max' keyword for ``String``/``VARCHAR``::
@compiles(String, 'mssql')
@compiles(VARCHAR, 'mssql')
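A sketch of the compilation function beneath those decorators (restated here
for self-containment), interpreting a length of ``'max'``::

    from sqlalchemy import String, VARCHAR
    from sqlalchemy.ext.compiler import compiles

    @compiles(String, 'mssql')
    @compiles(VARCHAR, 'mssql')
    def compile_varchar(element, compiler, **kw):
        if element.length == 'max':
            # MS-SQL accepts VARCHAR('max') for unbounded length
            return "VARCHAR('max')"
        else:
            return compiler.visit_VARCHAR(element, **kw)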
@@ -248,10 +260,10 @@ A synopsis is as follows:
``execute_at()`` method, allowing the construct to be invoked during CREATE
TABLE and DROP TABLE sequences.
-* :class:`~sqlalchemy.sql.expression.Executable` - This is a mixin which should be
- used with any expression class that represents a "standalone" SQL statement that
- can be passed directly to an ``execute()`` method. It is already implicit
- within ``DDLElement`` and ``FunctionElement``.
+* :class:`~sqlalchemy.sql.expression.Executable` - This is a mixin which
+ should be used with any expression class that represents a "standalone"
+ SQL statement that can be passed directly to an ``execute()`` method. It
+ is already implicit within ``DDLElement`` and ``FunctionElement``.
Further Examples
================
@@ -259,12 +271,13 @@ Further Examples
"UTC timestamp" function
-------------------------
-A function that works like "CURRENT_TIMESTAMP" except applies the appropriate conversions
-so that the time is in UTC time. Timestamps are best stored in relational databases
-as UTC, without time zones. UTC so that your database doesn't think time has gone
-backwards in the hour when daylight savings ends, without timezones because timezones
-are like character encodings - they're best applied only at the endpoints of an
-application (i.e. convert to UTC upon user input, re-apply desired timezone upon display).
+A function that works like "CURRENT_TIMESTAMP" except applies the
+appropriate conversions so that the time is in UTC time. Timestamps are best
+stored in relational databases as UTC, without time zones. UTC so that your
+database doesn't think time has gone backwards in the hour when daylight
+savings ends, without timezones because timezones are like character
+encodings - they're best applied only at the endpoints of an application
+(i.e. convert to UTC upon user input, re-apply desired timezone upon display).
For Postgresql and Microsoft SQL Server::
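A sketch of what the two dialect-specific implementations might look like,
following the pattern this extension provides (the function names are
illustrative)::

    from sqlalchemy.sql import expression
    from sqlalchemy.ext.compiler import compiles
    from sqlalchemy.types import DateTime

    class utcnow(expression.FunctionElement):
        type = DateTime()

    @compiles(utcnow, 'postgresql')
    def pg_utcnow(element, compiler, **kw):
        # render as a timezone conversion of CURRENT_TIMESTAMP
        return "TIMEZONE('utc', CURRENT_TIMESTAMP)"

    @compiles(utcnow, 'mssql')
    def ms_utcnow(element, compiler, **kw):
        return "GETUTCDATE()"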
@@ -298,10 +311,10 @@ Example usage::
"GREATEST" function
-------------------
-The "GREATEST" function is given any number of arguments and returns the one that is
-of the highest value - it's equivalent to Python's ``max`` function. A SQL
-standard version versus a CASE based version which only accommodates two
-arguments::
+The "GREATEST" function is given any number of arguments and returns the one
+that is of the highest value - it's equivalent to Python's ``max``
+function. A SQL standard version versus a CASE based version which only
+accommodates two arguments::
from sqlalchemy.sql import expression
from sqlalchemy.ext.compiler import compiles
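A sketch of the two variants, assuming ``Numeric`` as the return type; the
CASE version handles exactly the two-argument form::

    from sqlalchemy.types import Numeric
    from sqlalchemy.sql.expression import case

    class greatest(expression.FunctionElement):
        type = Numeric()
        name = 'greatest'

    @compiles(greatest)
    def default_greatest(element, compiler, **kw):
        # SQL standard: render as the GREATEST() function
        return compiler.visit_function(element)

    @compiles(greatest, 'sqlite')
    @compiles(greatest, 'mssql')
    @compiles(greatest, 'oracle')
    def case_greatest(element, compiler, **kw):
        arg1, arg2 = list(element.clauses)
        # two-argument fallback rendered as CASE
        return compiler.process(case([(arg1 > arg2, arg1)], else_=arg2), **kw)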
@@ -339,7 +352,8 @@ Example usage::
"false" expression
------------------
-Render a "false" constant expression, rendering as "0" on platforms that don't have a "false" constant::
+Render a "false" constant expression, rendering as "0" on platforms that
+don't have a "false" constant::
from sqlalchemy.sql import expression
from sqlalchemy.ext.compiler import compiles
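A sketch of such a construct; ``sql_false`` is an illustrative name::

    class sql_false(expression.ColumnElement):
        pass

    @compiles(sql_false)
    def default_false(element, compiler, **kw):
        return "false"

    @compiles(sql_false, 'mssql')
    @compiles(sql_false, 'firebird')
    def int_false(element, compiler, **kw):
        # dialects without a boolean literal render integer 0
        return "0"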
@@ -370,6 +384,7 @@ Example usage::
from .. import exc
from ..sql import visitors
+
def compiles(class_, *specs):
"""Register a function as a compiler for a
given :class:`.ClauseElement` type."""
@@ -384,7 +399,8 @@ def compiles(class_, *specs):
existing.specs['default'] = existing_dispatch
# TODO: why is the lambda needed ?
- setattr(class_, '_compiler_dispatch', lambda *arg, **kw: existing(*arg, **kw))
+ setattr(class_, '_compiler_dispatch',
+ lambda *arg, **kw: existing(*arg, **kw))
setattr(class_, '_compiler_dispatcher', existing)
if specs:
@@ -396,6 +412,7 @@ def compiles(class_, *specs):
return fn
return decorate
+
def deregister(class_):
"""Remove all custom compilers associated with a given
:class:`.ClauseElement` type."""
@@ -422,4 +439,3 @@ class _dispatcher(object):
"%s construct has no default "
"compilation handler." % type(element))
return fn(element, compiler, **kw)
-
diff --git a/lib/sqlalchemy/ext/declarative/__init__.py b/lib/sqlalchemy/ext/declarative/__init__.py
index bf6e6786e..1fb3feb6a 100644
--- a/lib/sqlalchemy/ext/declarative/__init__.py
+++ b/lib/sqlalchemy/ext/declarative/__init__.py
@@ -51,7 +51,8 @@ assigned.
To name columns explicitly with a name distinct from their mapped attribute,
just give the column a name. Below, column "some_table_id" is mapped to the
-"id" attribute of `SomeClass`, but in SQL will be represented as "some_table_id"::
+"id" attribute of `SomeClass`, but in SQL will be represented as
+"some_table_id"::
class SomeClass(Base):
__tablename__ = 'some_table'
@@ -312,7 +313,8 @@ such as those which already take advantage of the data-driven nature of
Note that when the ``__table__`` approach is used, the object is immediately
usable as a plain :class:`.Table` within the class declaration body itself,
as a Python class is only another syntactical block. Below this is illustrated
-by using the ``id`` column in the ``primaryjoin`` condition of a :func:`.relationship`::
+by using the ``id`` column in the ``primaryjoin`` condition of a
+:func:`.relationship`::
class MyClass(Base):
__table__ = Table('my_table', Base.metadata,
@@ -324,8 +326,8 @@ by using the ``id`` column in the ``primaryjoin`` condition of a :func:`.relatio
primaryjoin=Widget.myclass_id==__table__.c.id)
Similarly, mapped attributes which refer to ``__table__`` can be placed inline,
-as below where we assign the ``name`` column to the attribute ``_name``, generating
-a synonym for ``name``::
+as below where we assign the ``name`` column to the attribute ``_name``,
+generating a synonym for ``name``::
from sqlalchemy.ext.declarative import synonym_for
@@ -383,9 +385,9 @@ Mapper Configuration
Declarative makes use of the :func:`~.orm.mapper` function internally
when it creates the mapping to the declared table. The options
-for :func:`~.orm.mapper` are passed directly through via the ``__mapper_args__``
-class attribute. As always, arguments which reference locally
-mapped columns can reference them directly from within the
+for :func:`~.orm.mapper` are passed directly through via the
+``__mapper_args__`` class attribute. As always, arguments which reference
+locally mapped columns can reference them directly from within the
class declaration::
from datetime import datetime
@@ -521,8 +523,8 @@ In a situation like this, Declarative can't be sure
of the intent, especially if the ``start_date`` columns had, for example,
different types. A situation like this can be resolved by using
:class:`.declared_attr` to define the :class:`.Column` conditionally, taking
-care to return the **existing column** via the parent ``__table__`` if it already
-exists::
+care to return the **existing column** via the parent ``__table__`` if it
+already exists::
from sqlalchemy.ext.declarative import declared_attr
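A sketch of the conditional column under single-table inheritance,
consulting the parent ``__table__`` first; the ``Task``/``Engineer`` names
are illustrative and the usual ``Column``/``DateTime`` imports are assumed::

    class Task(Base):
        __tablename__ = 'task'
        id = Column(Integer, primary_key=True)
        type = Column(String(50))
        __mapper_args__ = {'polymorphic_on': type}

    class Engineer(Task):
        __mapper_args__ = {'polymorphic_identity': 'engineer'}

        @declared_attr
        def start_date(cls):
            "Start date column, if not present already."
            return Task.__table__.c.get('start_date', Column(DateTime))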
@@ -654,12 +656,13 @@ Using the Concrete Helpers
^^^^^^^^^^^^^^^^^^^^^^^^^^^
Helper classes provides a simpler pattern for concrete inheritance.
-With these objects, the ``__declare_last__`` helper is used to configure the "polymorphic"
-loader for the mapper after all subclasses have been declared.
+With these objects, the ``__declare_last__`` helper is used to configure the
+"polymorphic" loader for the mapper after all subclasses have been declared.
.. versionadded:: 0.7.3
-An abstract base can be declared using the :class:`.AbstractConcreteBase` class::
+An abstract base can be declared using the
+:class:`.AbstractConcreteBase` class::
from sqlalchemy.ext.declarative import AbstractConcreteBase
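For example, a sketch of an abstract ``Employee`` base with one concrete
subclass::

    class Employee(AbstractConcreteBase, Base):
        pass

    class Manager(Employee):
        __tablename__ = 'manager'
        employee_id = Column(Integer, primary_key=True)
        name = Column(String(50))
        __mapper_args__ = {
            'polymorphic_identity': 'manager',
            'concrete': True
        }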
@@ -757,8 +760,8 @@ Augmenting the Base
In addition to using a pure mixin, most of the techniques in this
section can also be applied to the base class itself, for patterns that
-should apply to all classes derived from a particular base. This
-is achieved using the ``cls`` argument of the :func:`.declarative_base` function::
+should apply to all classes derived from a particular base. This is achieved
+using the ``cls`` argument of the :func:`.declarative_base` function::
from sqlalchemy.ext.declarative import declared_attr
@@ -778,9 +781,9 @@ is achieved using the ``cls`` argument of the :func:`.declarative_base` function
class MyModel(Base):
name = Column(String(1000))
-Where above, ``MyModel`` and all other classes that derive from ``Base`` will have
-a table name derived from the class name, an ``id`` primary key column, as well as
-the "InnoDB" engine for MySQL.
+Where above, ``MyModel`` and all other classes that derive from ``Base`` will
+have a table name derived from the class name, an ``id`` primary key column,
+as well as the "InnoDB" engine for MySQL.
Mixing in Columns
~~~~~~~~~~~~~~~~~
@@ -840,7 +843,8 @@ extension can use the resulting :class:`.Column` object as returned by
the method without the need to copy it.
.. versionchanged:: > 0.6.5
- Rename 0.6.5 ``sqlalchemy.util.classproperty`` into :class:`~.declared_attr`.
+ Rename 0.6.5 ``sqlalchemy.util.classproperty``
+ into :class:`~.declared_attr`.
Columns generated by :class:`~.declared_attr` can also be
referenced by ``__mapper_args__`` to a limited degree, currently
@@ -933,12 +937,13 @@ Mixing in Association Proxy and Other Attributes
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Mixins can specify user-defined attributes as well as other extension
-units such as :func:`.association_proxy`. The usage of :class:`.declared_attr`
-is required in those cases where the attribute must be tailored specifically
-to the target subclass. An example is when constructing multiple
-:func:`.association_proxy` attributes which each target a different type
-of child object. Below is an :func:`.association_proxy` / mixin example
-which provides a scalar list of string values to an implementing class::
+units such as :func:`.association_proxy`. The usage of
+:class:`.declared_attr` is required in those cases where the attribute must
+be tailored specifically to the target subclass. An example is when
+constructing multiple :func:`.association_proxy` attributes which each
+target a different type of child object. Below is an
+:func:`.association_proxy` / mixin example which provides a scalar list of
+string values to an implementing class::
from sqlalchemy import Column, Integer, ForeignKey, String
from sqlalchemy.orm import relationship
@@ -1138,8 +1143,8 @@ Creating Indexes with Mixins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
To define a named, potentially multicolumn :class:`.Index` that applies to all
-tables derived from a mixin, use the "inline" form of :class:`.Index` and establish
-it as part of ``__table_args__``::
+tables derived from a mixin, use the "inline" form of :class:`.Index` and
+establish it as part of ``__table_args__``::
class MyMixin(object):
a = Column(Integer)
@@ -1160,9 +1165,9 @@ Special Directives
~~~~~~~~~~~~~~~~~~~~~~
The ``__declare_last__()`` hook allows definition of
-a class level function that is automatically called by the :meth:`.MapperEvents.after_configured`
-event, which occurs after mappings are assumed to be completed and the 'configure' step
-has finished::
+a class level function that is automatically called by the
+:meth:`.MapperEvents.after_configured` event, which occurs after mappings are
+assumed to be completed and the 'configure' step has finished::
class MyClass(Base):
@classmethod
@@ -1178,9 +1183,9 @@ has finished::
~~~~~~~~~~~~~~~~~~~
``__abstract__`` causes declarative to skip the production
-of a table or mapper for the class entirely. A class can be added within a hierarchy
-in the same way as mixin (see :ref:`declarative_mixins`), allowing subclasses to extend
-just from the special class::
+of a table or mapper for the class entirely. A class can be added within a
+hierarchy in the same way as mixin (see :ref:`declarative_mixins`), allowing
+subclasses to extend just from the special class::
class SomeAbstractBase(Base):
__abstract__ = True
@@ -1195,8 +1200,8 @@ just from the special class::
class MyMappedClass(SomeAbstractBase):
""
-One possible use of ``__abstract__`` is to use a distinct :class:`.MetaData` for different
-bases::
+One possible use of ``__abstract__`` is to use a distinct
+:class:`.MetaData` for different bases::
Base = declarative_base()
@@ -1208,9 +1213,10 @@ bases::
__abstract__ = True
metadata = MetaData()
-Above, classes which inherit from ``DefaultBase`` will use one :class:`.MetaData` as the
-registry of tables, and those which inherit from ``OtherBase`` will use a different one.
-The tables themselves can then be created perhaps within distinct databases::
+Above, classes which inherit from ``DefaultBase`` will use one
+:class:`.MetaData` as the registry of tables, and those which inherit from
+``OtherBase`` will use a different one. The tables themselves can then be
+created perhaps within distinct databases::
DefaultBase.metadata.create_all(some_engine)
OtherBase.metadata.create_all(some_other_engine)
diff --git a/lib/sqlalchemy/ext/declarative/api.py b/lib/sqlalchemy/ext/declarative/api.py
index 0db3f4e6b..b4b6f733b 100644
--- a/lib/sqlalchemy/ext/declarative/api.py
+++ b/lib/sqlalchemy/ext/declarative/api.py
@@ -33,6 +33,7 @@ def instrument_declarative(cls, registry, metadata):
cls.metadata = metadata
_as_declarative(cls, cls.__name__, cls.__dict__)
+
def has_inherited_table(cls):
"""Given a class, return True if any of the classes it inherits from has a
mapped table, otherwise return False.
@@ -42,6 +43,7 @@ def has_inherited_table(cls):
return True
return False
+
class DeclarativeMeta(type):
def __init__(cls, classname, bases, dict_):
if '_decl_class_registry' not in cls.__dict__:
@@ -51,6 +53,7 @@ class DeclarativeMeta(type):
def __setattr__(cls, key, value):
_add_attribute(cls, key, value)
+
def synonym_for(name, map_column=False):
"""Decorator, make a Python @property a query synonym for a column.
@@ -73,6 +76,7 @@ def synonym_for(name, map_column=False):
return _orm_synonym(name, map_column=map_column, descriptor=fn)
return decorate
+
def comparable_using(comparator_factory):
"""Decorator, allow a Python @property to be used in query criteria.
@@ -95,6 +99,7 @@ def comparable_using(comparator_factory):
return comparable_property(comparator_factory, fn)
return decorate
+
class declared_attr(interfaces._MappedAttribute, property):
"""Mark a class-level method as representing the definition of
a mapped property or special declarative member name.
@@ -154,6 +159,7 @@ class declared_attr(interfaces._MappedAttribute, property):
def __get__(desc, self, cls):
return desc.fget(cls)
+
def declarative_base(bind=None, metadata=None, mapper=None, cls=object,
name='Base', constructor=_declarative_constructor,
class_registry=None,
@@ -231,6 +237,7 @@ def declarative_base(bind=None, metadata=None, mapper=None, cls=object,
return metaclass(name, bases, class_dict)
+
class ConcreteBase(object):
"""A helper class for 'concrete' declarative mappings.
@@ -285,6 +292,7 @@ class ConcreteBase(object):
m._set_with_polymorphic(("*", pjoin))
m._set_polymorphic_on(pjoin.c.type)
+
class AbstractConcreteBase(ConcreteBase):
"""A helper class for 'concrete' declarative mappings.
@@ -362,7 +370,8 @@ class DeferredReflection(object):
method is called which first reflects all :class:`.Table`
objects created so far. Classes can define it as such::
- from sqlalchemy.ext.declarative import declarative_base, DeferredReflection
+ from sqlalchemy.ext.declarative import declarative_base
+ from sqlalchemy.ext.declarative import DeferredReflection
Base = declarative_base()
class MyClass(DeferredReflection, Base):
@@ -370,7 +379,8 @@ class DeferredReflection(object):
Above, ``MyClass`` is not yet mapped. After a series of
classes have been defined in the above fashion, all tables
- can be reflected and mappings created using :meth:`.DeferredReflection.prepare`::
+ can be reflected and mappings created using
+ :meth:`.DeferredReflection.prepare`::
engine = create_engine("someengine://...")
DeferredReflection.prepare(engine)
diff --git a/lib/sqlalchemy/ext/declarative/base.py b/lib/sqlalchemy/ext/declarative/base.py
index 8e8f5626c..954a9abfe 100644
--- a/lib/sqlalchemy/ext/declarative/base.py
+++ b/lib/sqlalchemy/ext/declarative/base.py
@@ -15,6 +15,7 @@ from ...sql import expression
from ... import event
from . import clsregistry
+
def _declared_mapping_info(cls):
# deferred mapping
if cls in _MapperConfig.configs:
@@ -192,7 +193,8 @@ def _as_declarative(cls, classname, dict_):
# in multi-column ColumnProperties.
if key == c.key:
del our_stuff[key]
- declared_columns = sorted(declared_columns, key=lambda c: c._creation_order)
+ declared_columns = sorted(
+ declared_columns, key=lambda c: c._creation_order)
table = None
if hasattr(cls, '__table_cls__'):
@@ -217,9 +219,10 @@ def _as_declarative(cls, classname, dict_):
if autoload:
table_kw['autoload'] = True
- cls.__table__ = table = table_cls(tablename, cls.metadata,
- *(tuple(declared_columns) + tuple(args)),
- **table_kw)
+ cls.__table__ = table = table_cls(
+ tablename, cls.metadata,
+ *(tuple(declared_columns) + tuple(args)),
+ **table_kw)
else:
table = cls.__table__
if declared_columns:
@@ -291,6 +294,7 @@ def _as_declarative(cls, classname, dict_):
if not hasattr(cls, '_sa_decl_prepare'):
mt.map()
+
class _MapperConfig(object):
configs = util.OrderedDict()
mapped_table = None
@@ -375,6 +379,7 @@ class _MapperConfig(object):
**mapper_args
)
+
def _add_attribute(cls, key, value):
"""add an attribute to an existing declarative class.
@@ -395,14 +400,15 @@ def _add_attribute(cls, key, value):
cls.__mapper__.add_property(key, value)
elif isinstance(value, MapperProperty):
cls.__mapper__.add_property(
- key,
- clsregistry._deferred_relationship(cls, value)
- )
+ key,
+ clsregistry._deferred_relationship(cls, value)
+ )
else:
type.__setattr__(cls, key, value)
else:
type.__setattr__(cls, key, value)
+
def _declarative_constructor(self, **kwargs):
"""A simple constructor that allows initialization from kwargs.
diff --git a/lib/sqlalchemy/ext/declarative/clsregistry.py b/lib/sqlalchemy/ext/declarative/clsregistry.py
index 47450c5b7..a0e177f77 100644
--- a/lib/sqlalchemy/ext/declarative/clsregistry.py
+++ b/lib/sqlalchemy/ext/declarative/clsregistry.py
@@ -22,6 +22,7 @@ import weakref
# themselves when all references to contained classes are removed.
_registries = set()
+
def add_class(classname, cls):
"""Add a class to the _decl_class_registry associated with the
given declarative class.
@@ -111,6 +112,7 @@ class _MultipleClassMarker(object):
)
self.contents.add(weakref.ref(item, self._remove_item))
+
class _ModuleMarker(object):
""""refers to a module name within
_decl_class_registry.
@@ -160,7 +162,6 @@ class _ModuleMarker(object):
on_remove=lambda: self._remove_item(name))
-
class _ModNS(object):
def __init__(self, parent):
self.__parent = parent
@@ -180,6 +181,7 @@ class _ModNS(object):
raise AttributeError("Module %r has no mapped classes "
"registered under the name %r" % (self.__parent.name, key))
+
class _GetColumns(object):
def __init__(self, cls):
self.cls = cls
@@ -200,6 +202,7 @@ class _GetColumns(object):
" directly to a Column)." % key)
return getattr(self.cls, key)
+
class _GetTable(object):
def __init__(self, key, metadata):
self.key = key
@@ -210,11 +213,13 @@ class _GetTable(object):
_get_table_key(key, self.key)
]
+
def _determine_container(key, value):
if isinstance(value, _MultipleClassMarker):
value = value.attempt_get([], key)
return _GetColumns(value)
+
def _resolver(cls, prop):
def resolve_arg(arg):
import sqlalchemy
@@ -232,11 +237,13 @@ def _resolver(cls, prop):
return _GetTable(key, cls.metadata)
elif '_sa_module_registry' in cls._decl_class_registry and \
key in cls._decl_class_registry['_sa_module_registry']:
- return cls._decl_class_registry['_sa_module_registry'].resolve_attr(key)
+ registry = cls._decl_class_registry['_sa_module_registry']
+ return registry.resolve_attr(key)
else:
return fallback[key]
d = util.PopulateDict(access_cls)
+
def return_cls():
try:
x = eval(arg, globals(), d)
@@ -256,6 +263,7 @@ def _resolver(cls, prop):
return return_cls
return resolve_arg
+
def _deferred_relationship(cls, prop):
if isinstance(prop, RelationshipProperty):
diff --git a/lib/sqlalchemy/ext/horizontal_shard.py b/lib/sqlalchemy/ext/horizontal_shard.py
index f55cfae0a..1edc4d4c2 100644
--- a/lib/sqlalchemy/ext/horizontal_shard.py
+++ b/lib/sqlalchemy/ext/horizontal_shard.py
@@ -14,13 +14,13 @@ the source distribution.
"""
-from .. import exc as sa_exc
from .. import util
from ..orm.session import Session
from ..orm.query import Query
__all__ = ['ShardedSession', 'ShardedQuery']
+
class ShardedQuery(Query):
def __init__(self, *args, **kwargs):
super(ShardedQuery, self).__init__(*args, **kwargs)
@@ -72,28 +72,29 @@ class ShardedQuery(Query):
else:
return None
+
class ShardedSession(Session):
def __init__(self, shard_chooser, id_chooser, query_chooser, shards=None,
query_cls=ShardedQuery, **kwargs):
"""Construct a ShardedSession.
- :param shard_chooser: A callable which, passed a Mapper, a mapped instance, and possibly a
- SQL clause, returns a shard ID. This id may be based off of the
- attributes present within the object, or on some round-robin
- scheme. If the scheme is based on a selection, it should set
- whatever state on the instance to mark it in the future as
+ :param shard_chooser: A callable which, passed a Mapper, a mapped
+ instance, and possibly a SQL clause, returns a shard ID. This id
+ may be based off of the attributes present within the object, or on
+ some round-robin scheme. If the scheme is based on a selection, it
+ should set whatever state on the instance to mark it in the future as
participating in that shard.
- :param id_chooser: A callable, passed a query and a tuple of identity values, which
- should return a list of shard ids where the ID might reside. The
- databases will be queried in the order of this listing.
+ :param id_chooser: A callable, passed a query and a tuple of identity
+ values, which should return a list of shard ids where the ID might
+ reside. The databases will be queried in the order of this listing.
- :param query_chooser: For a given Query, returns the list of shard_ids where the query
- should be issued. Results from all shards returned will be combined
- together into a single listing.
+ :param query_chooser: For a given Query, returns the list of shard_ids
+ where the query should be issued. Results from all shards returned
+ will be combined together into a single listing.
- :param shards: A dictionary of string shard names to :class:`~sqlalchemy.engine.Engine`
- objects.
+ :param shards: A dictionary of string shard names
+ to :class:`~sqlalchemy.engine.Engine` objects.
"""
super(ShardedSession, self).__init__(query_cls=query_cls, **kwargs)
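A sketch of wiring these callables together; the engine URLs, shard names,
and choosing logic below are all hypothetical::

    from sqlalchemy import create_engine
    from sqlalchemy.ext.horizontal_shard import ShardedSession

    shards = {
        'east': create_engine('postgresql://host1/db'),
        'west': create_engine('postgresql://host2/db'),
    }

    def shard_chooser(mapper, instance, clause=None):
        # route on a hypothetical 'region' attribute of the instance
        return instance.region if instance is not None else 'east'

    def id_chooser(query, ident):
        # no shard encoded in the identity; search every shard in order
        return ['east', 'west']

    def query_chooser(query):
        # issue the query against all shards, combining the results
        return ['east', 'west']

    session = ShardedSession(
        shard_chooser=shard_chooser,
        id_chooser=id_chooser,
        query_chooser=query_chooser,
        shards=shards)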
@@ -117,12 +118,11 @@ class ShardedSession(Session):
shard_id=shard_id,
instance=instance).contextual_connect(**kwargs)
- def get_bind(self, mapper, shard_id=None, instance=None, clause=None, **kw):
+ def get_bind(self, mapper, shard_id=None,
+ instance=None, clause=None, **kw):
if shard_id is None:
shard_id = self.shard_chooser(mapper, instance, clause=clause)
return self.__binds[shard_id]
def bind_shard(self, shard_id, bind):
self.__binds[shard_id] = bind
-
-
diff --git a/lib/sqlalchemy/ext/hybrid.py b/lib/sqlalchemy/ext/hybrid.py
index 57d39866c..047b2ff95 100644
--- a/lib/sqlalchemy/ext/hybrid.py
+++ b/lib/sqlalchemy/ext/hybrid.py
@@ -240,8 +240,8 @@ The above hybrid property ``balance`` works with the first
in-Python getter/setter methods can treat ``accounts`` as a Python
list available on ``self``.
-However, at the expression level, it's expected that the ``User`` class will be used
-in an appropriate context such that an appropriate join to
+However, at the expression level, it's expected that the ``User`` class will
+be used in an appropriate context such that an appropriate join to
``SavingsAccount`` will be present::
>>> print Session().query(User, User.balance).\\
@@ -268,11 +268,10 @@ Correlated Subquery Relationship Hybrid
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
We can, of course, forego being dependent on the enclosing query's usage
-of joins in favor of the correlated
-subquery, which can portably be packed into a single colunn expression.
-A correlated subquery is more portable, but often performs more poorly
-at the SQL level.
-Using the same technique illustrated at :ref:`mapper_column_property_sql_expressions`,
+of joins in favor of the correlated subquery, which can portably be packed
+into a single column expression. A correlated subquery is more portable, but
+often performs more poorly at the SQL level. Using the same technique
+illustrated at :ref:`mapper_column_property_sql_expressions`,
we can adjust our ``SavingsAccount`` example to aggregate the balances for
*all* accounts, and use a correlated subquery for the column expression::
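A sketch of such an adjustment, assuming the ``User``/``SavingsAccount``
mappings used earlier in this module's documentation::

    from sqlalchemy import select, func
    from sqlalchemy.ext.hybrid import hybrid_property

    class User(Base):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)

        @hybrid_property
        def balance(self):
            # in-Python aggregation over the loaded collection
            return sum(acc.balance for acc in self.accounts)

        @balance.expression
        def balance(cls):
            # correlated subquery usable within any enclosing SELECT
            return select([func.sum(SavingsAccount.balance)]).\
                    where(SavingsAccount.user_id == cls.id).\
                    label('total_balance')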
@@ -629,6 +628,7 @@ there's probably a whole lot of amazing things it can be used for.
from .. import util
from ..orm import attributes, interfaces
+
class hybrid_method(object):
"""A decorator which allows definition of a Python object method with both
instance-level and class-level behavior.
@@ -668,6 +668,7 @@ class hybrid_method(object):
self.expr = expr
return self
+
class hybrid_property(object):
"""A decorator which allows definition of a Python descriptor with both
instance-level and class-level behavior.
@@ -750,6 +751,7 @@ class hybrid_property(object):
self.expr = expr
return self
+
class Comparator(interfaces.PropComparator):
"""A helper class that allows easy construction of custom
:class:`~.orm.interfaces.PropComparator`
diff --git a/lib/sqlalchemy/ext/instrumentation.py b/lib/sqlalchemy/ext/instrumentation.py
index f840ad066..bb44a492c 100644
--- a/lib/sqlalchemy/ext/instrumentation.py
+++ b/lib/sqlalchemy/ext/instrumentation.py
@@ -61,6 +61,7 @@ attribute.
"""
+
def find_native_user_instrumentation_hook(cls):
"""Find user-specified instrumentation management for a class."""
return getattr(cls, INSTRUMENTATION_MANAGER, None)
@@ -81,6 +82,7 @@ ClassManager instrumentation is used.
"""
+
class ExtendedInstrumentationRegistry(InstrumentationFactory):
"""Extends :class:`.InstrumentationFactory` with additional
bookkeeping, to accommodate multiple types of
@@ -169,17 +171,21 @@ class ExtendedInstrumentationRegistry(InstrumentationFactory):
def state_of(self, instance):
if instance is None:
raise AttributeError("None has no persistent state.")
- return self._state_finders.get(instance.__class__, _default_state_getter)(instance)
+ return self._state_finders.get(
+ instance.__class__, _default_state_getter)(instance)
def dict_of(self, instance):
if instance is None:
raise AttributeError("None has no persistent state.")
- return self._dict_finders.get(instance.__class__, _default_dict_getter)(instance)
+ return self._dict_finders.get(
+ instance.__class__, _default_dict_getter)(instance)
+
orm_instrumentation._instrumentation_factory = \
_instrumentation_factory = ExtendedInstrumentationRegistry()
orm_instrumentation.instrumentation_finders = instrumentation_finders
+
class InstrumentationManager(object):
"""User-defined class instrumentation extension.
@@ -259,6 +265,7 @@ class InstrumentationManager(object):
def dict_getter(self, class_):
return lambda inst: self.get_instance_dict(class_, inst)
+
class _ClassInstrumentationAdapter(ClassManager):
"""Adapts a user-defined InstrumentationManager to a ClassManager."""
@@ -353,6 +360,7 @@ class _ClassInstrumentationAdapter(ClassManager):
def dict_getter(self):
return self._get_dict
+
def _install_instrumented_lookups():
"""Replace global class/object management functions
with ExtendedInstrumentationRegistry implementations, which
@@ -368,22 +376,24 @@ def _install_instrumented_lookups():
"""
_install_lookups(
dict(
- instance_state = _instrumentation_factory.state_of,
- instance_dict = _instrumentation_factory.dict_of,
- manager_of_class = _instrumentation_factory.manager_of_class
+ instance_state=_instrumentation_factory.state_of,
+ instance_dict=_instrumentation_factory.dict_of,
+ manager_of_class=_instrumentation_factory.manager_of_class
)
)
+
def _reinstall_default_lookups():
"""Restore simplified lookups."""
_install_lookups(
dict(
- instance_state = _default_state_getter,
- instance_dict = _default_dict_getter,
- manager_of_class = _default_manager_getter
+ instance_state=_default_state_getter,
+ instance_dict=_default_dict_getter,
+ manager_of_class=_default_manager_getter
)
)
+
def _install_lookups(lookups):
global instance_state, instance_dict, manager_of_class
instance_state = lookups['instance_state']
diff --git a/lib/sqlalchemy/ext/mutable.py b/lib/sqlalchemy/ext/mutable.py
index db05a82b4..36d60d6d5 100644
--- a/lib/sqlalchemy/ext/mutable.py
+++ b/lib/sqlalchemy/ext/mutable.py
@@ -7,13 +7,13 @@
"""Provide support for tracking of in-place changes to scalar values,
which are propagated into ORM change events on owning parent objects.
-The :mod:`sqlalchemy.ext.mutable` extension replaces SQLAlchemy's legacy approach to in-place
-mutations of scalar values, established by the :class:`.types.MutableType`
-class as well as the ``mutable=True`` type flag, with a system that allows
-change events to be propagated from the value to the owning parent, thereby
-removing the need for the ORM to maintain copies of values as well as the very
-expensive requirement of scanning through all "mutable" values on each flush
-call, looking for changes.
+The :mod:`sqlalchemy.ext.mutable` extension replaces SQLAlchemy's legacy
+approach to in-place mutations of scalar values, established by the
+:class:`.types.MutableType` class as well as the ``mutable=True`` type flag,
+with a system that allows change events to be propagated from the value to
+the owning parent, thereby removing the need for the ORM to maintain copies
+of values as well as the very expensive requirement of scanning through all
+"mutable" values on each flush call, looking for changes.
.. _mutable_scalars:
@@ -43,8 +43,8 @@ JSON strings before being persisted::
value = json.loads(value)
return value
-The usage of ``json`` is only for the purposes of example. The :mod:`sqlalchemy.ext.mutable`
-extension can be used
+The usage of ``json`` is only for the purposes of example. The
+:mod:`sqlalchemy.ext.mutable` extension can be used
with any type whose target Python type may be mutable, including
:class:`.PickleType`, :class:`.postgresql.ARRAY`, etc.
@@ -86,19 +86,19 @@ The above dictionary class takes the approach of subclassing the Python
built-in ``dict`` to produce a dict
subclass which routes all mutation events through ``__setitem__``. There are
variants on this approach, such as subclassing ``UserDict.UserDict`` or
-``collections.MutableMapping``; the part that's important to this
-example is that the :meth:`.Mutable.changed` method is called whenever an in-place change to the
-datastructure takes place.
+``collections.MutableMapping``; the part that's important to this example is
+that the :meth:`.Mutable.changed` method is called whenever an in-place
+change to the datastructure takes place.
We also redefine the :meth:`.Mutable.coerce` method which will be used to
convert any values that are not instances of ``MutableDict``, such
as the plain dictionaries returned by the ``json`` module, into the
-appropriate type. Defining this method is optional; we could just as well created our
-``JSONEncodedDict`` such that it always returns an instance of ``MutableDict``,
-and additionally ensured that all calling code uses ``MutableDict``
-explicitly. When :meth:`.Mutable.coerce` is not overridden, any values
-applied to a parent object which are not instances of the mutable type
-will raise a ``ValueError``.
+appropriate type. Defining this method is optional; we could just as well
+have created our ``JSONEncodedDict`` such that it always returns an instance
+of ``MutableDict``, and additionally ensured that all calling code
+uses ``MutableDict`` explicitly. When :meth:`.Mutable.coerce` is not
+overridden, any values applied to a parent object which are not instances
+of the mutable type will raise a ``ValueError``.
Our new ``MutableDict`` type offers a class method
:meth:`~.Mutable.as_mutable` which we can use within column metadata
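The usage elided here would look like the following sketch, assuming the
``metadata`` object and the ``JSONEncodedDict`` type defined earlier::

    from sqlalchemy import Table, Column, Integer

    my_data = Table('my_data', metadata,
        Column('id', Integer, primary_key=True),
        Column('data', MutableDict.as_mutable(JSONEncodedDict))
    )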
@@ -156,9 +156,10 @@ will flag the attribute as "dirty" on the parent object::
True
The ``MutableDict`` can be associated with all future instances
-of ``JSONEncodedDict`` in one step, using :meth:`~.Mutable.associate_with`. This
-is similar to :meth:`~.Mutable.as_mutable` except it will intercept
-all occurrences of ``MutableDict`` in all mappings unconditionally, without
+of ``JSONEncodedDict`` in one step, using
+:meth:`~.Mutable.associate_with`. This is similar to
+:meth:`~.Mutable.as_mutable` except it will intercept all occurrences
+of ``MutableDict`` in all mappings unconditionally, without
the need to declare it individually::
MutableDict.associate_with(JSONEncodedDict)
@@ -330,11 +331,14 @@ from ..orm.attributes import flag_modified
from .. import event, types
from ..orm import mapper, object_mapper
from ..util import memoized_property
-from .. import exc
import weakref
+
class MutableBase(object):
- """Common base class to :class:`.Mutable` and :class:`.MutableComposite`."""
+ """Common base class to :class:`.Mutable`
+ and :class:`.MutableComposite`.
+
+ """
@memoized_property
def _parents(self):
@@ -356,7 +360,8 @@ class MutableBase(object):
"""
if value is None:
return None
- raise ValueError("Attribute '%s' does not accept objects of type %s" % (key, type(value)))
+ msg = "Attribute '%s' does not accept objects of type %s"
+ raise ValueError(msg % (key, type(value)))
@classmethod
def _listen_on_attribute(cls, attribute, coerce, parent_cls):
@@ -414,12 +419,17 @@ class MutableBase(object):
for val in state_dict['ext.mutable.values']:
val._parents[state.obj()] = key
+ event.listen(parent_cls, 'load', load,
+ raw=True, propagate=True)
+ event.listen(parent_cls, 'refresh', load,
+ raw=True, propagate=True)
+ event.listen(attribute, 'set', set,
+ raw=True, retval=True, propagate=True)
+ event.listen(parent_cls, 'pickle', pickle,
+ raw=True, propagate=True)
+ event.listen(parent_cls, 'unpickle', unpickle,
+ raw=True, propagate=True)
- event.listen(parent_cls, 'load', load, raw=True, propagate=True)
- event.listen(parent_cls, 'refresh', load, raw=True, propagate=True)
- event.listen(attribute, 'set', set, raw=True, retval=True, propagate=True)
- event.listen(parent_cls, 'pickle', pickle, raw=True, propagate=True)
- event.listen(parent_cls, 'unpickle', unpickle, raw=True, propagate=True)
class Mutable(MutableBase):
"""Mixin that defines transparent propagation of change
@@ -448,15 +458,16 @@ class Mutable(MutableBase):
"""Associate this wrapper with all future mapped columns
of the given type.
- This is a convenience method that calls ``associate_with_attribute`` automatically.
+ This is a convenience method that calls
+ ``associate_with_attribute`` automatically.
.. warning::
The listeners established by this method are *global*
to all mappers, and are *not* garbage collected. Only use
- :meth:`.associate_with` for types that are permanent to an application,
- not with ad-hoc types else this will cause unbounded growth
- in memory usage.
+ :meth:`.associate_with` for types that are permanent to an
+ application, not with ad-hoc types else this will cause unbounded
+ growth in memory usage.
"""
@@ -483,8 +494,8 @@ class Mutable(MutableBase):
)
Note that the returned type is always an instance, even if a class
- is given, and that only columns which are declared specifically with that
- type instance receive additional instrumentation.
+ is given, and that only columns which are declared specifically with
+ that type instance receive additional instrumentation.
To associate a particular mutable type with all occurrences of a
particular type, use the :meth:`.Mutable.associate_with` classmethod
@@ -511,11 +522,13 @@ class Mutable(MutableBase):
return sqltype
+
class _MutableCompositeMeta(type):
def __init__(cls, classname, bases, dict_):
cls._setup_listeners()
return type.__init__(cls, classname, bases, dict_)
+
class MutableComposite(MutableBase):
"""Mixin that defines transparent propagation of change
events on a SQLAlchemy "composite" object to its
@@ -526,10 +539,10 @@ class MutableComposite(MutableBase):
.. warning::
The listeners established by the :class:`.MutableComposite`
- class are *global* to all mappers, and are *not* garbage collected. Only use
- :class:`.MutableComposite` for types that are permanent to an application,
- not with ad-hoc types else this will cause unbounded growth
- in memory usage.
+ class are *global* to all mappers, and are *not* garbage
+ collected. Only use :class:`.MutableComposite` for types that are
+ permanent to an application, not with ad-hoc types else this will
+ cause unbounded growth in memory usage.
"""
__metaclass__ = _MutableCompositeMeta
@@ -550,19 +563,21 @@ class MutableComposite(MutableBase):
"""Associate this wrapper with all future mapped composites
of the given type.
- This is a convenience method that calls ``associate_with_attribute`` automatically.
+ This is a convenience method that calls ``associate_with_attribute``
+ automatically.
"""
def listen_for_type(mapper, class_):
for prop in mapper.iterate_properties:
- if hasattr(prop, 'composite_class') and issubclass(prop.composite_class, cls):
- cls._listen_on_attribute(getattr(class_, prop.key), False, class_)
+ if (hasattr(prop, 'composite_class') and
+ issubclass(prop.composite_class, cls)):
+ cls._listen_on_attribute(
+ getattr(class_, prop.key), False, class_)
event.listen(mapper, 'mapper_configured', listen_for_type)
-
class MutableDict(Mutable, dict):
"""A dictionary type that implements :class:`.Mutable`.
diff --git a/lib/sqlalchemy/ext/orderinglist.py b/lib/sqlalchemy/ext/orderinglist.py
index 968c0a4a9..a2604c379 100644
--- a/lib/sqlalchemy/ext/orderinglist.py
+++ b/lib/sqlalchemy/ext/orderinglist.py
@@ -8,10 +8,11 @@
:author: Jason Kirtland
-``orderinglist`` is a helper for mutable ordered relationships. It will intercept
-list operations performed on a relationship collection and automatically
-synchronize changes in list position with an attribute on the related objects.
-(See :ref:`advdatamapping_entitycollections` for more information on the general pattern.)
+``orderinglist`` is a helper for mutable ordered relationships. It will
+intercept list operations performed on a relationship collection and
+automatically synchronize changes in list position with an attribute on the
+related objects. (See :ref:`advdatamapping_entitycollections` for more
+information on the general pattern.)
Example: Two tables that store slides in a presentation. Each slide
has a number of bullet points, displayed in order by the 'position'
@@ -41,15 +42,15 @@ affected rows when changes are made.
})
mapper(Bullet, bullets_table)
-The standard relationship mapping will produce a list-like attribute on each Slide
-containing all related Bullets, but coping with changes in ordering is totally
-your responsibility. If you insert a Bullet into that list, there is no
-magic- it won't have a position attribute unless you assign it it one, and
+The standard relationship mapping will produce a list-like attribute on each
+Slide containing all related Bullets, but coping with changes in ordering is
+totally your responsibility. If you insert a Bullet into that list, there is
+no magic - it won't have a position attribute unless you assign it one, and
you'll need to manually renumber all the subsequent Bullets in the list to
accommodate the insert.
-An ``orderinglist`` can automate this and manage the 'position' attribute on all
-related bullets for you.
+An ``orderinglist`` can automate this and manage the 'position' attribute on
+all related bullets for you.
.. sourcecode:: python+sql
@@ -69,18 +70,20 @@ related bullets for you.
s.bullets[2].position
>>> 2
-Use the ``ordering_list`` function to set up the ``collection_class`` on relationships
-(as in the mapper example above). This implementation depends on the list
-starting in the proper order, so be SURE to put an order_by on your relationship.
+Use the ``ordering_list`` function to set up the ``collection_class`` on
+relationships (as in the mapper example above). This implementation depends
+on the list starting in the proper order, so be SURE to put an order_by on
+your relationship.
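A sketch of that setup, reusing the ``Slide``/``Bullet`` mapping from the
example above::

    from sqlalchemy.ext.orderinglist import ordering_list

    mapper(Slide, slides_table, properties={
        'bullets': relationship(Bullet,
                        order_by=[bullets_table.c.position],
                        collection_class=ordering_list('position'))
    })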
.. warning::
``ordering_list`` only provides limited functionality when a primary
- key column or unique column is the target of the sort. Since changing the order of
- entries often means that two rows must trade values, this is not possible when
- the value is constrained by a primary key or unique constraint, since one of the rows
- would temporarily have to point to a third available value so that the other row
- could take its old value. ``ordering_list`` doesn't do any of this for you,
+ key column or unique column is the target of the sort. Since changing the
+ order of entries often means that two rows must trade values, this is not
+ possible when the value is constrained by a primary key or unique
+ constraint, since one of the rows would temporarily have to point to a
+ third available value so that the other row could take its old
+ value. ``ordering_list`` doesn't do any of this for you,
nor does SQLAlchemy itself.
``ordering_list`` takes the name of the related object's ordering attribute as
@@ -100,14 +103,14 @@ index to any value you require.
from ..orm.collections import collection
from .. import util
-__all__ = [ 'ordering_list' ]
+__all__ = ['ordering_list']
def ordering_list(attr, count_from=None, **kw):
"""Prepares an OrderingList factory for use in mapper definitions.
- Returns an object suitable for use as an argument to a Mapper relationship's
- ``collection_class`` option. Arguments are:
+ Returns an object suitable for use as an argument to a Mapper
+ relationship's ``collection_class`` option. Arguments are:
attr
Name of the mapped attribute to use for storage and retrieval of
@@ -125,17 +128,22 @@ def ordering_list(attr, count_from=None, **kw):
kw = _unsugar_count_from(count_from=count_from, **kw)
return lambda: OrderingList(attr, **kw)
+
# Ordering utility functions
+
+
def count_from_0(index, collection):
"""Numbering function: consecutive integers starting at 0."""
return index
+
def count_from_1(index, collection):
"""Numbering function: consecutive integers starting at 1."""
return index + 1
+
def count_from_n_factory(start):
"""Numbering function: consecutive integers starting at arbitrary start."""
@@ -147,6 +155,7 @@ def count_from_n_factory(start):
pass
return f
+
def _unsugar_count_from(**kw):
"""Builds counting functions from keyword arguments.
@@ -164,6 +173,7 @@ def _unsugar_count_from(**kw):
kw['ordering_func'] = count_from_n_factory(count_from)
return kw
+
class OrderingList(list):
"""A custom list that manages position information for its children.
@@ -188,9 +198,10 @@ class OrderingList(list):
Name of the attribute that stores the object's order in the
relationship.
- :param ordering_func: Optional. A function that maps the position in the Python list to a
- value to store in the ``ordering_attr``. Values returned are
- usually (but need not be!) integers.
+ :param ordering_func: Optional. A function that maps the position in
+ the Python list to a value to store in the
+ ``ordering_attr``. Values returned are usually (but need not be!)
+ integers.
An ``ordering_func`` is called with two positional parameters: the
index of the element in the list, and the list itself.
@@ -323,6 +334,7 @@ class OrderingList(list):
func.__doc__ = getattr(list, func_name).__doc__
del func_name, func
+
def _reconstitute(cls, dict_, items):
""" Reconstitute an ``OrderingList``.
diff --git a/lib/sqlalchemy/ext/serializer.py b/lib/sqlalchemy/ext/serializer.py
index 8a5882107..3ed41f48a 100644
--- a/lib/sqlalchemy/ext/serializer.py
+++ b/lib/sqlalchemy/ext/serializer.py
@@ -39,18 +39,19 @@ The serializer module is only appropriate for query structures. It is not
needed for:
* instances of user-defined classes. These contain no references to engines,
- sessions or expression constructs in the typical case and can be serialized directly.
+ sessions or expression constructs in the typical case and can be serialized
+ directly.
-* Table metadata that is to be loaded entirely from the serialized structure (i.e. is
- not already declared in the application). Regular pickle.loads()/dumps() can
- be used to fully dump any ``MetaData`` object, typically one which was reflected
- from an existing database at some previous point in time. The serializer module
- is specifically for the opposite case, where the Table metadata is already present
- in memory.
+* Table metadata that is to be loaded entirely from the serialized structure
+ (i.e. is not already declared in the application). Regular
+ pickle.loads()/dumps() can be used to fully dump any ``MetaData`` object,
+ typically one which was reflected from an existing database at some previous
+ point in time. The serializer module is specifically for the opposite case,
+ where the Table metadata is already present in memory.
"""
-from ..orm import class_mapper, Query
+from ..orm import class_mapper
from ..orm.session import Session
from ..orm.mapper import Mapper
from ..orm.attributes import QueryableAttribute
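To round out the docstring, a sketch of the dump/load round trip it
describes; ``some_engine`` and ``MyClass`` are placeholders::

    from sqlalchemy import MetaData
    from sqlalchemy.orm import scoped_session, sessionmaker
    from sqlalchemy.ext.serializer import loads, dumps

    metadata = MetaData(bind=some_engine)
    Session = scoped_session(sessionmaker())

    # ... define mappers against metadata ...

    query = Session.query(MyClass).filter(MyClass.somedata == 'foo')

    # pickle the query structure
    serialized = dumps(query)

    # unpickle; supply the live metadata and scoped session
    query2 = loads(serialized, metadata, Session)

    print query2.all()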
@@ -78,7 +79,6 @@ b64decode = base64.b64decode
__all__ = ['Serializer', 'Deserializer', 'dumps', 'loads']
-
def Serializer(*args, **kw):
pickler = pickle.Pickler(*args, **kw)
@@ -107,6 +107,7 @@ def Serializer(*args, **kw):
our_ids = re.compile(r'(mapper|table|column|session|attribute|engine):(.*)')
+
def Deserializer(file, metadata=None, scoped_session=None, engine=None):
unpickler = pickle.Unpickler(file)
@@ -147,15 +148,15 @@ def Deserializer(file, metadata=None, scoped_session=None, engine=None):
unpickler.persistent_load = persistent_load
return unpickler
+
def dumps(obj, protocol=0):
buf = byte_buffer()
pickler = Serializer(buf, protocol)
pickler.dump(obj)
return buf.getvalue()
+
def loads(data, metadata=None, scoped_session=None, engine=None):
buf = byte_buffer(data)
unpickler = Deserializer(buf, metadata, scoped_session, engine)
return unpickler.load()
-
-