summaryrefslogtreecommitdiff
path: root/lib/sqlalchemy/ext
diff options
context:
space:
mode:
Diffstat (limited to 'lib/sqlalchemy/ext')
-rw-r--r--lib/sqlalchemy/ext/associationproxy.py48
-rw-r--r--lib/sqlalchemy/ext/automap.py417
-rw-r--r--lib/sqlalchemy/ext/compiler.py12
-rw-r--r--lib/sqlalchemy/ext/declarative/__init__.py16
-rw-r--r--lib/sqlalchemy/ext/declarative/api.py31
-rw-r--r--lib/sqlalchemy/ext/declarative/base.py112
-rw-r--r--lib/sqlalchemy/ext/declarative/clsregistry.py30
-rw-r--r--lib/sqlalchemy/ext/horizontal_shard.py16
-rw-r--r--lib/sqlalchemy/ext/hybrid.py7
-rw-r--r--lib/sqlalchemy/ext/instrumentation.py6
-rw-r--r--lib/sqlalchemy/ext/mutable.py21
-rw-r--r--lib/sqlalchemy/ext/orderinglist.py22
-rw-r--r--lib/sqlalchemy/ext/serializer.py12
13 files changed, 399 insertions, 351 deletions
diff --git a/lib/sqlalchemy/ext/associationproxy.py b/lib/sqlalchemy/ext/associationproxy.py
index 92816310a..a987ab413 100644
--- a/lib/sqlalchemy/ext/associationproxy.py
+++ b/lib/sqlalchemy/ext/associationproxy.py
@@ -85,13 +85,13 @@ ASSOCIATION_PROXY = util.symbol('ASSOCIATION_PROXY')
"""
+
class AssociationProxy(interfaces._InspectionAttr):
"""A descriptor that presents a read/write view of an object attribute."""
is_attribute = False
extension_type = ASSOCIATION_PROXY
-
def __init__(self, target_collection, attr, creator=None,
getset_factory=None, proxy_factory=None,
proxy_bulk_set=None):
@@ -230,7 +230,7 @@ class AssociationProxy(interfaces._InspectionAttr):
@util.memoized_property
def _value_is_scalar(self):
return not self._get_property().\
- mapper.get_property(self.value_attr).uselist
+ mapper.get_property(self.value_attr).uselist
@util.memoized_property
def _target_is_object(self):
@@ -349,8 +349,8 @@ class AssociationProxy(interfaces._InspectionAttr):
proxy.update(values)
else:
raise exc.ArgumentError(
- 'no proxy_bulk_set supplied for custom '
- 'collection_class implementation')
+ 'no proxy_bulk_set supplied for custom '
+ 'collection_class implementation')
@property
def _comparator(self):
@@ -378,12 +378,12 @@ class AssociationProxy(interfaces._InspectionAttr):
# the "can't call any() on a scalar" msg is raised.
if self.scalar and not self._value_is_scalar:
return self._comparator.has(
- value_expr
- )
+ value_expr
+ )
else:
return self._comparator.any(
- value_expr
- )
+ value_expr
+ )
def has(self, criterion=None, **kwargs):
"""Produce a proxied 'has' expression using EXISTS.
@@ -397,14 +397,14 @@ class AssociationProxy(interfaces._InspectionAttr):
if self._target_is_object:
return self._comparator.has(
- getattr(self.target_class, self.value_attr).\
- has(criterion, **kwargs)
- )
+ getattr(self.target_class, self.value_attr).
+ has(criterion, **kwargs)
+ )
else:
if criterion is not None or kwargs:
raise exc.ArgumentError(
- "Non-empty has() not allowed for "
- "column-targeted association proxy; use ==")
+ "Non-empty has() not allowed for "
+ "column-targeted association proxy; use ==")
return self._comparator.has()
def contains(self, obj):
@@ -429,9 +429,9 @@ class AssociationProxy(interfaces._InspectionAttr):
# is only allowed with a scalar.
if obj is None:
return or_(
- self._comparator.has(**{self.value_attr: obj}),
- self._comparator == None
- )
+ self._comparator.has(**{self.value_attr: obj}),
+ self._comparator == None
+ )
else:
return self._comparator.has(**{self.value_attr: obj})
@@ -439,7 +439,7 @@ class AssociationProxy(interfaces._InspectionAttr):
# note the has() here will fail for collections; eq_()
# is only allowed with a scalar.
return self._comparator.has(
- getattr(self.target_class, self.value_attr) != obj)
+ getattr(self.target_class, self.value_attr) != obj)
class _lazy_collection(object):
@@ -451,8 +451,8 @@ class _lazy_collection(object):
obj = self.ref()
if obj is None:
raise exc.InvalidRequestError(
- "stale association proxy, parent object has gone out of "
- "scope")
+ "stale association proxy, parent object has gone out of "
+ "scope")
return getattr(obj, self.target)
def __getstate__(self):
@@ -698,7 +698,7 @@ class _AssociationList(_AssociationCollection):
for func_name, func in list(locals().items()):
if (util.callable(func) and func.__name__ == func_name and
- not func.__doc__ and hasattr(list, func_name)):
+ not func.__doc__ and hasattr(list, func_name)):
func.__doc__ = getattr(list, func_name).__doc__
del func_name, func
@@ -835,8 +835,8 @@ class _AssociationDict(_AssociationCollection):
self[k] = v
except ValueError:
raise ValueError(
- "dictionary update sequence "
- "requires 2-element tuples")
+ "dictionary update sequence "
+ "requires 2-element tuples")
for key, value in kw:
self[key] = value
@@ -849,7 +849,7 @@ class _AssociationDict(_AssociationCollection):
for func_name, func in list(locals().items()):
if (util.callable(func) and func.__name__ == func_name and
- not func.__doc__ and hasattr(dict, func_name)):
+ not func.__doc__ and hasattr(dict, func_name)):
func.__doc__ = getattr(dict, func_name).__doc__
del func_name, func
@@ -1049,6 +1049,6 @@ class _AssociationSet(_AssociationCollection):
for func_name, func in list(locals().items()):
if (util.callable(func) and func.__name__ == func_name and
- not func.__doc__ and hasattr(set, func_name)):
+ not func.__doc__ and hasattr(set, func_name)):
func.__doc__ = getattr(set, func_name).__doc__
del func_name, func
diff --git a/lib/sqlalchemy/ext/automap.py b/lib/sqlalchemy/ext/automap.py
index 1da65011d..17ebef5b5 100644
--- a/lib/sqlalchemy/ext/automap.py
+++ b/lib/sqlalchemy/ext/automap.py
@@ -60,7 +60,8 @@ asking it to reflect the schema and produce mappings::
session.add(Address(email_address="foo@bar.com", user=User(name="foo")))
session.commit()
- # collection-based relationships are by default named "<classname>_collection"
+ # collection-based relationships are by default named
+ # "<classname>_collection"
print (u1.address_collection)
Above, calling :meth:`.AutomapBase.prepare` while passing along the
@@ -72,16 +73,17 @@ generated automatically. The :class:`.ForeignKeyConstraint` objects which
link the various tables together will be used to produce new, bidirectional
:func:`.relationship` objects between classes. The classes and relationships
follow along a default naming scheme that we can customize. At this point,
-our basic mapping consisting of related ``User`` and ``Address`` classes is ready
-to use in the traditional way.
+our basic mapping consisting of related ``User`` and ``Address`` classes is
+ready to use in the traditional way.
Generating Mappings from an Existing MetaData
=============================================
We can pass a pre-declared :class:`.MetaData` object to :func:`.automap_base`.
This object can be constructed in any way, including programmatically, from
-a serialized file, or from itself being reflected using :meth:`.MetaData.reflect`.
-Below we illustrate a combination of reflection and explicit table declaration::
+a serialized file, or from itself being reflected using
+:meth:`.MetaData.reflect`. Below we illustrate a combination of reflection and
+explicit table declaration::
from sqlalchemy import create_engine, MetaData, Table, Column, ForeignKey
engine = create_engine("sqlite:///mydatabase.db")
@@ -106,7 +108,8 @@ Below we illustrate a combination of reflection and explicit table declaration::
Base.prepare()
# mapped classes are ready
- User, Address, Order = Base.classes.user, Base.classes.address, Base.classes.user_order
+ User, Address, Order = Base.classes.user, Base.classes.address,\
+ Base.classes.user_order
Specifying Classes Explicitly
============================
@@ -114,11 +117,11 @@ Specifying Classes Explicitly
The :mod:`.sqlalchemy.ext.automap` extension allows classes to be defined
explicitly, in a way similar to that of the :class:`.DeferredReflection` class.
Classes that extend from :class:`.AutomapBase` act like regular declarative
-classes, but are not immediately mapped after their construction, and are instead
-mapped when we call :meth:`.AutomapBase.prepare`. The :meth:`.AutomapBase.prepare`
-method will make use of the classes we've established based on the table name
-we use. If our schema contains tables ``user`` and ``address``, we can define
-one or both of the classes to be used::
+classes, but are not immediately mapped after their construction, and are
+instead mapped when we call :meth:`.AutomapBase.prepare`. The
+:meth:`.AutomapBase.prepare` method will make use of the classes we've
+established based on the table name we use. If our schema contains tables
+``user`` and ``address``, we can define one or both of the classes to be used::
from sqlalchemy.ext.automap import automap_base
from sqlalchemy import create_engine
@@ -134,9 +137,9 @@ one or both of the classes to be used::
user_name = Column('name', String)
# override relationships too, if desired.
- # we must use the same name that automap would use for the relationship,
- # and also must refer to the class name that automap will generate
- # for "address"
+ # we must use the same name that automap would use for the
+ # relationship, and also must refer to the class name that automap will
+ # generate for "address"
address_collection = relationship("address", collection_class=set)
# reflect
@@ -158,10 +161,10 @@ one or both of the classes to be used::
Above, one of the more intricate details is that we illustrated overriding
one of the :func:`.relationship` objects that automap would have created.
To do this, we needed to make sure the names match up with what automap
-would normally generate, in that the relationship name would be ``User.address_collection``
-and the name of the class referred to, from automap's perspective, is called
-``address``, even though we are referring to it as ``Address`` within our usage
-of this class.
+would normally generate, in that the relationship name would be
+``User.address_collection`` and the name of the class referred to, from
+automap's perspective, is called ``address``, even though we are referring to
+it as ``Address`` within our usage of this class.
Overriding Naming Schemes
=========================
@@ -212,7 +215,8 @@ scheme for class names and a "pluralizer" for collection names using the
)
From the above mapping, we would now have classes ``User`` and ``Address``,
-where the collection from ``User`` to ``Address`` is called ``User.addresses``::
+where the collection from ``User`` to ``Address`` is called
+``User.addresses``::
User, Address = Base.classes.User, Base.classes.Address
@@ -223,7 +227,8 @@ Relationship Detection
The vast majority of what automap accomplishes is the generation of
:func:`.relationship` structures based on foreign keys. The mechanism
-by which this works for many-to-one and one-to-many relationships is as follows:
+by which this works for many-to-one and one-to-many relationships is as
+follows:
1. A given :class:`.Table`, known to be mapped to a particular class,
is examined for :class:`.ForeignKeyConstraint` objects.
@@ -232,10 +237,10 @@ by which this works for many-to-one and one-to-many relationships is as follows:
object present is matched up to the class to which it is to be mapped,
if any, else it is skipped.
-3. As the :class:`.ForeignKeyConstraint` we are examining corresponds to a reference
- from the immediate mapped class,
- the relationship will be set up as a many-to-one referring to the referred class;
- a corresponding one-to-many backref will be created on the referred class referring
+3. As the :class:`.ForeignKeyConstraint` we are examining corresponds to a
+ reference from the immediate mapped class, the relationship will be set up
+ as a many-to-one referring to the referred class; a corresponding
+ one-to-many backref will be created on the referred class referring
to this class.
4. The names of the relationships are determined using the
@@ -248,15 +253,15 @@ by which this works for many-to-one and one-to-many relationships is as follows:
name will be derived.
5. The classes are inspected for an existing mapped property matching these
- names. If one is detected on one side, but none on the other side, :class:`.AutomapBase`
- attempts to create a relationship on the missing side, then uses the
- :paramref:`.relationship.back_populates` parameter in order to point
- the new relationship to the other side.
+ names. If one is detected on one side, but none on the other side,
+ :class:`.AutomapBase` attempts to create a relationship on the missing side,
+ then uses the :paramref:`.relationship.back_populates` parameter in order to
+ point the new relationship to the other side.
6. In the usual case where no relationship is on either side,
- :meth:`.AutomapBase.prepare` produces a :func:`.relationship` on the "many-to-one"
- side and matches it to the other using the :paramref:`.relationship.backref`
- parameter.
+ :meth:`.AutomapBase.prepare` produces a :func:`.relationship` on the
+ "many-to-one" side and matches it to the other using the
+ :paramref:`.relationship.backref` parameter.
7. Production of the :func:`.relationship` and optionally the :func:`.backref`
is handed off to the :paramref:`.AutomapBase.prepare.generate_relationship`
@@ -288,7 +293,7 @@ options along to all one-to-many relationships::
# make use of the built-in function to actually return
# the result.
return generate_relationship(base, direction, return_fn,
- attrname, local_cls, referred_cls, **kw)
+ attrname, local_cls, referred_cls, **kw)
from sqlalchemy.ext.automap import automap_base
from sqlalchemy import create_engine
@@ -307,16 +312,17 @@ Many-to-Many relationships
those which contain a ``secondary`` argument. The process for producing these
is as follows:
-1. A given :class:`.Table` is examined for :class:`.ForeignKeyConstraint` objects,
- before any mapped class has been assigned to it.
+1. A given :class:`.Table` is examined for :class:`.ForeignKeyConstraint`
+ objects, before any mapped class has been assigned to it.
2. If the table contains two and exactly two :class:`.ForeignKeyConstraint`
objects, and all columns within this table are members of these two
:class:`.ForeignKeyConstraint` objects, the table is assumed to be a
"secondary" table, and will **not be mapped directly**.
-3. The two (or one, for self-referential) external tables to which the :class:`.Table`
- refers to are matched to the classes to which they will be mapped, if any.
+3. The two (or one, for self-referential) external tables to which the
+   :class:`.Table` refers are matched to the classes to which they will be
+   mapped, if any.
4. If mapped classes for both sides are located, a many-to-many bi-directional
:func:`.relationship` / :func:`.backref` pair is created between the two
@@ -330,8 +336,8 @@ Relationships with Inheritance
------------------------------
:mod:`.sqlalchemy.ext.automap` will not generate any relationships between
-two classes that are in an inheritance relationship. That is, with two classes
-given as follows::
+two classes that are in an inheritance relationship. That is, with two
+classes given as follows::
class Employee(Base):
__tablename__ = 'employee'
@@ -348,8 +354,8 @@ given as follows::
'polymorphic_identity':'engineer',
}
-The foreign key from ``Engineer`` to ``Employee`` is used not for a relationship,
-but to establish joined inheritance between the two classes.
+The foreign key from ``Engineer`` to ``Employee`` is used not for a
+relationship, but to establish joined inheritance between the two classes.
Note that this means automap will not generate *any* relationships
for foreign keys that link from a subclass to a superclass. If a mapping
@@ -373,7 +379,8 @@ SQLAlchemy can guess::
id = Column(Integer, ForeignKey('employee.id'), primary_key=True)
favorite_employee_id = Column(Integer, ForeignKey('employee.id'))
- favorite_employee = relationship(Employee, foreign_keys=favorite_employee_id)
+ favorite_employee = relationship(Employee,
+ foreign_keys=favorite_employee_id)
__mapper_args__ = {
'polymorphic_identity':'engineer',
@@ -387,8 +394,8 @@ Using Automap with Explicit Declarations
As noted previously, automap has no dependency on reflection, and can make
use of any collection of :class:`.Table` objects within a :class:`.MetaData`
collection. From this, it follows that automap can also be used to
-generate missing relationships given an otherwise complete model that fully defines
-table metadata::
+generate missing relationships given an otherwise complete model that fully
+defines table metadata::
from sqlalchemy.ext.automap import automap_base
from sqlalchemy import Column, Integer, String, ForeignKey
@@ -420,12 +427,12 @@ table metadata::
Above, given mostly complete ``User`` and ``Address`` mappings, the
:class:`.ForeignKey` which we defined on ``Address.user_id`` allowed a
-bidirectional relationship pair ``Address.user`` and ``User.address_collection``
-to be generated on the mapped classes.
+bidirectional relationship pair ``Address.user`` and
+``User.address_collection`` to be generated on the mapped classes.
-Note that when subclassing :class:`.AutomapBase`, the :meth:`.AutomapBase.prepare`
-method is required; if not called, the classes we've declared are in an
-un-mapped state.
+Note that when subclassing :class:`.AutomapBase`,
+the :meth:`.AutomapBase.prepare` method is required; if not called, the classes
+we've declared are in an un-mapped state.
"""
@@ -459,15 +466,16 @@ def classname_for_table(base, tablename, table):
.. note::
- In Python 2, the string used for the class name **must** be a non-Unicode
- object, e.g. a ``str()`` object. The ``.name`` attribute of
- :class:`.Table` is typically a Python unicode subclass, so the ``str()``
- function should be applied to this name, after accounting for any non-ASCII
- characters.
+ In Python 2, the string used for the class name **must** be a
+ non-Unicode object, e.g. a ``str()`` object. The ``.name`` attribute
+ of :class:`.Table` is typically a Python unicode subclass, so the
+ ``str()`` function should be applied to this name, after accounting for
+ any non-ASCII characters.
"""
return str(tablename)
+
def name_for_scalar_relationship(base, local_cls, referred_cls, constraint):
"""Return the attribute name that should be used to refer from one
class to another, for a scalar object reference.
@@ -492,7 +500,9 @@ def name_for_scalar_relationship(base, local_cls, referred_cls, constraint):
"""
return referred_cls.__name__.lower()
-def name_for_collection_relationship(base, local_cls, referred_cls, constraint):
+
+def name_for_collection_relationship(
+ base, local_cls, referred_cls, constraint):
"""Return the attribute name that should be used to refer from one
class to another, for a collection reference.
@@ -501,7 +511,8 @@ def name_for_collection_relationship(base, local_cls, referred_cls, constraint):
return referred_cls.__name__.lower() + "_collection"
Alternate implementations
- can be specified using the :paramref:`.AutomapBase.prepare.name_for_collection_relationship`
+ can be specified using the
+ :paramref:`.AutomapBase.prepare.name_for_collection_relationship`
parameter.
:param base: the :class:`.AutomapBase` class doing the prepare.
@@ -516,7 +527,9 @@ def name_for_collection_relationship(base, local_cls, referred_cls, constraint):
"""
return referred_cls.__name__.lower() + "_collection"
-def generate_relationship(base, direction, return_fn, attrname, local_cls, referred_cls, **kw):
+
+def generate_relationship(
+ base, direction, return_fn, attrname, local_cls, referred_cls, **kw):
"""Generate a :func:`.relationship` or :func:`.backref` on behalf of two
mapped classes.
@@ -538,11 +551,11 @@ def generate_relationship(base, direction, return_fn, attrname, local_cls, refer
be one of :data:`.ONETOMANY`, :data:`.MANYTOONE`, :data:`.MANYTOONE`.
:param return_fn: the function that is used by default to create the
- relationship. This will be either :func:`.relationship` or :func:`.backref`.
- The :func:`.backref` function's result will be used to produce a new
- :func:`.relationship` in a second step, so it is critical that user-defined
- implementations correctly differentiate between the two functions, if
- a custom relationship function is being used.
+ relationship. This will be either :func:`.relationship` or
+ :func:`.backref`. The :func:`.backref` function's result will be used to
+ produce a new :func:`.relationship` in a second step, so it is critical
+ that user-defined implementations correctly differentiate between the two
+ functions, if a custom relationship function is being used.
:attrname: the attribute name to which this relationship is being assigned.
If the value of :paramref:`.generate_relationship.return_fn` is the
@@ -552,8 +565,8 @@ def generate_relationship(base, direction, return_fn, attrname, local_cls, refer
:param local_cls: the "local" class to which this relationship or backref
will be locally present.
- :param referred_cls: the "referred" class to which the relationship or backref
- refers to.
+    :param referred_cls: the "referred" class to which the relationship or
+      backref refers.
:param \**kw: all additional keyword arguments are passed along to the
function.
@@ -569,6 +582,7 @@ def generate_relationship(base, direction, return_fn, attrname, local_cls, refer
else:
raise TypeError("Unknown relationship function: %s" % return_fn)
+
class AutomapBase(object):
"""Base class for an "automap" schema.
@@ -601,44 +615,45 @@ class AutomapBase(object):
"""
@classmethod
- def prepare(cls,
- engine=None,
- reflect=False,
- classname_for_table=classname_for_table,
- collection_class=list,
- name_for_scalar_relationship=name_for_scalar_relationship,
- name_for_collection_relationship=name_for_collection_relationship,
- generate_relationship=generate_relationship):
-
+ def prepare(
+ cls,
+ engine=None,
+ reflect=False,
+ classname_for_table=classname_for_table,
+ collection_class=list,
+ name_for_scalar_relationship=name_for_scalar_relationship,
+ name_for_collection_relationship=name_for_collection_relationship,
+ generate_relationship=generate_relationship):
"""Extract mapped classes and relationships from the :class:`.MetaData` and
perform mappings.
:param engine: an :class:`.Engine` or :class:`.Connection` with which
to perform schema reflection, if specified.
- If the :paramref:`.AutomapBase.prepare.reflect` argument is False, this
- object is not used.
+ If the :paramref:`.AutomapBase.prepare.reflect` argument is False,
+ this object is not used.
:param reflect: if True, the :meth:`.MetaData.reflect` method is called
on the :class:`.MetaData` associated with this :class:`.AutomapBase`.
- The :class:`.Engine` passed via :paramref:`.AutomapBase.prepare.engine` will
- be used to perform the reflection if present; else, the :class:`.MetaData`
- should already be bound to some engine else the operation will fail.
+ The :class:`.Engine` passed via
+ :paramref:`.AutomapBase.prepare.engine` will be used to perform the
+ reflection if present; else, the :class:`.MetaData` should already be
+ bound to some engine else the operation will fail.
:param classname_for_table: callable function which will be used to
produce new class names, given a table name. Defaults to
:func:`.classname_for_table`.
- :param name_for_scalar_relationship: callable function which will be used
- to produce relationship names for scalar relationships. Defaults to
- :func:`.name_for_scalar_relationship`.
+ :param name_for_scalar_relationship: callable function which will be
+ used to produce relationship names for scalar relationships. Defaults
+ to :func:`.name_for_scalar_relationship`.
- :param name_for_collection_relationship: callable function which will be used
- to produce relationship names for collection-oriented relationships. Defaults to
- :func:`.name_for_collection_relationship`.
+ :param name_for_collection_relationship: callable function which will
+ be used to produce relationship names for collection-oriented
+ relationships. Defaults to :func:`.name_for_collection_relationship`.
:param generate_relationship: callable function which will be used to
- actually generate :func:`.relationship` and :func:`.backref` constructs.
- Defaults to :func:`.generate_relationship`.
+ actually generate :func:`.relationship` and :func:`.backref`
+ constructs. Defaults to :func:`.generate_relationship`.
:param collection_class: the Python collection class that will be used
when a new :func:`.relationship` object is created that represents a
@@ -647,16 +662,16 @@ class AutomapBase(object):
"""
if reflect:
cls.metadata.reflect(
- engine,
- extend_existing=True,
- autoload_replace=False
- )
+ engine,
+ extend_existing=True,
+ autoload_replace=False
+ )
table_to_map_config = dict(
- (m.local_table, m)
- for m in _DeferredMapperConfig.
- classes_for_base(cls, sort=False)
- )
+ (m.local_table, m)
+ for m in _DeferredMapperConfig.
+ classes_for_base(cls, sort=False)
+ )
many_to_many = []
@@ -678,25 +693,24 @@ class AutomapBase(object):
for map_config in table_to_map_config.values():
_relationships_for_fks(cls,
- map_config,
- table_to_map_config,
- collection_class,
- name_for_scalar_relationship,
- name_for_collection_relationship,
- generate_relationship)
+ map_config,
+ table_to_map_config,
+ collection_class,
+ name_for_scalar_relationship,
+ name_for_collection_relationship,
+ generate_relationship)
for lcl_m2m, rem_m2m, m2m_const, table in many_to_many:
_m2m_relationship(cls, lcl_m2m, rem_m2m, m2m_const, table,
- table_to_map_config,
- collection_class,
- name_for_scalar_relationship,
- name_for_collection_relationship,
- generate_relationship)
+ table_to_map_config,
+ collection_class,
+ name_for_scalar_relationship,
+ name_for_collection_relationship,
+ generate_relationship)
for map_config in _DeferredMapperConfig.classes_for_base(cls):
map_config.map()
-
_sa_decl_prepare = True
"""Indicate that the mapping of classes should be deferred.
@@ -718,6 +732,7 @@ class AutomapBase(object):
"""
+
def automap_base(declarative_base=None, **kw):
"""Produce a declarative automap base.
@@ -731,8 +746,8 @@ def automap_base(declarative_base=None, **kw):
:param declarative_base: an existing class produced by
:func:`.declarative.declarative_base`. When this is passed, the function
- no longer invokes :func:`.declarative.declarative_base` itself, and all other
- keyword arguments are ignored.
+ no longer invokes :func:`.declarative.declarative_base` itself, and all
+ other keyword arguments are ignored.
:param \**kw: keyword arguments are passed along to
:func:`.declarative.declarative_base`.
@@ -744,20 +759,21 @@ def automap_base(declarative_base=None, **kw):
Base = declarative_base
return type(
- Base.__name__,
- (AutomapBase, Base,),
- {"__abstract__": True, "classes": util.Properties({})}
- )
+ Base.__name__,
+ (AutomapBase, Base,),
+ {"__abstract__": True, "classes": util.Properties({})}
+ )
+
def _is_many_to_many(automap_base, table):
fk_constraints = [const for const in table.constraints
- if isinstance(const, ForeignKeyConstraint)]
+ if isinstance(const, ForeignKeyConstraint)]
if len(fk_constraints) != 2:
return None, None, None
cols = sum(
- [[fk.parent for fk in fk_constraint.elements]
- for fk_constraint in fk_constraints], [])
+ [[fk.parent for fk in fk_constraint.elements]
+ for fk_constraint in fk_constraints], [])
if set(cols) != set(table.c):
return None, None, None
@@ -768,11 +784,12 @@ def _is_many_to_many(automap_base, table):
fk_constraints
)
+
def _relationships_for_fks(automap_base, map_config, table_to_map_config,
- collection_class,
- name_for_scalar_relationship,
- name_for_collection_relationship,
- generate_relationship):
+ collection_class,
+ name_for_scalar_relationship,
+ name_for_collection_relationship,
+ generate_relationship):
local_table = map_config.local_table
local_cls = map_config.cls
@@ -787,62 +804,73 @@ def _relationships_for_fks(automap_base, map_config, table_to_map_config,
continue
referred_cls = referred_cfg.cls
- if local_cls is not referred_cls and issubclass(local_cls, referred_cls):
+ if local_cls is not referred_cls and issubclass(
+ local_cls, referred_cls):
continue
relationship_name = name_for_scalar_relationship(
- automap_base,
- local_cls,
- referred_cls, constraint)
+ automap_base,
+ local_cls,
+ referred_cls, constraint)
backref_name = name_for_collection_relationship(
- automap_base,
- referred_cls,
- local_cls,
- constraint
- )
+ automap_base,
+ referred_cls,
+ local_cls,
+ constraint
+ )
create_backref = backref_name not in referred_cfg.properties
if relationship_name not in map_config.properties:
if create_backref:
- backref_obj = generate_relationship(automap_base,
- interfaces.ONETOMANY, backref,
- backref_name, referred_cls, local_cls,
- collection_class=collection_class)
+ backref_obj = generate_relationship(
+ automap_base,
+ interfaces.ONETOMANY, backref,
+ backref_name, referred_cls, local_cls,
+ collection_class=collection_class)
else:
backref_obj = None
rel = generate_relationship(automap_base,
- interfaces.MANYTOONE,
- relationship,
- relationship_name,
- local_cls, referred_cls,
- foreign_keys=[fk.parent for fk in constraint.elements],
- backref=backref_obj,
- remote_side=[fk.column for fk in constraint.elements]
- )
+ interfaces.MANYTOONE,
+ relationship,
+ relationship_name,
+ local_cls, referred_cls,
+ foreign_keys=[
+ fk.parent
+ for fk in constraint.elements],
+ backref=backref_obj,
+ remote_side=[
+ fk.column
+ for fk in constraint.elements]
+ )
if rel is not None:
map_config.properties[relationship_name] = rel
if not create_backref:
- referred_cfg.properties[backref_name].back_populates = relationship_name
+ referred_cfg.properties[
+ backref_name].back_populates = relationship_name
elif create_backref:
rel = generate_relationship(automap_base,
- interfaces.ONETOMANY,
- relationship,
- backref_name,
- referred_cls, local_cls,
- foreign_keys=[fk.parent for fk in constraint.elements],
- back_populates=relationship_name,
- collection_class=collection_class)
+ interfaces.ONETOMANY,
+ relationship,
+ backref_name,
+ referred_cls, local_cls,
+ foreign_keys=[
+ fk.parent
+ for fk in constraint.elements],
+ back_populates=relationship_name,
+ collection_class=collection_class)
if rel is not None:
referred_cfg.properties[backref_name] = rel
- map_config.properties[relationship_name].back_populates = backref_name
+ map_config.properties[
+ relationship_name].back_populates = backref_name
+
def _m2m_relationship(automap_base, lcl_m2m, rem_m2m, m2m_const, table,
- table_to_map_config,
- collection_class,
- name_for_scalar_relationship,
- name_for_collection_relationship,
- generate_relationship):
+ table_to_map_config,
+ collection_class,
+ name_for_scalar_relationship,
+ name_for_collection_relationship,
+ generate_relationship):
map_config = table_to_map_config.get(lcl_m2m, None)
referred_cfg = table_to_map_config.get(rem_m2m, None)
@@ -853,56 +881,67 @@ def _m2m_relationship(automap_base, lcl_m2m, rem_m2m, m2m_const, table,
referred_cls = referred_cfg.cls
relationship_name = name_for_collection_relationship(
- automap_base,
- local_cls,
- referred_cls, m2m_const[0])
+ automap_base,
+ local_cls,
+ referred_cls, m2m_const[0])
backref_name = name_for_collection_relationship(
- automap_base,
- referred_cls,
- local_cls,
- m2m_const[1]
- )
+ automap_base,
+ referred_cls,
+ local_cls,
+ m2m_const[1]
+ )
create_backref = backref_name not in referred_cfg.properties
if relationship_name not in map_config.properties:
if create_backref:
- backref_obj = generate_relationship(automap_base,
- interfaces.MANYTOMANY,
- backref,
- backref_name,
- referred_cls, local_cls,
- collection_class=collection_class
- )
+ backref_obj = generate_relationship(
+ automap_base,
+ interfaces.MANYTOMANY,
+ backref,
+ backref_name,
+ referred_cls, local_cls,
+ collection_class=collection_class
+ )
else:
backref_obj = None
rel = generate_relationship(automap_base,
- interfaces.MANYTOMANY,
- relationship,
- relationship_name,
- local_cls, referred_cls,
- secondary=table,
- primaryjoin=and_(fk.column == fk.parent for fk in m2m_const[0].elements),
- secondaryjoin=and_(fk.column == fk.parent for fk in m2m_const[1].elements),
- backref=backref_obj,
- collection_class=collection_class
- )
+ interfaces.MANYTOMANY,
+ relationship,
+ relationship_name,
+ local_cls, referred_cls,
+ secondary=table,
+ primaryjoin=and_(
+ fk.column == fk.parent
+ for fk in m2m_const[0].elements),
+ secondaryjoin=and_(
+ fk.column == fk.parent
+ for fk in m2m_const[1].elements),
+ backref=backref_obj,
+ collection_class=collection_class
+ )
if rel is not None:
map_config.properties[relationship_name] = rel
if not create_backref:
- referred_cfg.properties[backref_name].back_populates = relationship_name
+ referred_cfg.properties[
+ backref_name].back_populates = relationship_name
elif create_backref:
rel = generate_relationship(automap_base,
- interfaces.MANYTOMANY,
- relationship,
- backref_name,
- referred_cls, local_cls,
- secondary=table,
- primaryjoin=and_(fk.column == fk.parent for fk in m2m_const[1].elements),
- secondaryjoin=and_(fk.column == fk.parent for fk in m2m_const[0].elements),
- back_populates=relationship_name,
- collection_class=collection_class)
+ interfaces.MANYTOMANY,
+ relationship,
+ backref_name,
+ referred_cls, local_cls,
+ secondary=table,
+ primaryjoin=and_(
+ fk.column == fk.parent
+ for fk in m2m_const[1].elements),
+ secondaryjoin=and_(
+ fk.column == fk.parent
+ for fk in m2m_const[0].elements),
+ back_populates=relationship_name,
+ collection_class=collection_class)
if rel is not None:
referred_cfg.properties[backref_name] = rel
- map_config.properties[relationship_name].back_populates = backref_name
+ map_config.properties[
+ relationship_name].back_populates = backref_name
diff --git a/lib/sqlalchemy/ext/compiler.py b/lib/sqlalchemy/ext/compiler.py
index 03fde2668..8d169aa57 100644
--- a/lib/sqlalchemy/ext/compiler.py
+++ b/lib/sqlalchemy/ext/compiler.py
@@ -58,7 +58,8 @@ invoked for the dialect in use::
@compiles(AlterColumn, 'postgresql')
def visit_alter_column(element, compiler, **kw):
- return "ALTER TABLE %s ALTER COLUMN %s ..." % (element.table.name, element.column.name)
+ return "ALTER TABLE %s ALTER COLUMN %s ..." % (element.table.name,
+ element.column.name)
The second ``visit_alter_table`` will be invoked when any ``postgresql``
dialect is used.
@@ -93,7 +94,8 @@ method which can be used for compilation of embedded attributes::
Produces::
- "INSERT INTO mytable (SELECT mytable.x, mytable.y, mytable.z FROM mytable WHERE mytable.x > :x_1)"
+ "INSERT INTO mytable (SELECT mytable.x, mytable.y, mytable.z
+ FROM mytable WHERE mytable.x > :x_1)"
.. note::
@@ -408,7 +410,7 @@ def compiles(class_, *specs):
# TODO: why is the lambda needed ?
setattr(class_, '_compiler_dispatch',
- lambda *arg, **kw: existing(*arg, **kw))
+ lambda *arg, **kw: existing(*arg, **kw))
setattr(class_, '_compiler_dispatcher', existing)
if specs:
@@ -444,6 +446,6 @@ class _dispatcher(object):
fn = self.specs['default']
except KeyError:
raise exc.CompileError(
- "%s construct has no default "
- "compilation handler." % type(element))
+ "%s construct has no default "
+ "compilation handler." % type(element))
return fn(element, compiler, **kw)
diff --git a/lib/sqlalchemy/ext/declarative/__init__.py b/lib/sqlalchemy/ext/declarative/__init__.py
index eba6cb808..3cbc85c0c 100644
--- a/lib/sqlalchemy/ext/declarative/__init__.py
+++ b/lib/sqlalchemy/ext/declarative/__init__.py
@@ -955,9 +955,9 @@ Mapping a class using the above mixin, we will get an error like::
sqlalchemy.exc.InvalidRequestError: this ForeignKey's parent column is not
yet associated with a Table.
-This is because the ``target_id`` :class:`.Column` we've called upon in our ``target()``
-method is not the same :class:`.Column` that declarative is actually going to map
-to our table.
+This is because the ``target_id`` :class:`.Column` we've called upon in our
+``target()`` method is not the same :class:`.Column` that declarative is
+actually going to map to our table.
The condition above is resolved using a lambda::
@@ -1220,8 +1220,8 @@ assumed to be completed and the 'configure' step has finished::
``__declare_first__()``
~~~~~~~~~~~~~~~~~~~~~~~
-Like ``__declare_last__()``, but is called at the beginning of mapper configuration
-via the :meth:`.MapperEvents.before_configured` event::
+Like ``__declare_last__()``, but is called at the beginning of mapper
+configuration via the :meth:`.MapperEvents.before_configured` event::
class MyClass(Base):
@classmethod
@@ -1312,6 +1312,6 @@ from .api import declarative_base, synonym_for, comparable_using, \
__all__ = ['declarative_base', 'synonym_for', 'has_inherited_table',
- 'comparable_using', 'instrument_declarative', 'declared_attr',
- 'ConcreteBase', 'AbstractConcreteBase', 'DeclarativeMeta',
- 'DeferredReflection']
+ 'comparable_using', 'instrument_declarative', 'declared_attr',
+ 'ConcreteBase', 'AbstractConcreteBase', 'DeclarativeMeta',
+ 'DeferredReflection']
diff --git a/lib/sqlalchemy/ext/declarative/api.py b/lib/sqlalchemy/ext/declarative/api.py
index 5f3d0742e..daf8bffb5 100644
--- a/lib/sqlalchemy/ext/declarative/api.py
+++ b/lib/sqlalchemy/ext/declarative/api.py
@@ -9,8 +9,8 @@
from ...schema import Table, MetaData
from ...orm import synonym as _orm_synonym, mapper,\
- comparable_property,\
- interfaces, properties
+ comparable_property,\
+ interfaces, properties
from ...orm.util import polymorphic_union
from ...orm.base import _mapper_or_none
from ...util import OrderedDict
@@ -18,11 +18,12 @@ from ... import exc
import weakref
from .base import _as_declarative, \
- _declarative_constructor,\
- _DeferredMapperConfig, _add_attribute
+ _declarative_constructor,\
+ _DeferredMapperConfig, _add_attribute
from .clsregistry import _class_resolver
from . import clsregistry
+
def instrument_declarative(cls, registry, metadata):
"""Given a class, configure the class declaratively,
using the given registry, which can be any dictionary, and
@@ -31,8 +32,8 @@ def instrument_declarative(cls, registry, metadata):
"""
if '_decl_class_registry' in cls.__dict__:
raise exc.InvalidRequestError(
- "Class %r already has been "
- "instrumented declaratively" % cls)
+ "Class %r already has been "
+ "instrumented declaratively" % cls)
cls._decl_class_registry = registry
cls.metadata = metadata
_as_declarative(cls, cls.__name__, cls.__dict__)
@@ -245,6 +246,7 @@ def declarative_base(bind=None, metadata=None, mapper=None, cls=object,
return metaclass(name, bases, class_dict)
+
def as_declarative(**kw):
"""
Class decorator for :func:`.declarative_base`.
@@ -282,6 +284,7 @@ def as_declarative(**kw):
return decorate
+
class ConcreteBase(object):
"""A helper class for 'concrete' declarative mappings.
@@ -323,7 +326,7 @@ class ConcreteBase(object):
return polymorphic_union(OrderedDict(
(mp.polymorphic_identity, mp.local_table)
for mp in mappers
- ), 'type', 'pjoin')
+ ), 'type', 'pjoin')
@classmethod
def __declare_first__(cls):
@@ -478,7 +481,7 @@ class DeferredReflection(object):
metadata = mapper.class_.metadata
for rel in mapper._props.values():
if isinstance(rel, properties.RelationshipProperty) and \
- rel.secondary is not None:
+ rel.secondary is not None:
if isinstance(rel.secondary, Table):
cls._reflect_table(rel.secondary, engine)
elif isinstance(rel.secondary, _class_resolver):
@@ -506,9 +509,9 @@ class DeferredReflection(object):
@classmethod
def _reflect_table(cls, table, engine):
Table(table.name,
- table.metadata,
- extend_existing=True,
- autoload_replace=False,
- autoload=True,
- autoload_with=engine,
- schema=table.schema)
+ table.metadata,
+ extend_existing=True,
+ autoload_replace=False,
+ autoload=True,
+ autoload_with=engine,
+ schema=table.schema)
diff --git a/lib/sqlalchemy/ext/declarative/base.py b/lib/sqlalchemy/ext/declarative/base.py
index 41190e407..94baeeb51 100644
--- a/lib/sqlalchemy/ext/declarative/base.py
+++ b/lib/sqlalchemy/ext/declarative/base.py
@@ -20,6 +20,7 @@ from . import clsregistry
import collections
import weakref
+
def _declared_mapping_info(cls):
# deferred mapping
if _DeferredMapperConfig.has_cls(cls):
@@ -59,8 +60,7 @@ def _as_declarative(cls, classname, dict_):
cls.__declare_first__()
if '__abstract__' in base.__dict__ and base.__abstract__:
if (base is cls or
- (base in cls.__bases__ and not _is_declarative_inherits)
- ):
+ (base in cls.__bases__ and not _is_declarative_inherits)):
return
class_mapped = _declared_mapping_info(base) is not None
@@ -68,9 +68,9 @@ def _as_declarative(cls, classname, dict_):
for name, obj in vars(base).items():
if name == '__mapper_args__':
if not mapper_args_fn and (
- not class_mapped or
- isinstance(obj, declarative_props)
- ):
+ not class_mapped or
+ isinstance(obj, declarative_props)
+ ):
# don't even invoke __mapper_args__ until
# after we've determined everything about the
# mapped table.
@@ -80,29 +80,29 @@ def _as_declarative(cls, classname, dict_):
mapper_args_fn = lambda: dict(cls.__mapper_args__)
elif name == '__tablename__':
if not tablename and (
- not class_mapped or
- isinstance(obj, declarative_props)
- ):
+ not class_mapped or
+ isinstance(obj, declarative_props)
+ ):
tablename = cls.__tablename__
elif name == '__table_args__':
if not table_args and (
- not class_mapped or
- isinstance(obj, declarative_props)
- ):
+ not class_mapped or
+ isinstance(obj, declarative_props)
+ ):
table_args = cls.__table_args__
if not isinstance(table_args, (tuple, dict, type(None))):
raise exc.ArgumentError(
- "__table_args__ value must be a tuple, "
- "dict, or None")
+ "__table_args__ value must be a tuple, "
+ "dict, or None")
if base is not cls:
inherited_table_args = True
elif class_mapped:
if isinstance(obj, declarative_props):
util.warn("Regular (i.e. not __special__) "
- "attribute '%s.%s' uses @declared_attr, "
- "but owning class %s is mapped - "
- "not applying to subclass %s."
- % (base.__name__, name, base, cls))
+ "attribute '%s.%s' uses @declared_attr, "
+ "but owning class %s is mapped - "
+ "not applying to subclass %s."
+ % (base.__name__, name, base, cls))
continue
elif base is not cls:
# we're a mixin.
@@ -114,18 +114,18 @@ def _as_declarative(cls, classname, dict_):
continue
if obj.foreign_keys:
raise exc.InvalidRequestError(
- "Columns with foreign keys to other columns "
- "must be declared as @declared_attr callables "
- "on declarative mixin classes. ")
+ "Columns with foreign keys to other columns "
+ "must be declared as @declared_attr callables "
+ "on declarative mixin classes. ")
if name not in dict_ and not (
'__table__' in dict_ and
(obj.name or name) in dict_['__table__'].c
- ) and name not in potential_columns:
+ ) and name not in potential_columns:
potential_columns[name] = \
- column_copies[obj] = \
- obj.copy()
+ column_copies[obj] = \
+ obj.copy()
column_copies[obj]._creation_order = \
- obj._creation_order
+ obj._creation_order
elif isinstance(obj, MapperProperty):
raise exc.InvalidRequestError(
"Mapper properties (i.e. deferred,"
@@ -134,9 +134,9 @@ def _as_declarative(cls, classname, dict_):
"on declarative mixin classes.")
elif isinstance(obj, declarative_props):
dict_[name] = ret = \
- column_copies[obj] = getattr(cls, name)
+ column_copies[obj] = getattr(cls, name)
if isinstance(ret, (Column, MapperProperty)) and \
- ret.doc is None:
+ ret.doc is None:
ret.doc = obj.__doc__
# apply inherited columns as we should
@@ -167,9 +167,8 @@ def _as_declarative(cls, classname, dict_):
value = synonym(value.key)
setattr(cls, k, value)
-
if (isinstance(value, tuple) and len(value) == 1 and
- isinstance(value[0], (Column, MapperProperty))):
+ isinstance(value[0], (Column, MapperProperty))):
util.warn("Ignoring declarative-like tuple value of attribute "
"%s: possibly a copy-and-paste error with a comma "
"left at the end of the line?" % k)
@@ -198,7 +197,7 @@ def _as_declarative(cls, classname, dict_):
if isinstance(c, (ColumnProperty, CompositeProperty)):
for col in c.columns:
if isinstance(col, Column) and \
- col.table is None:
+ col.table is None:
_undefer_column_name(key, col)
if not isinstance(c, CompositeProperty):
name_to_prop_key[col.name].add(key)
@@ -280,7 +279,7 @@ def _as_declarative(cls, classname, dict_):
"Class %r does not have a __table__ or __tablename__ "
"specified and does not inherit from an existing "
"table-mapped class." % cls
- )
+ )
elif inherits:
inherited_mapper = _declared_mapping_info(inherits)
inherited_table = inherited_mapper.local_table
@@ -293,14 +292,14 @@ def _as_declarative(cls, classname, dict_):
raise exc.ArgumentError(
"Can't place __table_args__ on an inherited class "
"with no table."
- )
+ )
# add any columns declared here to the inherited table.
for c in declared_columns:
if c.primary_key:
raise exc.ArgumentError(
"Can't place primary key columns on an inherited "
"class with no table."
- )
+ )
if c.name in inherited_table.c:
if inherited_table.c[c.name] is c:
continue
@@ -311,7 +310,7 @@ def _as_declarative(cls, classname, dict_):
)
inherited_table.append_column(c)
if inherited_mapped_table is not None and \
- inherited_mapped_table is not inherited_table:
+ inherited_mapped_table is not inherited_table:
inherited_mapped_table._refresh_for_new_column(c)
defer_map = hasattr(cls, '_sa_decl_prepare')
@@ -320,12 +319,12 @@ def _as_declarative(cls, classname, dict_):
else:
cfg_cls = _MapperConfig
mt = cfg_cls(mapper_cls,
- cls, table,
- inherits,
- declared_columns,
- column_copies,
- our_stuff,
- mapper_args_fn)
+ cls, table,
+ inherits,
+ declared_columns,
+ column_copies,
+ our_stuff,
+ mapper_args_fn)
if not defer_map:
mt.map()
@@ -335,12 +334,12 @@ class _MapperConfig(object):
mapped_table = None
def __init__(self, mapper_cls,
- cls,
- table,
- inherits,
- declared_columns,
- column_copies,
- properties, mapper_args_fn):
+ cls,
+ table,
+ inherits,
+ declared_columns,
+ column_copies,
+ properties, mapper_args_fn):
self.mapper_cls = mapper_cls
self.cls = cls
self.local_table = table
@@ -350,7 +349,6 @@ class _MapperConfig(object):
self.declared_columns = declared_columns
self.column_copies = column_copies
-
def _prepare_mapper_arguments(self):
properties = self.properties
if self.mapper_args_fn:
@@ -384,7 +382,7 @@ class _MapperConfig(object):
set([c.key for c in inherited_table.c
if c not in inherited_mapper._columntoproperty])
exclude_properties.difference_update(
- [c.key for c in self.declared_columns])
+ [c.key for c in self.declared_columns])
# look through columns in the current mapper that
# are keyed to a propname different than the colname
@@ -413,6 +411,7 @@ class _MapperConfig(object):
**mapper_args
)
+
class _DeferredMapperConfig(_MapperConfig):
_configs = util.OrderedDict()
@@ -433,32 +432,31 @@ class _DeferredMapperConfig(_MapperConfig):
def has_cls(cls, class_):
# 2.6 fails on weakref if class_ is an old style class
return isinstance(class_, type) and \
- weakref.ref(class_) in cls._configs
+ weakref.ref(class_) in cls._configs
@classmethod
def config_for_cls(cls, class_):
return cls._configs[weakref.ref(class_)]
-
@classmethod
def classes_for_base(cls, base_cls, sort=True):
classes_for_base = [m for m in cls._configs.values()
- if issubclass(m.cls, base_cls)]
+ if issubclass(m.cls, base_cls)]
if not sort:
return classes_for_base
all_m_by_cls = dict(
- (m.cls, m)
- for m in classes_for_base
- )
+ (m.cls, m)
+ for m in classes_for_base
+ )
tuples = []
for m_cls in all_m_by_cls:
tuples.extend(
- (all_m_by_cls[base_cls], all_m_by_cls[m_cls])
- for base_cls in m_cls.__bases__
- if base_cls in all_m_by_cls
- )
+ (all_m_by_cls[base_cls], all_m_by_cls[m_cls])
+ for base_cls in m_cls.__bases__
+ if base_cls in all_m_by_cls
+ )
return list(
topological.sort(
tuples,
diff --git a/lib/sqlalchemy/ext/declarative/clsregistry.py b/lib/sqlalchemy/ext/declarative/clsregistry.py
index b05c3a647..4595b857a 100644
--- a/lib/sqlalchemy/ext/declarative/clsregistry.py
+++ b/lib/sqlalchemy/ext/declarative/clsregistry.py
@@ -11,7 +11,7 @@ This system allows specification of classes and expressions used in
"""
from ...orm.properties import ColumnProperty, RelationshipProperty, \
- SynonymProperty
+ SynonymProperty
from ...schema import _get_table_key
from ...orm import class_mapper, interfaces
from ... import util
@@ -74,7 +74,7 @@ class _MultipleClassMarker(object):
def __init__(self, classes, on_remove=None):
self.on_remove = on_remove
self.contents = set([
- weakref.ref(item, self._remove_item) for item in classes])
+ weakref.ref(item, self._remove_item) for item in classes])
_registries.add(self)
def __iter__(self):
@@ -121,6 +121,7 @@ class _ModuleMarker(object):
_decl_class_registry.
"""
+
def __init__(self, name, parent):
self.parent = parent
self.name = name
@@ -161,8 +162,8 @@ class _ModuleMarker(object):
existing.add_item(cls)
else:
existing = self.contents[name] = \
- _MultipleClassMarker([cls],
- on_remove=lambda: self._remove_item(name))
+ _MultipleClassMarker([cls],
+ on_remove=lambda: self._remove_item(name))
class _ModNS(object):
@@ -182,7 +183,8 @@ class _ModNS(object):
assert isinstance(value, _MultipleClassMarker)
return value.attempt_get(self.__parent.path, key)
raise AttributeError("Module %r has no mapped classes "
- "registered under the name %r" % (self.__parent.name, key))
+ "registered under the name %r" % (
+ self.__parent.name, key))
class _GetColumns(object):
@@ -194,8 +196,8 @@ class _GetColumns(object):
if mp:
if key not in mp.all_orm_descriptors:
raise exc.InvalidRequestError(
- "Class %r does not have a mapped column named %r"
- % (self.cls, key))
+ "Class %r does not have a mapped column named %r"
+ % (self.cls, key))
desc = mp.all_orm_descriptors[key]
if desc.extension_type is interfaces.NOT_EXTENSION:
@@ -204,13 +206,13 @@ class _GetColumns(object):
key = prop.name
elif not isinstance(prop, ColumnProperty):
raise exc.InvalidRequestError(
- "Property %r is not an instance of"
- " ColumnProperty (i.e. does not correspond"
- " directly to a Column)." % key)
+ "Property %r is not an instance of"
+ " ColumnProperty (i.e. does not correspond"
+ " directly to a Column)." % key)
return getattr(self.cls, key)
inspection._inspects(_GetColumns)(
- lambda target: inspection.inspect(target.cls))
+ lambda target: inspection.inspect(target.cls))
class _GetTable(object):
@@ -220,8 +222,8 @@ class _GetTable(object):
def __getattr__(self, key):
return self.metadata.tables[
- _get_table_key(key, self.key)
- ]
+ _get_table_key(key, self.key)
+ ]
def _determine_container(key, value):
@@ -248,7 +250,7 @@ class _class_resolver(object):
elif key in cls.metadata._schemas:
return _GetTable(key, cls.metadata)
elif '_sa_module_registry' in cls._decl_class_registry and \
- key in cls._decl_class_registry['_sa_module_registry']:
+ key in cls._decl_class_registry['_sa_module_registry']:
registry = cls._decl_class_registry['_sa_module_registry']
return registry.resolve_attr(key)
elif self._resolvers:
diff --git a/lib/sqlalchemy/ext/horizontal_shard.py b/lib/sqlalchemy/ext/horizontal_shard.py
index 233f172ef..d311fb2d4 100644
--- a/lib/sqlalchemy/ext/horizontal_shard.py
+++ b/lib/sqlalchemy/ext/horizontal_shard.py
@@ -44,10 +44,10 @@ class ShardedQuery(Query):
def iter_for_shard(shard_id):
context.attributes['shard_id'] = shard_id
result = self._connection_from_session(
- mapper=self._mapper_zero(),
- shard_id=shard_id).execute(
- context.statement,
- self._params)
+ mapper=self._mapper_zero(),
+ shard_id=shard_id).execute(
+ context.statement,
+ self._params)
return self.instances(result, context)
if self._shard_id is not None:
@@ -115,9 +115,11 @@ class ShardedSession(Session):
if self.transaction is not None:
return self.transaction.connection(mapper, shard_id=shard_id)
else:
- return self.get_bind(mapper,
- shard_id=shard_id,
- instance=instance).contextual_connect(**kwargs)
+ return self.get_bind(
+ mapper,
+ shard_id=shard_id,
+ instance=instance
+ ).contextual_connect(**kwargs)
def get_bind(self, mapper, shard_id=None,
instance=None, clause=None, **kw):
diff --git a/lib/sqlalchemy/ext/hybrid.py b/lib/sqlalchemy/ext/hybrid.py
index 7f5a91355..9f4e09e92 100644
--- a/lib/sqlalchemy/ext/hybrid.py
+++ b/lib/sqlalchemy/ext/hybrid.py
@@ -474,8 +474,8 @@ of measurement, currencies and encrypted passwords.
.. seealso::
`Hybrids and Value Agnostic Types
- <http://techspot.zzzeek.org/2011/10/21/hybrids-and-value-agnostic-types/>`_ -
- on the techspot.zzzeek.org blog
+ <http://techspot.zzzeek.org/2011/10/21/hybrids-and-value-agnostic-types/>`_
+ - on the techspot.zzzeek.org blog
`Value Agnostic Types, Part II
<http://techspot.zzzeek.org/2011/10/29/value-agnostic-types-part-ii/>`_ -
@@ -659,6 +659,7 @@ HYBRID_PROPERTY = util.symbol('HYBRID_PROPERTY')
"""
+
class hybrid_method(interfaces._InspectionAttr):
"""A decorator which allows definition of a Python object method with both
instance-level and class-level behavior.
@@ -780,7 +781,7 @@ class hybrid_property(interfaces._InspectionAttr):
"""
proxy_attr = attributes.\
- create_proxied_attribute(self)
+ create_proxied_attribute(self)
def expr(owner):
return proxy_attr(owner, self.__name__, self, comparator(owner))
diff --git a/lib/sqlalchemy/ext/instrumentation.py b/lib/sqlalchemy/ext/instrumentation.py
index 2cf36e9bd..024136661 100644
--- a/lib/sqlalchemy/ext/instrumentation.py
+++ b/lib/sqlalchemy/ext/instrumentation.py
@@ -105,7 +105,7 @@ class ExtendedInstrumentationRegistry(InstrumentationFactory):
def _check_conflicts(self, class_, factory):
existing_factories = self._collect_management_factories_for(class_).\
- difference([factory])
+ difference([factory])
if existing_factories:
raise TypeError(
"multiple instrumentation implementations specified "
@@ -182,7 +182,7 @@ class ExtendedInstrumentationRegistry(InstrumentationFactory):
orm_instrumentation._instrumentation_factory = \
- _instrumentation_factory = ExtendedInstrumentationRegistry()
+ _instrumentation_factory = ExtendedInstrumentationRegistry()
orm_instrumentation.instrumentation_finders = instrumentation_finders
@@ -316,7 +316,7 @@ class _ClassInstrumentationAdapter(ClassManager):
return delegate(key, state, factory)
else:
return ClassManager.initialize_collection(self, key,
- state, factory)
+ state, factory)
def new_instance(self, state=None):
instance = self.class_.__new__(self.class_)
diff --git a/lib/sqlalchemy/ext/mutable.py b/lib/sqlalchemy/ext/mutable.py
index 0f268de5f..7469bcbda 100644
--- a/lib/sqlalchemy/ext/mutable.py
+++ b/lib/sqlalchemy/ext/mutable.py
@@ -462,15 +462,15 @@ class MutableBase(object):
val._parents[state.obj()] = key
event.listen(parent_cls, 'load', load,
- raw=True, propagate=True)
+ raw=True, propagate=True)
event.listen(parent_cls, 'refresh', load,
- raw=True, propagate=True)
+ raw=True, propagate=True)
event.listen(attribute, 'set', set,
- raw=True, retval=True, propagate=True)
+ raw=True, retval=True, propagate=True)
event.listen(parent_cls, 'pickle', pickle,
- raw=True, propagate=True)
+ raw=True, propagate=True)
event.listen(parent_cls, 'unpickle', unpickle,
- raw=True, propagate=True)
+ raw=True, propagate=True)
class Mutable(MutableBase):
@@ -565,7 +565,6 @@ class Mutable(MutableBase):
return sqltype
-
class MutableComposite(MutableBase):
"""Mixin that defines transparent propagation of change
events on a SQLAlchemy "composite" object to its
@@ -582,16 +581,17 @@ class MutableComposite(MutableBase):
prop = object_mapper(parent).get_property(key)
for value, attr_name in zip(
- self.__composite_values__(),
- prop._attribute_keys):
+ self.__composite_values__(),
+ prop._attribute_keys):
setattr(parent, attr_name, value)
+
def _setup_composite_listener():
def _listen_for_type(mapper, class_):
for prop in mapper.iterate_properties:
if (hasattr(prop, 'composite_class') and
- isinstance(prop.composite_class, type) and
- issubclass(prop.composite_class, MutableComposite)):
+ isinstance(prop.composite_class, type) and
+ issubclass(prop.composite_class, MutableComposite)):
prop.composite_class._listen_on_attribute(
getattr(class_, prop.key), False, class_)
if not event.contains(Mapper, "mapper_configured", _listen_for_type):
@@ -611,7 +611,6 @@ class MutableDict(Mutable, dict):
dict.__setitem__(self, key, value)
self.changed()
-
def setdefault(self, key, value):
result = dict.setdefault(self, key, value)
self.changed()
diff --git a/lib/sqlalchemy/ext/orderinglist.py b/lib/sqlalchemy/ext/orderinglist.py
index 8ffac5fea..67fda44c4 100644
--- a/lib/sqlalchemy/ext/orderinglist.py
+++ b/lib/sqlalchemy/ext/orderinglist.py
@@ -83,11 +83,11 @@ With the above mapping the ``Bullet.position`` attribute is managed::
s.bullets[2].position
>>> 2
-The :class:`.OrderingList` construct only works with **changes** to a collection,
-and not the initial load from the database, and requires that the list be
-sorted when loaded. Therefore, be sure to
-specify ``order_by`` on the :func:`.relationship` against the target ordering
-attribute, so that the ordering is correct when first loaded.
+The :class:`.OrderingList` construct only works with **changes** to a
+collection, and not the initial load from the database, and requires that the
+list be sorted when loaded. Therefore, be sure to specify ``order_by`` on the
+:func:`.relationship` against the target ordering attribute, so that the
+ordering is correct when first loaded.
.. warning::
@@ -111,11 +111,11 @@ attribute, so that the ordering is correct when first loaded.
explicit configuration at the mapper level for sets of columns that
are to be handled in this way.
-:func:`.ordering_list` takes the name of the related object's ordering attribute as
-an argument. By default, the zero-based integer index of the object's
-position in the :func:`.ordering_list` is synchronized with the ordering attribute:
-index 0 will get position 0, index 1 position 1, etc. To start numbering at 1
-or some other integer, provide ``count_from=1``.
+:func:`.ordering_list` takes the name of the related object's ordering
+attribute as an argument. By default, the zero-based integer index of the
+object's position in the :func:`.ordering_list` is synchronized with the
+ordering attribute: index 0 will get position 0, index 1 position 1, etc. To
+start numbering at 1 or some other integer, provide ``count_from=1``.
"""
@@ -359,7 +359,7 @@ class OrderingList(list):
for func_name, func in list(locals().items()):
if (util.callable(func) and func.__name__ == func_name and
- not func.__doc__ and hasattr(list, func_name)):
+ not func.__doc__ and hasattr(list, func_name)):
func.__doc__ = getattr(list, func_name).__doc__
del func_name, func
diff --git a/lib/sqlalchemy/ext/serializer.py b/lib/sqlalchemy/ext/serializer.py
index 17c1ed30c..bf8d67d8e 100644
--- a/lib/sqlalchemy/ext/serializer.py
+++ b/lib/sqlalchemy/ext/serializer.py
@@ -22,7 +22,8 @@ Usage is nearly the same as that of the standard Python pickle module::
# ... define mappers
- query = Session.query(MyClass).filter(MyClass.somedata=='foo').order_by(MyClass.sortkey)
+    query = Session.query(MyClass).\
+        filter(MyClass.somedata=='foo').order_by(MyClass.sortkey)
# pickle the query
serialized = dumps(query)
@@ -70,7 +71,7 @@ def Serializer(*args, **kw):
pickler = pickle.Pickler(*args, **kw)
def persistent_id(obj):
- #print "serializing:", repr(obj)
+ # print "serializing:", repr(obj)
if isinstance(obj, QueryableAttribute):
cls = obj.impl.class_
key = obj.impl.key
@@ -79,11 +80,12 @@ def Serializer(*args, **kw):
id = "mapper:" + b64encode(pickle.dumps(obj.class_))
elif isinstance(obj, MapperProperty) and not obj.parent.non_primary:
id = "mapperprop:" + b64encode(pickle.dumps(obj.parent.class_)) + \
- ":" + obj.key
+ ":" + obj.key
elif isinstance(obj, Table):
id = "table:" + text_type(obj.key)
elif isinstance(obj, Column) and isinstance(obj.table, Table):
- id = "column:" + text_type(obj.table.key) + ":" + text_type(obj.key)
+ id = "column:" + \
+ text_type(obj.table.key) + ":" + text_type(obj.key)
elif isinstance(obj, Session):
id = "session:"
elif isinstance(obj, Engine):
@@ -96,7 +98,7 @@ def Serializer(*args, **kw):
return pickler
our_ids = re.compile(
- r'(mapperprop|mapper|table|column|session|attribute|engine):(.*)')
+ r'(mapperprop|mapper|table|column|session|attribute|engine):(.*)')
def Deserializer(file, metadata=None, scoped_session=None, engine=None):