Diffstat (limited to 'lib/sqlalchemy/ext')
-rw-r--r--   lib/sqlalchemy/ext/associationproxy.py    16
-rw-r--r--   lib/sqlalchemy/ext/compiler.py             34
-rwxr-xr-x   lib/sqlalchemy/ext/declarative.py         146
-rw-r--r--   lib/sqlalchemy/ext/horizontal_shard.py     22
-rw-r--r--   lib/sqlalchemy/ext/hybrid.py               32
-rw-r--r--   lib/sqlalchemy/ext/mutable.py             110
-rw-r--r--   lib/sqlalchemy/ext/orderinglist.py         10
-rw-r--r--   lib/sqlalchemy/ext/serializer.py           26
-rw-r--r--   lib/sqlalchemy/ext/sqlsoup.py             112
9 files changed, 254 insertions, 254 deletions
diff --git a/lib/sqlalchemy/ext/associationproxy.py b/lib/sqlalchemy/ext/associationproxy.py
index bc62c6efa..969f60326 100644
--- a/lib/sqlalchemy/ext/associationproxy.py
+++ b/lib/sqlalchemy/ext/associationproxy.py
@@ -179,7 +179,7 @@ class AssociationProxy(object):
proxy = self._new(_lazy_collection(obj, self.target_collection))
setattr(obj, self.key, (id(obj), proxy))
return proxy
-
+
def __set__(self, obj, values):
if self.owning_class is None:
self.owning_class = type(obj)
@@ -233,7 +233,7 @@ class AssociationProxy(object):
getter, setter = self.getset_factory(self.collection_class, self)
else:
getter, setter = self._default_getset(self.collection_class)
-
+
if self.collection_class is list:
return _AssociationList(lazy_collection, creator, getter, setter, self)
elif self.collection_class is dict:
@@ -254,7 +254,7 @@ class AssociationProxy(object):
getter, setter = self.getset_factory(self.collection_class, self)
else:
getter, setter = self._default_getset(self.collection_class)
-
+
proxy.creator = creator
proxy.getter = getter
proxy.setter = setter
@@ -279,7 +279,7 @@ class AssociationProxy(object):
def any(self, criterion=None, **kwargs):
return self._comparator.any(getattr(self.target_class, self.value_attr).has(criterion, **kwargs))
-
+
def has(self, criterion=None, **kwargs):
return self._comparator.has(getattr(self.target_class, self.value_attr).has(criterion, **kwargs))
@@ -308,15 +308,15 @@ class _lazy_collection(object):
def __getstate__(self):
return {'obj':self.ref(), 'target':self.target}
-
+
def __setstate__(self, state):
self.ref = weakref.ref(state['obj'])
self.target = state['target']
class _AssociationCollection(object):
def __init__(self, lazy_collection, creator, getter, setter, parent):
- """Constructs an _AssociationCollection.
-
+ """Constructs an _AssociationCollection.
+
This will always be a subclass of either _AssociationList,
_AssociationSet, or _AssociationDict.
@@ -360,7 +360,7 @@ class _AssociationCollection(object):
self.parent = state['parent']
self.lazy_collection = state['lazy_collection']
self.parent._inflate(self)
-
+
class _AssociationList(_AssociationCollection):
"""Generic, converting, list-to-list proxy."""
diff --git a/lib/sqlalchemy/ext/compiler.py b/lib/sqlalchemy/ext/compiler.py
index e6a6ca744..0b96ce25d 100644
--- a/lib/sqlalchemy/ext/compiler.py
+++ b/lib/sqlalchemy/ext/compiler.py
@@ -14,24 +14,24 @@ subclasses and one or more callables defining its compilation::
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.sql.expression import ColumnClause
-
+
class MyColumn(ColumnClause):
pass
-
+
@compiles(MyColumn)
def compile_mycolumn(element, compiler, **kw):
return "[%s]" % element.name
-
+
Above, ``MyColumn`` extends :class:`~sqlalchemy.sql.expression.ColumnClause`,
the base expression element for named column objects. The ``compiles``
decorator registers itself with the ``MyColumn`` class so that it is invoked
when the object is compiled to a string::
from sqlalchemy import select
-
+
s = select([MyColumn('x'), MyColumn('y')])
print str(s)
-
+
Produces::
SELECT [x], [y]
@@ -71,7 +71,7 @@ and :class:`~sqlalchemy.sql.compiler.DDLCompiler` both include a ``process()``
method which can be used for compilation of embedded attributes::
from sqlalchemy.sql.expression import Executable, ClauseElement
-
+
class InsertFromSelect(Executable, ClauseElement):
def __init__(self, table, select):
self.table = table
@@ -86,7 +86,7 @@ method which can be used for compilation of embedded attributes::
insert = InsertFromSelect(t1, select([t1]).where(t1.c.x>5))
print insert
-
+
Produces::
"INSERT INTO mytable (SELECT mytable.x, mytable.y, mytable.z FROM mytable WHERE mytable.x > :x_1)"
@@ -139,7 +139,7 @@ Changing Compilation of Types
return "VARCHAR('max')"
else:
return compiler.visit_VARCHAR(element, **kw)
-
+
foo = Table('foo', metadata,
Column('data', VARCHAR('max'))
)
@@ -158,12 +158,12 @@ A big part of using the compiler extension is subclassing SQLAlchemy expression
"column-like" elements. Anything that you'd place in the "columns" clause of
a SELECT statement (as well as order by and group by) can derive from this -
the object will automatically have Python "comparison" behavior.
-
+
:class:`~sqlalchemy.sql.expression.ColumnElement` classes want to have a
``type`` member which is the expression's return type. This can be established
at the instance level in the constructor, or at the class level if it's
generally constant::
-
+
class timestamp(ColumnElement):
type = TIMESTAMP()
@@ -173,7 +173,7 @@ A big part of using the compiler extension is subclassing SQLAlchemy expression
statements along the line of "SELECT FROM <some function>"
``FunctionElement`` adds in the ability to be used in the FROM clause of a
``select()`` construct::
-
+
from sqlalchemy.sql.expression import FunctionElement
class coalesce(FunctionElement):
@@ -209,14 +209,14 @@ def compiles(class_, *specs):
existing_dispatch = class_.__dict__.get('_compiler_dispatch')
if not existing:
existing = _dispatcher()
-
+
if existing_dispatch:
existing.specs['default'] = existing_dispatch
-
+
# TODO: why is the lambda needed ?
setattr(class_, '_compiler_dispatch', lambda *arg, **kw: existing(*arg, **kw))
setattr(class_, '_compiler_dispatcher', existing)
-
+
if specs:
for s in specs:
existing.specs[s] = fn
@@ -225,15 +225,15 @@ def compiles(class_, *specs):
existing.specs['default'] = fn
return fn
return decorate
-
+
class _dispatcher(object):
def __init__(self):
self.specs = {}
-
+
def __call__(self, element, compiler, **kw):
# TODO: yes, this could also switch off of DBAPI in use.
fn = self.specs.get(compiler.dialect.name, None)
if not fn:
fn = self.specs['default']
return fn(element, compiler, **kw)
-
+
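
The ``_dispatcher`` above keys compilation functions by ``compiler.dialect.name`` and falls
back to ``'default'``; a short sketch of how that looks from the caller's side, registering
both a default and a PostgreSQL-specific rendering for an illustrative ``utcnow()``
construct (the name and SQL strings are examples, not part of this diff)::

    from sqlalchemy.types import DateTime
    from sqlalchemy.sql import expression
    from sqlalchemy.ext.compiler import compiles

    class utcnow(expression.FunctionElement):
        type = DateTime()

    @compiles(utcnow)
    def default_utcnow(element, compiler, **kw):
        # stored under specs['default'] in the _dispatcher above
        return "CURRENT_TIMESTAMP"

    @compiles(utcnow, 'postgresql')
    def pg_utcnow(element, compiler, **kw):
        # selected when compiler.dialect.name == 'postgresql'
        return "TIMEZONE('utc', CURRENT_TIMESTAMP)"

    # e.g. usable as Column('created_at', DateTime, server_default=utcnow())
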
diff --git a/lib/sqlalchemy/ext/declarative.py b/lib/sqlalchemy/ext/declarative.py
index 1199e69f3..feee435ed 100755
--- a/lib/sqlalchemy/ext/declarative.py
+++ b/lib/sqlalchemy/ext/declarative.py
@@ -39,7 +39,7 @@ The resulting table and mapper are accessible via
# access the mapped Table
SomeClass.__table__
-
+
# access the Mapper
SomeClass.__mapper__
@@ -57,7 +57,7 @@ just give the column a name. Below, column "some_table_id" is mapped to the
class SomeClass(Base):
__tablename__ = 'some_table'
id = Column("some_table_id", Integer, primary_key=True)
-
+
Attributes may be added to the class after its construction, and they will be
added to the underlying :class:`.Table` and
:func:`.mapper()` definitions as appropriate::
@@ -66,7 +66,7 @@ added to the underlying :class:`.Table` and
SomeClass.related = relationship(RelatedInfo)
Classes which are constructed using declarative can interact freely
-with classes that are mapped explicitly with :func:`mapper`.
+with classes that are mapped explicitly with :func:`mapper`.
It is recommended, though not required, that all tables
share the same underlying :class:`~sqlalchemy.schema.MetaData` object,
@@ -179,7 +179,7 @@ the :class:`.MetaData` object used by the declarative base::
Column('author_id', Integer, ForeignKey('authors.id')),
Column('keyword_id', Integer, ForeignKey('keywords.id'))
)
-
+
class Author(Base):
__tablename__ = 'authors'
id = Column(Integer, primary_key=True)
@@ -211,11 +211,11 @@ using Python 2.6 style properties::
@property
def attr(self):
return self._attr
-
+
@attr.setter
def attr(self, attr):
self._attr = attr
-
+
attr = synonym('_attr', descriptor=attr)
The above synonym is then usable as an instance attribute as well as a
@@ -230,7 +230,7 @@ conjunction with ``@property``::
class MyClass(Base):
__tablename__ = 'sometable'
-
+
id = Column(Integer, primary_key=True)
_attr = Column('attr', String)
@@ -277,19 +277,19 @@ need either from the local class definition or from remote
classes::
from sqlalchemy.sql import func
-
+
class Address(Base):
__tablename__ = 'address'
id = Column('id', Integer, primary_key=True)
user_id = Column(Integer, ForeignKey('user.id'))
-
+
class User(Base):
__tablename__ = 'user'
id = Column(Integer, primary_key=True)
name = Column(String)
-
+
address_count = column_property(
select([func.count(Address.id)]).\\
where(Address.user_id==id)
@@ -357,15 +357,15 @@ to a table::
table metadata, while still getting most of the benefits of using declarative.
An application that uses reflection might want to load table metadata elsewhere
and simply pass it to declarative classes::
-
+
from sqlalchemy.ext.declarative import declarative_base
-
+
Base = declarative_base()
Base.metadata.reflect(some_engine)
-
+
class User(Base):
__table__ = metadata.tables['user']
-
+
class Address(Base):
__table__ = metadata.tables['address']
@@ -386,13 +386,13 @@ mapped columns can reference them directly from within the
class declaration::
from datetime import datetime
-
+
class Widget(Base):
__tablename__ = 'widgets'
-
+
id = Column(Integer, primary_key=True)
timestamp = Column(DateTime, nullable=False)
-
+
__mapper_args__ = {
'version_id_col': timestamp,
'version_id_generator': lambda v:datetime.now()
@@ -488,7 +488,7 @@ Concrete is defined as a subclass which has its own table and sets the
__tablename__ = 'people'
id = Column(Integer, primary_key=True)
name = Column(String(50))
-
+
class Engineer(Person):
__tablename__ = 'engineers'
__mapper_args__ = {'concrete':True}
@@ -509,16 +509,16 @@ requires usage of :func:`~sqlalchemy.orm.util.polymorphic_union`::
Column('name', String(50)),
Column('golf_swing', String(50))
)
-
+
punion = polymorphic_union({
'engineer':engineers,
'manager':managers
}, 'type', 'punion')
-
+
class Person(Base):
__table__ = punion
__mapper_args__ = {'polymorphic_on':punion.c.type}
-
+
class Engineer(Person):
__table__ = engineers
__mapper_args__ = {'polymorphic_identity':'engineer', 'concrete':True}
@@ -526,7 +526,7 @@ requires usage of :func:`~sqlalchemy.orm.util.polymorphic_union`::
class Manager(Person):
__table__ = managers
__mapper_args__ = {'polymorphic_identity':'manager', 'concrete':True}
-
+
Mixin Classes
==============
@@ -541,10 +541,10 @@ using a "mixin class". A mixin class is one that isn't mapped to a
table and doesn't subclass the declarative :class:`Base`. For example::
class MyMixin(object):
-
+
__table_args__ = {'mysql_engine': 'InnoDB'}
__mapper_args__= {'always_refresh': True}
-
+
id = Column(Integer, primary_key=True)
@@ -600,16 +600,16 @@ is provided so that
patterns common to many classes can be defined as callables::
from sqlalchemy.ext.declarative import declared_attr
-
+
class ReferenceAddressMixin(object):
@declared_attr
def address_id(cls):
return Column(Integer, ForeignKey('address.id'))
-
+
class User(Base, ReferenceAddressMixin):
__tablename__ = 'user'
id = Column(Integer, primary_key=True)
-
+
Where above, the ``address_id`` class-level callable is executed at the
point at which the ``User`` class is constructed, and the declarative
extension can use the resulting :class:`Column` object as returned by
@@ -631,7 +631,7 @@ will resolve them at class construction time::
class MyModel(Base,MyMixin):
__tablename__='test'
id = Column(Integer, primary_key=True)
-
+
Mixing in Relationships
~~~~~~~~~~~~~~~~~~~~~~~
@@ -647,26 +647,26 @@ reference a common target class via many-to-one::
@declared_attr
def target_id(cls):
return Column('target_id', ForeignKey('target.id'))
-
+
@declared_attr
def target(cls):
return relationship("Target")
-
+
class Foo(Base, RefTargetMixin):
__tablename__ = 'foo'
id = Column(Integer, primary_key=True)
-
+
class Bar(Base, RefTargetMixin):
__tablename__ = 'bar'
id = Column(Integer, primary_key=True)
-
+
class Target(Base):
__tablename__ = 'target'
id = Column(Integer, primary_key=True)
:func:`~sqlalchemy.orm.relationship` definitions which require explicit
primaryjoin, order_by etc. expressions should use the string forms
-for these arguments, so that they are evaluated as late as possible.
+for these arguments, so that they are evaluated as late as possible.
To reference the mixin class in these expressions, use the given ``cls``
to get its name::
@@ -674,7 +674,7 @@ to get it's name::
@declared_attr
def target_id(cls):
return Column('target_id', ForeignKey('target.id'))
-
+
@declared_attr
def target(cls):
return relationship("Target",
@@ -810,7 +810,7 @@ from multiple collections::
from sqlalchemy.ext.declarative import declared_attr
class MySQLSettings:
- __table_args__ = {'mysql_engine':'InnoDB'}
+ __table_args__ = {'mysql_engine':'InnoDB'}
class MyOtherMixin:
__table_args__ = {'info':'foo'}
@@ -892,7 +892,7 @@ correctly combines the actions of the other metaclasses. For example::
# This is needed to successfully combine
# two mixins which both have metaclasses
pass
-
+
class MyModel(Base,MyMixin1,MyMixin2):
__tablename__ = 'awooooga'
__metaclass__ = CombinedMeta
@@ -901,7 +901,7 @@ correctly combines the actions of the other metaclasses. For example::
For this reason, if a mixin requires a custom metaclass, this should
be mentioned in any documentation of that mixin to avoid confusion
later down the line.
-
+
Class Constructor
=================
@@ -917,7 +917,7 @@ Sessions
Note that ``declarative`` does nothing special with sessions, and is
only intended as an easier way to configure mappers and
:class:`~sqlalchemy.schema.Table` objects. A typical application
-setup using :func:`~sqlalchemy.orm.scoped_session` might look like::
+setup using :func:`~sqlalchemy.orm.scoped_session` might look like::
engine = create_engine('postgresql://scott:tiger@localhost/test')
Session = scoped_session(sessionmaker(autocommit=False,
@@ -947,7 +947,7 @@ def instrument_declarative(cls, registry, metadata):
"""Given a class, configure the class declaratively,
using the given registry, which can be any dictionary, and
MetaData object.
-
+
"""
if '_decl_class_registry' in cls.__dict__:
raise exceptions.InvalidRequestError(
@@ -973,19 +973,19 @@ def _as_declarative(cls, classname, dict_):
column_copies = {}
potential_columns = {}
-
+
mapper_args = {}
table_args = inherited_table_args = None
tablename = None
parent_columns = ()
-
+
declarative_props = (declared_attr, util.classproperty)
-
+
for base in cls.__mro__:
class_mapped = _is_mapped_class(base)
if class_mapped:
parent_columns = base.__table__.c.keys()
-
+
for name,obj in vars(base).items():
if name == '__mapper_args__':
if not mapper_args and (
@@ -1015,7 +1015,7 @@ def _as_declarative(cls, classname, dict_):
continue
elif base is not cls:
# we're a mixin.
-
+
if isinstance(obj, Column):
if obj.foreign_keys:
raise exceptions.InvalidRequestError(
@@ -1048,7 +1048,7 @@ def _as_declarative(cls, classname, dict_):
for k, v in potential_columns.items():
if tablename or (v.name or k) not in parent_columns:
dict_[k] = v
-
+
if inherited_table_args and not tablename:
table_args = None
@@ -1056,7 +1056,7 @@ def _as_declarative(cls, classname, dict_):
# than the original columns from any mixins
for k, v in mapper_args.iteritems():
mapper_args[k] = column_copies.get(v,v)
-
+
if classname in cls._decl_class_registry:
util.warn("The classname %r is already in the registry of this"
@@ -1071,7 +1071,7 @@ def _as_declarative(cls, classname, dict_):
value = dict_[k]
if isinstance(value, declarative_props):
value = getattr(cls, k)
-
+
if (isinstance(value, tuple) and len(value) == 1 and
isinstance(value[0], (Column, MapperProperty))):
util.warn("Ignoring declarative-like tuple value of attribute "
@@ -1108,7 +1108,7 @@ def _as_declarative(cls, classname, dict_):
table = None
if '__table__' not in dict_:
if tablename is not None:
-
+
if isinstance(table_args, dict):
args, table_kw = (), table_args
elif isinstance(table_args, tuple):
@@ -1139,7 +1139,7 @@ def _as_declarative(cls, classname, dict_):
"Can't add additional column %r when "
"specifying __table__" % c.key
)
-
+
if 'inherits' not in mapper_args:
for c in cls.__bases__:
if _is_mapped_class(c):
@@ -1180,7 +1180,7 @@ def _as_declarative(cls, classname, dict_):
"Can't place __table_args__ on an inherited class "
"with no table."
)
-
+
# add any columns declared here to the inherited table.
for c in cols:
if c.primary_key:
@@ -1195,7 +1195,7 @@ def _as_declarative(cls, classname, dict_):
(c, cls, inherited_table.c[c.name])
)
inherited_table.append_column(c)
-
+
# single or joined inheritance
# exclude any cols on the inherited table which are not mapped on the
# parent class, to avoid
@@ -1203,19 +1203,19 @@ def _as_declarative(cls, classname, dict_):
inherited_mapper = class_mapper(mapper_args['inherits'],
compile=False)
inherited_table = inherited_mapper.local_table
-
+
if 'exclude_properties' not in mapper_args:
mapper_args['exclude_properties'] = exclude_properties = \
set([c.key for c in inherited_table.c
if c not in inherited_mapper._columntoproperty])
exclude_properties.difference_update([c.key for c in cols])
-
+
# look through columns in the current mapper that
# are keyed to a propname different than the colname
# (if names were the same, we'd have popped it out above,
# in which case the mapper makes this combination).
- # See if the superclass has a similar column property.
- # If so, join them together.
+ # See if the superclass has a similar column property.
+ # If so, join them together.
for k, col in our_stuff.items():
if not isinstance(col, expression.ColumnElement):
continue
@@ -1227,7 +1227,7 @@ def _as_declarative(cls, classname, dict_):
# append() in mapper._configure_property().
# change this ordering when we do [ticket:1892]
our_stuff[k] = p.columns + [col]
-
+
cls.__mapper__ = mapper_cls(cls,
table,
properties=our_stuff,
@@ -1267,7 +1267,7 @@ class DeclarativeMeta(type):
class _GetColumns(object):
def __init__(self, cls):
self.cls = cls
-
+
def __getattr__(self, key):
mapper = class_mapper(self.cls, compile=False)
if mapper:
@@ -1275,7 +1275,7 @@ class _GetColumns(object):
raise exceptions.InvalidRequestError(
"Class %r does not have a mapped column named %r"
% (self.cls, key))
-
+
prop = mapper.get_property(key)
if not isinstance(prop, ColumnProperty):
raise exceptions.InvalidRequestError(
@@ -1288,16 +1288,16 @@ class _GetTable(object):
def __init__(self, key, metadata):
self.key = key
self.metadata = metadata
-
+
def __getattr__(self, key):
return self.metadata.tables[
_get_table_key(key, self.key)
]
-
+
def _deferred_relationship(cls, prop):
def resolve_arg(arg):
import sqlalchemy
-
+
def access_cls(key):
if key in cls._decl_class_registry:
return _GetColumns(cls._decl_class_registry[key])
@@ -1312,7 +1312,7 @@ def _deferred_relationship(cls, prop):
def return_cls():
try:
x = eval(arg, globals(), d)
-
+
if isinstance(x, _GetColumns):
return x.cls
else:
@@ -1395,7 +1395,7 @@ class declared_attr(property):
.. note:: @declared_attr is available as
``sqlalchemy.util.classproperty`` for SQLAlchemy versions
0.6.2, 0.6.3, 0.6.4.
-
+
@declared_attr turns the attribute into a scalar-like
property that can be invoked from the uninstantiated class.
Declarative treats attributes specifically marked with
@@ -1403,29 +1403,29 @@ class declared_attr(property):
to mapping or declarative table configuration. The name
of the attribute is that of what the non-dynamic version
of the attribute would be.
-
+
@declared_attr is more often than not applicable to mixins,
to define relationships that are to be applied to different
implementors of the class::
-
+
class ProvidesUser(object):
"A mixin that adds a 'user' relationship to classes."
-
+
@declared_attr
def user(self):
return relationship("User")
-
+
It also can be applied to mapped classes, such as to provide
a "polymorphic" scheme for inheritance::
-
+
class Employee(Base):
id = Column(Integer, primary_key=True)
type = Column(String(50), nullable=False)
-
+
@declared_attr
def __tablename__(cls):
return cls.__name__.lower()
-
+
@declared_attr
def __mapper_args__(cls):
if cls.__name__ == 'Employee':
@@ -1435,13 +1435,13 @@ class declared_attr(property):
}
else:
return {"polymorphic_identity":cls.__name__}
-
+
"""
-
+
def __init__(self, fget, *arg, **kw):
super(declared_attr, self).__init__(fget, *arg, **kw)
self.__doc__ = fget.__doc__
-
+
def __get__(desc, self, cls):
return desc.fget(cls)
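
Pulling the mixin and ``declared_attr`` pieces documented above into one runnable sketch;
the ``TablenameMixin``/``User`` names are illustrative, and the table-name convention is the
one shown in the ``declared_attr`` docstring::

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.ext.declarative import declarative_base, declared_attr

    Base = declarative_base()

    class TablenameMixin(object):
        # evaluated once per subclass at class construction time
        @declared_attr
        def __tablename__(cls):
            return cls.__name__.lower()

        __table_args__ = {'mysql_engine': 'InnoDB'}
        id = Column(Integer, primary_key=True)

    class User(Base, TablenameMixin):
        name = Column(String(50))

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)    # emits CREATE TABLE user (...)
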
diff --git a/lib/sqlalchemy/ext/horizontal_shard.py b/lib/sqlalchemy/ext/horizontal_shard.py
index 880dfb743..41fae8e7b 100644
--- a/lib/sqlalchemy/ext/horizontal_shard.py
+++ b/lib/sqlalchemy/ext/horizontal_shard.py
@@ -40,10 +40,10 @@ class ShardedSession(Session):
:param query_chooser: For a given Query, returns the list of shard_ids where the query
should be issued. Results from all shards returned will be combined
together into a single listing.
-
+
:param shards: A dictionary of string shard names to :class:`~sqlalchemy.engine.base.Engine`
- objects.
-
+ objects.
+
"""
super(ShardedSession, self).__init__(**kwargs)
self.shard_chooser = shard_chooser
@@ -55,7 +55,7 @@ class ShardedSession(Session):
if shards is not None:
for k in shards:
self.bind_shard(k, shards[k])
-
+
def connection(self, mapper=None, instance=None, shard_id=None, **kwargs):
if shard_id is None:
shard_id = self.shard_chooser(mapper, instance)
@@ -66,7 +66,7 @@ class ShardedSession(Session):
return self.get_bind(mapper,
shard_id=shard_id,
instance=instance).contextual_connect(**kwargs)
-
+
def get_bind(self, mapper, shard_id=None, instance=None, clause=None, **kw):
if shard_id is None:
shard_id = self.shard_chooser(mapper, instance, clause=clause)
@@ -81,18 +81,18 @@ class ShardedQuery(Query):
self.id_chooser = self.session.id_chooser
self.query_chooser = self.session.query_chooser
self._shard_id = None
-
+
def set_shard(self, shard_id):
"""return a new query, limited to a single shard ID.
-
+
all subsequent operations with the returned query will
be against the single shard regardless of other state.
"""
-
+
q = self._clone()
q._shard_id = shard_id
return q
-
+
def _execute_and_instances(self, context):
if self._shard_id is not None:
result = self.session.connection(
@@ -106,7 +106,7 @@ class ShardedQuery(Query):
mapper=self._mapper_zero(),
shard_id=shard_id).execute(context.statement, self._params)
partial = partial + list(self.instances(result, context))
-
+
# if some kind of in memory 'sorting'
# were done, this is where it would happen
return iter(partial)
@@ -122,4 +122,4 @@ class ShardedQuery(Query):
return o
else:
return None
-
+
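
A minimal configuration sketch based on the constructor parameters documented above; the
two SQLite shards and the chooser callables are illustrative placeholders, not part of
this diff::

    from sqlalchemy import create_engine
    from sqlalchemy.ext.horizontal_shard import ShardedSession

    # any mapping of shard name -> Engine works; two in-memory DBs here
    shards = {
        'east': create_engine('sqlite://'),
        'west': create_engine('sqlite://'),
    }

    def shard_chooser(mapper, instance, clause=None):
        # pick the shard a new instance is written to
        return 'east'

    def id_chooser(query, ident):
        # given a primary key, list the shards to search for it
        return ['east', 'west']

    def query_chooser(query):
        # list the shards a query runs against; results are combined
        # into a single listing, as described in the docstring above
        return ['east', 'west']

    session = ShardedSession(
        shard_chooser=shard_chooser,
        id_chooser=id_chooser,
        query_chooser=query_chooser,
        shards=shards,
    )
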
diff --git a/lib/sqlalchemy/ext/hybrid.py b/lib/sqlalchemy/ext/hybrid.py
index 153eccce2..f3989a84d 100644
--- a/lib/sqlalchemy/ext/hybrid.py
+++ b/lib/sqlalchemy/ext/hybrid.py
@@ -13,7 +13,7 @@ Consider a table `interval` as below::
from sqlalchemy import MetaData, Table, Column, Integer, create_engine
from sqlalchemy.orm import mapper, create_session
-
+
engine = create_engine('sqlite://')
metadata = MetaData()
@@ -22,22 +22,22 @@ Consider a table `interval` as below::
Column('start', Integer, nullable=False),
Column('end', Integer, nullable=False))
metadata.create_all(engine)
-
+
We can define higher level functions on mapped classes that produce SQL
expressions at the class level, and Python expression evaluation at the
instance level. Below, each function decorated with :func:`hybrid.method`
or :func:`hybrid.property` may receive ``self`` as an instance of the class,
or as the class itself::
-
+
# A base class for intervals
from sqlalchemy.orm import hybrid
-
+
class Interval(object):
def __init__(self, start, end):
self.start = start
self.end = end
-
+
@hybrid.property
def length(self):
return self.end - self.start
@@ -45,13 +45,13 @@ or as the class itself::
@hybrid.method
def contains(self,point):
return (self.start <= point) & (point < self.end)
-
+
@hybrid.method
def intersects(self, other):
return self.contains(other.start) | self.contains(other.end)
-
+
"""
from sqlalchemy import util
from sqlalchemy.orm import attributes, interfaces
@@ -60,7 +60,7 @@ class method(object):
def __init__(self, func, expr=None):
self.func = func
self.expr = expr or func
-
+
def __get__(self, instance, owner):
if instance is None:
return new.instancemethod(self.expr, owner, owner.__class__)
@@ -84,13 +84,13 @@ class property_(object):
return self.expr(owner)
else:
return self.fget(instance)
-
+
def __set__(self, instance, value):
self.fset(instance, value)
-
+
def __delete__(self, instance):
self.fdel(instance)
-
+
def setter(self, fset):
self.fset = fset
return self
@@ -98,11 +98,11 @@ class property_(object):
def deleter(self, fdel):
self.fdel = fdel
return self
-
+
def expression(self, expr):
self.expr = expr
return self
-
+
def comparator(self, comparator):
proxy_attr = attributes.\
create_proxied_attribute(self)
@@ -115,15 +115,15 @@ class property_(object):
class Comparator(interfaces.PropComparator):
def __init__(self, expression):
self.expression = expression
-
+
def __clause_element__(self):
expr = self.expression
while hasattr(expr, '__clause_element__'):
expr = expr.__clause_element__()
return expr
-
+
def adapted(self, adapter):
# interesting....
return self
-
+
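
For reference, a sketch of applying the descriptors defined in this module. Note that the
docstring above spells them ``hybrid.property``/``hybrid.method`` via ``sqlalchemy.orm``,
while this revision actually defines ``property_`` and ``method`` in
``sqlalchemy.ext.hybrid`` (released versions expose these as ``hybrid_property`` and
``hybrid_method``); the decorator spellings below are therefore a best guess against this
revision::

    from sqlalchemy.ext import hybrid

    class Interval(object):
        def __init__(self, start, end):
            self.start = start
            self.end = end

        @hybrid.property_
        def length(self):
            # on an instance: plain Python arithmetic; on a mapped
            # class: a SQL expression over the 'start'/'end' columns
            return self.end - self.start

        @hybrid.method
        def contains(self, point):
            return (self.start <= point) & (point < self.end)

    # instance-level evaluation happens in Python
    assert Interval(5, 10).length == 5
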
diff --git a/lib/sqlalchemy/ext/mutable.py b/lib/sqlalchemy/ext/mutable.py
index 2bb879322..11a7977f6 100644
--- a/lib/sqlalchemy/ext/mutable.py
+++ b/lib/sqlalchemy/ext/mutable.py
@@ -22,47 +22,47 @@ import weakref
class Mutable(object):
"""Mixin that defines transparent propagation of change
events to a parent object.
-
+
"""
-
+
@memoized_property
def _parents(self):
"""Dictionary of parent object->attribute name on the parent."""
-
+
return weakref.WeakKeyDictionary()
-
+
def change(self):
"""Subclasses should call this method whenever change events occur."""
-
+
for parent, key in self._parents.items():
flag_modified(parent, key)
-
+
@classmethod
def coerce(cls, key, value):
"""Given a value, coerce it into this type.
-
+
By default raises ValueError.
"""
if value is None:
return None
raise ValueError("Attribute '%s' accepts objects of type %s" % (key, cls))
-
-
+
+
@classmethod
def associate_with_attribute(cls, attribute):
"""Establish this type as a mutation listener for the given
mapped descriptor.
-
+
"""
key = attribute.key
parent_cls = attribute.class_
-
+
def load(state, *args):
- """Listen for objects loaded or refreshed.
-
+ """Listen for objects loaded or refreshed.
+
Wrap the target data member's value with
``Mutable``.
-
+
"""
val = state.dict.get(key, None)
if val is not None:
@@ -73,20 +73,20 @@ class Mutable(object):
def set(target, value, oldvalue, initiator):
"""Listen for set/replace events on the target
data member.
-
+
Establish a weak reference to the parent object
on the incoming value, remove it for the one
outgoing.
-
+
"""
-
+
if not isinstance(value, cls):
value = cls.coerce(key, value)
value._parents[target.obj()] = key
if isinstance(oldvalue, cls):
oldvalue._parents.pop(state.obj(), None)
return value
-
+
event.listen(parent_cls, 'load', load, raw=True)
event.listen(parent_cls, 'refresh', load, raw=True)
event.listen(attribute, 'set', set, raw=True, retval=True)
@@ -97,7 +97,7 @@ class Mutable(object):
def associate_with(cls, sqltype):
"""Associate this wrapper with all future mapped columns
of the given type.
-
+
This is a convenience method that calls ``associate_with_attribute`` automatically.
.. warning:: The listeners established by this method are *global*
@@ -105,7 +105,7 @@ class Mutable(object):
:meth:`.associate_with` for types that are permanent to an application,
not with ad-hoc types else this will cause unbounded growth
in memory usage.
-
+
"""
def listen_for_type(mapper, class_):
@@ -114,39 +114,39 @@ class Mutable(object):
if isinstance(prop.columns[0].type, sqltype):
cls.associate_with_attribute(getattr(class_, prop.key))
break
-
+
event.listen(mapper, 'mapper_configured', listen_for_type)
-
+
@classmethod
def as_mutable(cls, sqltype):
"""Associate a SQL type with this mutable Python type.
-
+
This establishes listeners that will detect ORM mappings against
the given type, adding mutation event trackers to those mappings.
-
+
The type is returned, unconditionally as an instance, so that
:meth:`.as_mutable` can be used inline::
-
+
Table('mytable', metadata,
Column('id', Integer, primary_key=True),
Column('data', MyMutableType.as_mutable(PickleType))
)
-
+
Note that the returned type is always an instance, even if a class
is given, and that only columns which are declared specifically with that
type instance receive additional instrumentation.
-
+
To associate a particular mutable type with all occurrences of a
particular type, use the :meth:`.Mutable.associate_with` classmethod
of the particular :class:`.Mutable` subclass to establish a global
association.
-
+
.. warning:: The listeners established by this method are *global*
to all mappers, and are *not* garbage collected. Only use
:meth:`.as_mutable` for types that are permanent to an application,
not with ad-hoc types else this will cause unbounded growth
in memory usage.
-
+
"""
sqltype = types.to_instance(sqltype)
@@ -156,9 +156,9 @@ class Mutable(object):
if prop.columns[0].type is sqltype:
cls.associate_with_attribute(getattr(class_, prop.key))
break
-
+
event.listen(mapper, 'mapper_configured', listen_for_type)
-
+
return sqltype
@@ -171,14 +171,14 @@ class MutableComposite(object):
"""Mixin that defines transparent propagation of change
events on a SQLAlchemy "composite" object to its
owning parent or parents.
-
+
Composite classes, in addition to meeting the usage contract
defined in :ref:`mapper_composite`, also define some system
of relaying change events to the given :meth:`.change`
method, which will notify all parents of the change. Below
the special Python method ``__setattr__`` is used to intercept
all changes::
-
+
class Point(MutableComposite):
def __init__(self, x, y):
self.x = x
@@ -187,10 +187,10 @@ class MutableComposite(object):
def __setattr__(self, key, value):
object.__setattr__(self, key, value)
self.change()
-
+
def __composite_values__(self):
return self.x, self.y
-
+
def __eq__(self, other):
return isinstance(other, Point) and \
other.x == self.x and \
@@ -206,44 +206,44 @@ class MutableComposite(object):
:class:`.MutableComposite` for types that are permanent to an application,
not with ad-hoc types else this will cause unbounded growth
in memory usage.
-
+
"""
__metaclass__ = _MutableCompositeMeta
@memoized_property
def _parents(self):
"""Dictionary of parent object->attribute name on the parent."""
-
+
return weakref.WeakKeyDictionary()
def change(self):
"""Subclasses should call this method whenever change events occur."""
-
+
for parent, key in self._parents.items():
-
+
prop = object_mapper(parent).get_property(key)
for value, attr_name in zip(
self.__composite_values__(),
prop._attribute_keys):
setattr(parent, attr_name, value)
-
+
@classmethod
def _listen_on_attribute(cls, attribute):
"""Establish this type as a mutation listener for the given
mapped descriptor.
-
+
"""
key = attribute.key
parent_cls = attribute.class_
-
+
def load(state, *args):
- """Listen for objects loaded or refreshed.
-
+ """Listen for objects loaded or refreshed.
+
Wrap the target data member's value with
``Mutable``.
-
+
"""
-
+
val = state.dict.get(key, None)
if val is not None:
val._parents[state.obj()] = key
@@ -251,37 +251,37 @@ class MutableComposite(object):
def set(target, value, oldvalue, initiator):
"""Listen for set/replace events on the target
data member.
-
+
Establish a weak reference to the parent object
on the incoming value, remove it for the one
outgoing.
-
+
"""
-
+
value._parents[target.obj()] = key
if isinstance(oldvalue, cls):
oldvalue._parents.pop(state.obj(), None)
return value
-
+
event.listen(parent_cls, 'load', load, raw=True)
event.listen(parent_cls, 'refresh', load, raw=True)
event.listen(attribute, 'set', set, raw=True, retval=True)
# TODO: need a deserialize hook here
-
+
@classmethod
def _setup_listeners(cls):
"""Associate this wrapper with all future mapped compoistes
of the given type.
-
+
This is a convenience method that calls ``associate_with_attribute`` automatically.
-
+
"""
-
+
def listen_for_type(mapper, class_):
for prop in mapper.iterate_properties:
if hasattr(prop, 'composite_class') and issubclass(prop.composite_class, cls):
cls._listen_on_attribute(getattr(class_, prop.key))
-
+
event.listen(mapper, 'mapper_configured', listen_for_type)
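
As a concrete instance of the contract described above — subclass :class:`.Mutable`, call
``self.change()`` on every mutation, and supply a ``coerce()`` classmethod — a minimal dict
wrapper; the ``MutationDict`` name and the ``PickleType`` column are illustrative::

    from sqlalchemy import Table, Column, Integer, PickleType, MetaData
    from sqlalchemy.ext.mutable import Mutable

    class MutationDict(Mutable, dict):
        @classmethod
        def coerce(cls, key, value):
            # convert plain dicts as they are assigned to the attribute
            if not isinstance(value, MutationDict):
                if isinstance(value, dict):
                    return MutationDict(value)
                return Mutable.coerce(key, value)
            return value

        def __setitem__(self, key, value):
            dict.__setitem__(self, key, value)
            self.change()           # notify all parents of the mutation

        def __delitem__(self, key):
            dict.__delitem__(self, key)
            self.change()

    metadata = MetaData()
    # only columns declared with this exact type instance are instrumented
    mytable = Table('mytable', metadata,
        Column('id', Integer, primary_key=True),
        Column('data', MutationDict.as_mutable(PickleType)))
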
diff --git a/lib/sqlalchemy/ext/orderinglist.py b/lib/sqlalchemy/ext/orderinglist.py
index 062172bcc..ce63b88ea 100644
--- a/lib/sqlalchemy/ext/orderinglist.py
+++ b/lib/sqlalchemy/ext/orderinglist.py
@@ -52,7 +52,7 @@ An ``orderinglist`` can automate this and manage the 'position' attribute on all
related bullets for you.
.. sourcecode:: python+sql
-
+
mapper(Slide, slides_table, properties={
'bullets': relationship(Bullet,
collection_class=ordering_list('position'),
@@ -71,7 +71,7 @@ related bullets for you.
Use the ``ordering_list`` function to set up the ``collection_class`` on relationships
(as in the mapper example above). This implementation depends on the list
-starting in the proper order, so be SURE to put an order_by on your relationship.
+starting in the proper order, so be SURE to put an order_by on your relationship.
.. warning:: ``ordering_list`` only provides limited functionality when a primary
key column or unique column is the target of the sort. Since changing the order of
@@ -89,7 +89,7 @@ or some other integer, provide ``count_from=1``.
Ordering values are not limited to incrementing integers. Almost any scheme
can implemented by supplying a custom ``ordering_func`` that maps a Python list
-index to any value you require.
+index to any value you require.
@@ -292,7 +292,7 @@ class OrderingList(list):
stop = index.stop or len(self)
if stop < 0:
stop += len(self)
-
+
for i in xrange(start, stop, step):
self.__setitem__(i, entity[i])
else:
@@ -312,7 +312,7 @@ class OrderingList(list):
super(OrderingList, self).__delslice__(start, end)
self._reorder()
# end Py2K
-
+
for func_name, func in locals().items():
if (util.callable(func) and func.func_name == func_name and
not func.__doc__ and hasattr(list, func_name)):
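
A compact end-to-end sketch of the ``ordering_list`` usage documented above, reusing the
``Slide``/``Bullet`` names from the docstring; the exact table layout is an assumption
consistent with that example::

    from sqlalchemy import Table, Column, Integer, String, ForeignKey, MetaData
    from sqlalchemy.orm import mapper, relationship
    from sqlalchemy.ext.orderinglist import ordering_list

    metadata = MetaData()

    slides_table = Table('slides', metadata,
        Column('id', Integer, primary_key=True))

    bullets_table = Table('bullets', metadata,
        Column('id', Integer, primary_key=True),
        Column('slide_id', Integer, ForeignKey('slides.id')),
        Column('position', Integer),
        Column('text', String(100)))

    class Slide(object):
        pass

    class Bullet(object):
        pass

    mapper(Slide, slides_table, properties={
        'bullets': relationship(Bullet,
                        collection_class=ordering_list('position'),
                        order_by=[bullets_table.c.position])
    })
    mapper(Bullet, bullets_table)

    s = Slide()
    s.bullets.append(Bullet())
    s.bullets.insert(1, Bullet())
    # the collection keeps Bullet.position in sync with each list index
    assert [b.position for b in s.bullets] == [0, 1]
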
diff --git a/lib/sqlalchemy/ext/serializer.py b/lib/sqlalchemy/ext/serializer.py
index 8c098c3df..077a0fd9e 100644
--- a/lib/sqlalchemy/ext/serializer.py
+++ b/lib/sqlalchemy/ext/serializer.py
@@ -18,17 +18,17 @@ Usage is nearly the same as that of the standard Python pickle module::
from sqlalchemy.ext.serializer import loads, dumps
metadata = MetaData(bind=some_engine)
Session = scoped_session(sessionmaker())
-
+
# ... define mappers
-
+
query = Session.query(MyClass).filter(MyClass.somedata=='foo').order_by(MyClass.sortkey)
-
+
# pickle the query
serialized = dumps(query)
-
+
# unpickle. Pass in metadata + scoped_session
query2 = loads(serialized, metadata, Session)
-
+
print query2.all()
Similar restrictions as when using raw pickle apply; mapped classes must be
@@ -81,7 +81,7 @@ __all__ = ['Serializer', 'Deserializer', 'dumps', 'loads']
def Serializer(*args, **kw):
pickler = pickle.Pickler(*args, **kw)
-
+
def persistent_id(obj):
#print "serializing:", repr(obj)
if isinstance(obj, QueryableAttribute):
@@ -101,15 +101,15 @@ def Serializer(*args, **kw):
else:
return None
return id
-
+
pickler.persistent_id = persistent_id
return pickler
-
+
our_ids = re.compile(r'(mapper|table|column|session|attribute|engine):(.*)')
def Deserializer(file, metadata=None, scoped_session=None, engine=None):
unpickler = pickle.Unpickler(file)
-
+
def get_engine():
if engine:
return engine
@@ -119,7 +119,7 @@ def Deserializer(file, metadata=None, scoped_session=None, engine=None):
return metadata.bind
else:
return None
-
+
def persistent_load(id):
m = our_ids.match(id)
if not m:
@@ -152,10 +152,10 @@ def dumps(obj, protocol=0):
pickler = Serializer(buf, protocol)
pickler.dump(obj)
return buf.getvalue()
-
+
def loads(data, metadata=None, scoped_session=None, engine=None):
buf = byte_buffer(data)
unpickler = Deserializer(buf, metadata, scoped_session, engine)
return unpickler.load()
-
-
+
+
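
A round-trip sketch complementing the Query example in the docstring above, showing that a
plain ``select()`` pickles symbolically (via ``persistent_id()``) and is re-linked against
the :class:`.MetaData` passed to ``loads()``; the ``users`` table is illustrative::

    from sqlalchemy import (MetaData, Table, Column, Integer, String,
                            select, create_engine)
    from sqlalchemy.ext.serializer import dumps, loads

    engine = create_engine('sqlite://')
    metadata = MetaData(bind=engine)
    users = Table('users', metadata,
        Column('id', Integer, primary_key=True),
        Column('name', String(50)))
    metadata.create_all()

    # tables/columns are pickled as symbolic ids by persistent_id() above...
    expr = select([users]).where(users.c.name == 'ed')
    payload = dumps(expr)

    # ...and re-linked against the given MetaData by persistent_load()
    expr2 = loads(payload, metadata)
    rows = engine.execute(expr2).fetchall()
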
diff --git a/lib/sqlalchemy/ext/sqlsoup.py b/lib/sqlalchemy/ext/sqlsoup.py
index ebe2feb7f..9e6f63aca 100644
--- a/lib/sqlalchemy/ext/sqlsoup.py
+++ b/lib/sqlalchemy/ext/sqlsoup.py
@@ -257,7 +257,7 @@ The default session is available at the module level in SQLSoup,
via::
>>> from sqlalchemy.ext.sqlsoup import Session
-
+
The configuration of this session is ``autoflush=True``,
``autocommit=False``. This means when you work with the SqlSoup
object, you need to call ``db.commit()`` in order to have
@@ -460,7 +460,7 @@ def _class_for_table(session, engine, selectable, base_cls, mapper_kwargs):
engine_encoding = engine.dialect.encoding
mapname = mapname.encode(engine_encoding)
# end Py2K
-
+
if isinstance(selectable, Table):
klass = TableClassType(mapname, (base_cls,), {})
else:
@@ -475,10 +475,10 @@ def _class_for_table(session, engine, selectable, base_cls, mapper_kwargs):
except AttributeError:
raise TypeError('unable to compare with %s' % o.__class__)
return t1, t2
-
+
# python2/python3 compatible system of
# __cmp__ - __lt__ + __eq__
-
+
def __lt__(self, o):
t1, t2 = _compare(self, o)
return t1 < t2
@@ -486,12 +486,12 @@ def _class_for_table(session, engine, selectable, base_cls, mapper_kwargs):
def __eq__(self, o):
t1, t2 = _compare(self, o)
return t1 == t2
-
+
def __repr__(self):
L = ["%s=%r" % (key, getattr(self, key, ''))
for key in self.__class__.c.keys()]
return '%s(%s)' % (self.__class__.__name__, ','.join(L))
-
+
for m in ['__eq__', '__repr__', '__lt__']:
setattr(klass, m, eval(m))
klass._table = selectable
@@ -500,16 +500,16 @@ def _class_for_table(session, engine, selectable, base_cls, mapper_kwargs):
selectable,
extension=AutoAdd(session),
**mapper_kwargs)
-
+
for k in mappr.iterate_properties:
klass.c[k.key] = k.columns[0]
-
+
klass._query = session.query_property()
return klass
class SqlSoup(object):
"""Represent an ORM-wrapped database resource."""
-
+
def __init__(self, engine_or_metadata, base=object, session=None):
"""Initialize a new :class:`.SqlSoup`.
@@ -525,10 +525,10 @@ class SqlSoup(object):
module is used.
"""
-
+
self.session = session or Session
self.base=base
-
+
if isinstance(engine_or_metadata, MetaData):
self._metadata = engine_or_metadata
elif isinstance(engine_or_metadata, (basestring, Engine)):
@@ -536,10 +536,10 @@ class SqlSoup(object):
else:
raise ArgumentError("invalid engine or metadata argument %r" %
engine_or_metadata)
-
+
self._cache = {}
self.schema = None
-
+
@property
def bind(self):
"""The :class:`.Engine` associated with this :class:`.SqlSoup`."""
@@ -551,83 +551,83 @@ class SqlSoup(object):
"""Mark an instance as deleted."""
self.session.delete(instance)
-
+
def execute(self, stmt, **params):
"""Execute a SQL statement.
-
+
The statement may be a plain SQL string,
an :func:`.expression.select` construct, or an :func:`.expression.text`
construct.
-
+
"""
return self.session.execute(sql.text(stmt, bind=self.bind), **params)
-
+
@property
def _underlying_session(self):
if isinstance(self.session, session.Session):
return self.session
else:
return self.session()
-
+
def connection(self):
"""Return the current :class:`.Connection` in use by the current transaction."""
-
+
return self._underlying_session._connection_for_bind(self.bind)
-
+
def flush(self):
"""Flush pending changes to the database.
-
+
See :meth:`.Session.flush`.
-
+
"""
self.session.flush()
-
+
def rollback(self):
"""Rollback the current transction.
-
+
See :meth:`.Session.rollback`.
-
+
"""
self.session.rollback()
-
+
def commit(self):
"""Commit the current transaction.
-
+
See :meth:`.Session.commit`.
-
+
"""
self.session.commit()
-
+
def clear(self):
"""Synonym for :meth:`.SqlSoup.expunge_all`."""
-
+
self.session.expunge_all()
-
+
def expunge(self, instance):
"""Remove an instance from the :class:`.Session`.
-
+
See :meth:`.Session.expunge`.
-
+
"""
self.session.expunge(instance)
-
+
def expunge_all(self):
"""Clear all objects from the current :class:`.Session`.
-
+
See :meth:`.Session.expunge_all`.
-
+
"""
self.session.expunge_all()
def map_to(self, attrname, tablename=None, selectable=None,
schema=None, base=None, mapper_args=util.frozendict()):
"""Configure a mapping to the given attrname.
-
+
This is the "master" method that can be used to create any
configuration.
-
+
(new in 0.6.6)
-
+
:param attrname: String attribute name which will be
established as an attribute on this :class:`.SqlSoup`
instance.
@@ -648,8 +648,8 @@ class SqlSoup(object):
argument.
:param schema: String schema name to use if the
``tablename`` argument is present.
-
-
+
+
"""
if attrname in self._cache:
raise InvalidRequestError(
@@ -657,7 +657,7 @@ class SqlSoup(object):
attrname,
class_mapper(self._cache[attrname]).mapped_table
))
-
+
if tablename is not None:
if not isinstance(tablename, basestring):
raise ArgumentError("'tablename' argument must be a string."
@@ -692,7 +692,7 @@ class SqlSoup(object):
raise PKNotFoundError(
"selectable '%s' does not have a primary "
"key defined" % selectable)
-
+
mapped_cls = _class_for_table(
self.session,
self.engine,
@@ -702,14 +702,14 @@ class SqlSoup(object):
)
self._cache[attrname] = mapped_cls
return mapped_cls
-
+
def map(self, selectable, base=None, **mapper_args):
"""Map a selectable directly.
-
+
The class and its mapping are not cached and will
be discarded once dereferenced (as of 0.6.6).
-
+
:param selectable: an :func:`.expression.select` construct.
:param base: a Python class which will be used as the
base for the mapped class. If ``None``, the "base"
@@ -718,7 +718,7 @@ class SqlSoup(object):
``object``.
:param mapper_args: Dictionary of arguments which will
be passed directly to :func:`.orm.mapper`.
-
+
"""
return _class_for_table(
@@ -735,7 +735,7 @@ class SqlSoup(object):
The class and its mapping are not cached and will
be discarded once dereferenced (as of 0.6.6).
-
+
:param selectable: an :func:`.expression.select` construct.
:param base: a Python class which will be used as the
base for the mapped class. If ``None``, the "base"
@@ -744,9 +744,9 @@ class SqlSoup(object):
``object``.
:param mapper_args: Dictionary of arguments which will
be passed directly to :func:`.orm.mapper`.
-
+
"""
-
+
# TODO give meaningful aliases
return self.map(
expression._clause_element_as_expr(selectable).
@@ -759,7 +759,7 @@ class SqlSoup(object):
The class and its mapping are not cached and will
be discarded once dereferenced (as of 0.6.6).
-
+
:param left: a mapped class or table object.
:param right: a mapped class or table object.
:param onclause: optional "ON" clause construct.
@@ -771,24 +771,24 @@ class SqlSoup(object):
``object``.
:param mapper_args: Dictionary of arguments which will
be passed directly to :func:`.orm.mapper`.
-
+
"""
-
+
j = join(left, right, onclause=onclause, isouter=isouter)
return self.map(j, base=base, **mapper_args)
def entity(self, attr, schema=None):
"""Return the named entity from this :class:`.SqlSoup`, or
create if not present.
-
+
For more generalized mapping, see :meth:`.map_to`.
-
+
"""
try:
return self._cache[attr]
except KeyError, ke:
return self.map_to(attr, tablename=attr, schema=schema)
-
+
def __getattr__(self, attr):
return self.entity(attr)
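
Finally, a small self-contained sketch of the :class:`.SqlSoup` workflow documented above —
lazy reflection and mapping on attribute access, explicit ``db.commit()`` under the default
``autoflush=True, autocommit=False`` session; the ``users`` table and row values are
illustrative::

    from sqlalchemy import create_engine, MetaData, Table, Column, Integer, String
    from sqlalchemy.ext.sqlsoup import SqlSoup

    # build a throwaway database containing one table to reflect
    engine = create_engine('sqlite://')
    setup = MetaData(bind=engine)
    Table('users', setup,
        Column('id', Integer, primary_key=True),
        Column('name', String(50)))
    setup.create_all()

    db = SqlSoup(engine)             # a URL string or a MetaData also works
    db.users.insert(name='ed')       # 'users' is reflected and mapped on first access
    db.commit()                      # default Session: autoflush=True, autocommit=False
    ed = db.users.filter_by(name='ed').one()
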