summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--CHANGES16
-rw-r--r--lib/sqlalchemy/dialects/mssql/base.py17
-rw-r--r--lib/sqlalchemy/orm/attributes.py72
-rw-r--r--lib/sqlalchemy/orm/collections.py17
-rw-r--r--lib/sqlalchemy/orm/dependency.py15
-rw-r--r--lib/sqlalchemy/orm/identity.py40
-rw-r--r--lib/sqlalchemy/orm/interfaces.py8
-rw-r--r--lib/sqlalchemy/orm/mapper.py11
-rw-r--r--lib/sqlalchemy/orm/properties.py2
-rw-r--r--lib/sqlalchemy/orm/query.py2
-rw-r--r--lib/sqlalchemy/orm/state.py58
-rw-r--r--lib/sqlalchemy/orm/strategies.py277
-rw-r--r--lib/sqlalchemy/orm/unitofwork.py2
-rw-r--r--lib/sqlalchemy/schema.py6
-rw-r--r--lib/sqlalchemy/util/_collections.py3
-rw-r--r--test/aaa_profiling/test_orm.py4
-rw-r--r--test/dialect/test_mssql.py14
-rw-r--r--test/orm/inheritance/test_poly_linked_list.py2
-rw-r--r--test/orm/test_attributes.py158
-rw-r--r--test/orm/test_extendedattr.py27
-rw-r--r--test/orm/test_unitofworkv2.py185
-rw-r--r--test/sql/test_types.py4
22 files changed, 596 insertions, 344 deletions
diff --git a/CHANGES b/CHANGES
index 714d9cb88..900cdd54f 100644
--- a/CHANGES
+++ b/CHANGES
@@ -3,6 +3,16 @@
=======
CHANGES
=======
+0.7.0b1
+=======
+
+- mssql
+ - the String/Unicode types, and their counterparts VARCHAR/
+ NVARCHAR, emit "max" as the length when no length is
+ specified. This makes it more compatible with Postgresql's
+      VARCHAR type which is similarly unbounded when no length
+      is specified.
+
0.6.6
=====
- orm
@@ -14,6 +24,12 @@ CHANGES
that weren't previously saved in the "mutable changes"
dictionary.
+ - Fixed uow bug whereby expired objects passed to
+ Session.delete() would not have unloaded references
+ or collections taken into account when deleting
+ objects, despite passive_deletes remaining at
+ its default of False. [ticket:2002]
+
- "innerjoin" flag doesn't take effect along the chain
of joinedload() joins if a previous join in that chain
is an outer join, thus allowing primary rows without
diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py
index 0b3dabc01..028322677 100644
--- a/lib/sqlalchemy/dialects/mssql/base.py
+++ b/lib/sqlalchemy/dialects/mssql/base.py
@@ -474,7 +474,7 @@ ischema_names = {
class MSTypeCompiler(compiler.GenericTypeCompiler):
- def _extend(self, spec, type_):
+ def _extend(self, spec, type_, length=None):
"""Extend a string-type declaration with standard SQL
COLLATE annotations.
@@ -484,9 +484,12 @@ class MSTypeCompiler(compiler.GenericTypeCompiler):
collation = 'COLLATE %s' % type_.collation
else:
collation = None
-
- if type_.length:
- spec = spec + "(%d)" % type_.length
+
+ if not length:
+ length = type_.length
+
+ if length:
+ spec = spec + "(%s)" % length
return ' '.join([c for c in (spec, collation)
if c is not None])
@@ -540,7 +543,8 @@ class MSTypeCompiler(compiler.GenericTypeCompiler):
return self._extend("TEXT", type_)
def visit_VARCHAR(self, type_):
- return self._extend("VARCHAR", type_)
+ return self._extend("VARCHAR", type_,
+ length = type_.length or 'max')
def visit_CHAR(self, type_):
return self._extend("CHAR", type_)
@@ -549,7 +553,8 @@ class MSTypeCompiler(compiler.GenericTypeCompiler):
return self._extend("NCHAR", type_)
def visit_NVARCHAR(self, type_):
- return self._extend("NVARCHAR", type_)
+ return self._extend("NVARCHAR", type_,
+ length = type_.length or 'max')
def visit_date(self, type_):
if self.dialect.server_version_info < MS_2008_VERSION:
diff --git a/lib/sqlalchemy/orm/attributes.py b/lib/sqlalchemy/orm/attributes.py
index 002215268..e7ab4c3a1 100644
--- a/lib/sqlalchemy/orm/attributes.py
+++ b/lib/sqlalchemy/orm/attributes.py
@@ -23,6 +23,7 @@ mapperutil = util.importlater("sqlalchemy.orm", "util")
PASSIVE_NO_RESULT = util.symbol('PASSIVE_NO_RESULT')
ATTR_WAS_SET = util.symbol('ATTR_WAS_SET')
+ATTR_EMPTY = util.symbol('ATTR_EMPTY')
NO_VALUE = util.symbol('NO_VALUE')
NEVER_SET = util.symbol('NEVER_SET')
@@ -59,7 +60,7 @@ class QueryableAttribute(interfaces.PropComparator):
self.impl = impl
self.comparator = comparator
self.parententity = parententity
-
+
manager = manager_of_class(class_)
# manager is None in the case of AliasedClass
if manager:
@@ -72,6 +73,10 @@ class QueryableAttribute(interfaces.PropComparator):
dispatch = event.dispatcher(events.AttributeEvents)
dispatch.dispatch_cls.active_history = False
+ @util.memoized_property
+ def _supports_population(self):
+ return self.impl.supports_population
+
def get_history(self, instance, **kwargs):
return self.impl.get_history(instance_state(instance),
instance_dict(instance), **kwargs)
@@ -127,8 +132,12 @@ class InstrumentedAttribute(QueryableAttribute):
def __get__(self, instance, owner):
if instance is None:
return self
- return self.impl.get(instance_state(instance),
- instance_dict(instance))
+
+ dict_ = instance_dict(instance)
+ if self._supports_population and self.key in dict_:
+ return dict_[self.key]
+ else:
+ return self.impl.get(instance_state(instance),dict_)
def create_proxied_attribute(descriptor):
"""Create an QueryableAttribute / user descriptor hybrid.
@@ -324,33 +333,37 @@ class AttributeImpl(object):
resulting value will be set as the new value for this attribute.
"""
- try:
+ if self.key in dict_:
return dict_[self.key]
- except KeyError:
- # if no history, check for lazy callables, etc.
- if state.committed_state.get(self.key, NEVER_SET) is NEVER_SET:
+ else:
+ # if history present, don't load
+ key = self.key
+ if key not in state.committed_state or \
+ state.committed_state[key] is NEVER_SET:
if passive is PASSIVE_NO_INITIALIZE:
return PASSIVE_NO_RESULT
- if self.key in state.callables:
- callable_ = state.callables[self.key]
- elif self.callable_ is not None:
- callable_ = self.callable_(state)
+ if key in state.callables:
+ callable_ = state.callables[key]
+ value = callable_(passive)
+ elif self.callable_:
+ value = self.callable_(state, passive)
else:
- callable_ = None
-
- if callable_ is not None:
- #if passive is not PASSIVE_OFF:
- # return PASSIVE_NO_RESULT
- value = callable_(passive=passive)
- if value is PASSIVE_NO_RESULT:
- return value
- elif value is not ATTR_WAS_SET:
- return self.set_committed_value(state, dict_, value)
- else:
- if self.key not in dict_:
- return self.get(state, dict_, passive=passive)
- return dict_[self.key]
+ value = ATTR_EMPTY
+
+ if value is PASSIVE_NO_RESULT:
+ return value
+ elif value is ATTR_WAS_SET:
+ try:
+ return dict_[key]
+ except KeyError:
+ # TODO: no test coverage here.
+ raise KeyError(
+ "Deferred loader for attribute "
+ "%r failed to populate "
+ "correctly" % key)
+ elif value is not ATTR_EMPTY:
+ return self.set_committed_value(state, dict_, value)
# Return a new, empty value
return self.initialize(state, dict_)
@@ -650,6 +663,7 @@ class CollectionAttributeImpl(AttributeImpl):
if original is NO_VALUE:
return list(current)
else:
+ # TODO: use the dict() of state, obj here
current_set = util.IdentitySet(current)
original_set = util.IdentitySet(original)
@@ -790,10 +804,8 @@ class CollectionAttributeImpl(AttributeImpl):
collection, user_data = self._initialize_collection(state)
if value:
- for item in value:
- collection.append_without_event(item)
+ collection.append_multiple_without_event(value)
- state.callables.pop(self.key, None)
state.dict[self.key] = user_data
state.commit(dict_, [self.key])
@@ -1070,6 +1082,10 @@ def get_all_pending(state, dict_, key):
state,
key,
passive=PASSIVE_NO_INITIALIZE).sum()
+
+ TODO: we'd like to more closely merge the "history" tuple
+ generation with "get_all_pending()", making the presence
+ of the "History" object optional.
"""
diff --git a/lib/sqlalchemy/orm/collections.py b/lib/sqlalchemy/orm/collections.py
index b52329523..99e6464f2 100644
--- a/lib/sqlalchemy/orm/collections.py
+++ b/lib/sqlalchemy/orm/collections.py
@@ -471,6 +471,9 @@ class CollectionAdapter(object):
The ORM uses an CollectionAdapter exclusively for interaction with
entity collections.
+ The usage of getattr()/setattr() is currently to allow injection
+ of custom methods, such as to unwrap Zope security proxies.
+
"""
def __init__(self, attr, owner_state, data):
self._key = attr.key
@@ -553,6 +556,12 @@ class CollectionAdapter(object):
"""Add or restore an entity to the collection, firing no events."""
getattr(self._data(), '_sa_appender')(item, _sa_initiator=False)
+ def append_multiple_without_event(self, items):
+ """Add or restore an entity to the collection, firing no events."""
+ appender = getattr(self._data(), '_sa_appender')
+ for item in items:
+ appender(item, _sa_initiator=False)
+
def remove_with_event(self, item, initiator=None):
"""Remove an entity from the collection, firing mutation events."""
getattr(self._data(), '_sa_remover')(item, _sa_initiator=initiator)
@@ -563,13 +572,17 @@ class CollectionAdapter(object):
def clear_with_event(self, initiator=None):
"""Empty the collection, firing a mutation event for each entity."""
+
+ remover = getattr(self._data(), '_sa_remover')
for item in list(self):
- self.remove_with_event(item, initiator)
+ remover(item, _sa_initiator=initiator)
def clear_without_event(self):
"""Empty the collection, firing no events."""
+
+ remover = getattr(self._data(), '_sa_remover')
for item in list(self):
- self.remove_without_event(item)
+ remover(item, _sa_initiator=False)
def __iter__(self):
"""Iterate over entities in the collection."""
diff --git a/lib/sqlalchemy/orm/dependency.py b/lib/sqlalchemy/orm/dependency.py
index 19c78c5c8..39ea1db35 100644
--- a/lib/sqlalchemy/orm/dependency.py
+++ b/lib/sqlalchemy/orm/dependency.py
@@ -152,10 +152,8 @@ class DependencyProcessor(object):
# detect if there's anything changed or loaded
# by a preprocessor on this state/attribute. if not,
# we should be able to skip it entirely.
- sum_ = uow.get_attribute_history(
- state,
- self.key,
- passive=True).sum()
+ sum_ = attributes.get_all_pending(state, state.dict, self.key)
+
if not sum_:
continue
@@ -177,9 +175,10 @@ class DependencyProcessor(object):
if child_in_cycles:
child_actions = []
- for child_state in sum_:
- if child_state is None:
+ for child in sum_:
+ if child is None:
continue
+ child_state = attributes.instance_state(child)
if child_state not in uow.states:
child_action = (None, None)
else:
@@ -221,6 +220,8 @@ class DependencyProcessor(object):
pass
def prop_has_changes(self, uowcommit, states, isdelete):
+ passive = not isdelete or self.passive_deletes
+
for s in states:
# TODO: add a high speed method
# to InstanceState which returns: attribute
@@ -228,7 +229,7 @@ class DependencyProcessor(object):
history = uowcommit.get_attribute_history(
s,
self.key,
- passive=True)
+ passive=passive)
if history and not history.empty():
return True
else:
diff --git a/lib/sqlalchemy/orm/identity.py b/lib/sqlalchemy/orm/identity.py
index 30c3a06b7..f1400a8c6 100644
--- a/lib/sqlalchemy/orm/identity.py
+++ b/lib/sqlalchemy/orm/identity.py
@@ -121,17 +121,28 @@ class WeakInstanceDict(IdentityMap):
dict.__setitem__(self, state.key, state)
self._manage_incoming_state(state)
-
+
def add(self, state):
- if state.key in self:
- if dict.__getitem__(self, state.key) is not state:
- raise AssertionError("A conflicting state is already "
- "present in the identity map for key %r"
- % (state.key, ))
- else:
- dict.__setitem__(self, state.key, state)
- self._manage_incoming_state(state)
-
+ key = state.key
+ # inline of self.__contains__
+ if dict.__contains__(self, key):
+ try:
+ existing_state = dict.__getitem__(self, key)
+ if existing_state is not state:
+ o = existing_state.obj()
+ if o is None:
+ o = existing_state._is_really_none()
+ if o is not None:
+ raise AssertionError("A conflicting state is already "
+ "present in the identity map for key %r"
+ % (key, ))
+ else:
+ return
+ except KeyError:
+ pass
+ dict.__setitem__(self, key, state)
+ self._manage_incoming_state(state)
+
def remove_key(self, key):
state = dict.__getitem__(self, key)
self.remove(state)
@@ -152,17 +163,16 @@ class WeakInstanceDict(IdentityMap):
self._manage_removed_state(state)
def get(self, key, default=None):
- state = dict.get(self, key, default)
- if state is default:
+ if not dict.__contains__(self, key):
return default
+ state = dict.__getitem__(self, key)
o = state.obj()
if o is None:
o = state._is_really_none()
- if o is None:
- return default
+ if o is None:
+ return default
return o
-
def items(self):
# Py2K
return list(self.iteritems())
diff --git a/lib/sqlalchemy/orm/interfaces.py b/lib/sqlalchemy/orm/interfaces.py
index 2a9d3760b..e917c675a 100644
--- a/lib/sqlalchemy/orm/interfaces.py
+++ b/lib/sqlalchemy/orm/interfaces.py
@@ -315,7 +315,7 @@ class StrategizedProperty(MapperProperty):
_reduce_path(path)), None)
if cls:
try:
- return self.__all_strategies[cls]
+ return self._strategies[cls]
except KeyError:
return self.__init_strategy(cls)
else:
@@ -323,12 +323,12 @@ class StrategizedProperty(MapperProperty):
def _get_strategy(self, cls):
try:
- return self.__all_strategies[cls]
+ return self._strategies[cls]
except KeyError:
return self.__init_strategy(cls)
def __init_strategy(self, cls):
- self.__all_strategies[cls] = strategy = cls(self)
+ self._strategies[cls] = strategy = cls(self)
strategy.init()
return strategy
@@ -341,7 +341,7 @@ class StrategizedProperty(MapperProperty):
create_row_processor(context, path, mapper, row, adapter)
def do_init(self):
- self.__all_strategies = {}
+ self._strategies = {}
self.strategy = self.__init_strategy(self.strategy_class)
def post_instrument_class(self, mapper):
diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py
index 2b050ebff..48c37f80d 100644
--- a/lib/sqlalchemy/orm/mapper.py
+++ b/lib/sqlalchemy/orm/mapper.py
@@ -519,7 +519,7 @@ class Mapper(object):
"key columns for mapped table '%s'" %
(self, self.mapped_table.description))
- self.primary_key = primary_key
+ self.primary_key = tuple(primary_key)
self._log("Identified primary key columns: %s", primary_key)
def _configure_properties(self):
@@ -1287,8 +1287,13 @@ class Mapper(object):
def _get_committed_state_attr_by_column(self, state, dict_, column,
passive=False):
- return self._columntoproperty[column]._getcommitted(
- state, dict_, column, passive=passive)
+
+ prop = self._columntoproperty[column]
+ value = state.manager[prop.key].impl.\
+ get_committed_value(state, dict_, passive=passive)
+ if prop.get_col_value:
+ value = prop.get_col_value(column, value)
+ return value
def _optimized_get_statement(self, state, attribute_names):
"""assemble a WHERE clause which retrieves a given state by primary
diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py
index d628d87dc..239159f3e 100644
--- a/lib/sqlalchemy/orm/properties.py
+++ b/lib/sqlalchemy/orm/properties.py
@@ -104,7 +104,7 @@ class ColumnProperty(StrategizedProperty):
def do_init(self):
super(ColumnProperty, self).do_init()
if len(self.columns) > 1 and \
- self.parent.primary_key.issuperset(self.columns):
+ set(self.parent.primary_key).issuperset(self.columns):
util.warn(
("On mapper %s, primary key column '%s' is being combined "
"with distinct primary key column '%s' in attribute '%s'. "
diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py
index 58c224636..9acb3485f 100644
--- a/lib/sqlalchemy/orm/query.py
+++ b/lib/sqlalchemy/orm/query.py
@@ -1922,7 +1922,7 @@ class Query(object):
# TODO: no coverage here
return attributes.PASSIVE_NO_RESULT
try:
- state()
+ state(passive)
except orm_exc.ObjectDeletedError:
session._remove_newly_deleted(state)
return None
diff --git a/lib/sqlalchemy/orm/state.py b/lib/sqlalchemy/orm/state.py
index 48861085d..392d83d94 100644
--- a/lib/sqlalchemy/orm/state.py
+++ b/lib/sqlalchemy/orm/state.py
@@ -36,10 +36,8 @@ class InstanceState(object):
self.class_ = obj.__class__
self.manager = manager
self.obj = weakref.ref(obj, self._cleanup)
-
- @util.memoized_property
- def committed_state(self):
- return {}
+ self.callables = {}
+ self.committed_state = {}
@util.memoized_property
def parents(self):
@@ -49,10 +47,6 @@ class InstanceState(object):
def pending(self):
return {}
- @util.memoized_property
- def callables(self):
- return {}
-
@property
def has_identity(self):
return bool(self.key)
@@ -75,10 +69,18 @@ class InstanceState(object):
instance_dict.remove(self)
except AssertionError:
pass
+
# remove possible cycles
- self.__dict__.pop('callables', None)
- self.dispose()
-
+ self.callables.clear()
+
+ # inlining of self.dispose()
+ if self.session_id:
+ try:
+ del self.session_id
+ except AttributeError:
+ pass
+ del self.obj
+
def obj(self):
return None
@@ -251,11 +253,8 @@ class InstanceState(object):
else:
filter_deferred = False
- to_clear = (
- self.__dict__.get('pending', None),
- self.__dict__.get('committed_state', None),
- self.mutable_dict
- )
+ pending = self.__dict__.get('pending', None)
+ mutable_dict = self.mutable_dict
for key in attribute_names:
impl = self.manager[key].impl
@@ -264,18 +263,20 @@ class InstanceState(object):
self.callables[key] = self
dict_.pop(key, None)
- for d in to_clear:
- if d is not None:
- d.pop(key, None)
+ self.committed_state.pop(key, None)
+ if mutable_dict:
+ mutable_dict.pop(key, None)
+ if pending:
+ pending.pop(key, None)
- def __call__(self, **kw):
+ def __call__(self, passive):
"""__call__ allows the InstanceState to act as a deferred
callable for loading expired attributes, which is also
serializable (picklable).
"""
- if kw.get('passive') is PASSIVE_NO_FETCH:
+ if passive is PASSIVE_NO_FETCH:
return PASSIVE_NO_RESULT
toload = self.expired_attributes.\
@@ -407,16 +408,15 @@ class InstanceState(object):
if a value was not populated in state.dict.
"""
-
- self.__dict__.pop('committed_state', None)
- self.__dict__.pop('pending', None)
- if 'callables' in self.__dict__:
- callables = self.callables
- for key in list(callables):
- if key in dict_ and callables[key] is self:
- del callables[key]
+ self.committed_state.clear()
+ self.__dict__.pop('pending', None)
+ callables = self.callables
+ for key in list(callables):
+ if key in dict_ and callables[key] is self:
+ del callables[key]
+
for key in self.manager.mutable_attributes:
if key in dict_:
self.committed_state[key] = self.manager[key].impl.copy(dict_[key])
diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py
index d6fb0c005..c4619d3a7 100644
--- a/lib/sqlalchemy/orm/strategies.py
+++ b/lib/sqlalchemy/orm/strategies.py
@@ -230,7 +230,7 @@ class DeferredColumnLoader(LoaderStrategy):
compare_function=self.columns[0].type.compare_values,
copy_function=self.columns[0].type.copy_value,
mutable_scalars=self.columns[0].type.is_mutable(),
- callable_=self._class_level_loader,
+ callable_=self._load_for_state,
expire_missing=False
)
@@ -244,51 +244,26 @@ class DeferredColumnLoader(LoaderStrategy):
setup_query(context, entity,
path, adapter, **kwargs)
- def _class_level_loader(self, state):
+ def _load_for_state(self, state, passive):
if not state.key:
- return None
-
- return LoadDeferredColumns(state, self.key)
-
-
-log.class_logger(DeferredColumnLoader)
-
-class LoadDeferredColumns(object):
- """serializable loader object used by DeferredColumnLoader"""
+ return attributes.ATTR_EMPTY
- __slots__ = 'state', 'key'
-
- def __init__(self, state, key):
- self.state = state
- self.key = key
-
- def __getstate__(self):
- return self.state, self.key
-
- def __setstate__(self, state):
- self.state, self.key = state
-
- def __call__(self, passive=False):
- state, key = self.state, self.key
-
if passive is attributes.PASSIVE_NO_FETCH:
return attributes.PASSIVE_NO_RESULT
-
- localparent = mapper._state_mapper(state)
+
+ prop = self.parent_property
+ localparent = state.manager.mapper
- prop = localparent._props[key]
- strategy = prop._get_strategy(DeferredColumnLoader)
-
- if strategy.group:
+ if self.group:
toload = [
p.key for p in
localparent.iterate_properties
if isinstance(p, StrategizedProperty) and
isinstance(p.strategy, DeferredColumnLoader) and
- p.group==strategy.group
+ p.group==self.group
]
else:
- toload = [key]
+ toload = [self.key]
# narrow the keys down to just those which have no history
group = [k for k in toload if k in state.unmodified]
@@ -298,13 +273,30 @@ class LoadDeferredColumns(object):
raise orm_exc.DetachedInstanceError(
"Parent instance %s is not bound to a Session; "
"deferred load operation of attribute '%s' cannot proceed" %
- (mapperutil.state_str(state), key)
+ (mapperutil.state_str(state), self.key)
)
query = session.query(localparent)
query._load_on_ident(state.key,
only_load_props=group, refresh_state=state)
return attributes.ATTR_WAS_SET
+
+log.class_logger(DeferredColumnLoader)
+
+class LoadDeferredColumns(object):
+ """serializable loader object used by DeferredColumnLoader"""
+
+ def __init__(self, state, key):
+ self.state = state
+ self.key = key
+
+ def __call__(self, passive=False):
+ state, key = self.state, self.key
+
+ localparent = state.manager.mapper
+ prop = localparent._props[key]
+ strategy = prop._strategies[DeferredColumnLoader]
+ return strategy._load_for_state(state, passive)
class DeferredOption(StrategizedOption):
propagate_to_loaders = True
@@ -398,7 +390,7 @@ class LazyLoader(AbstractRelationshipLoader):
_register_attribute(self,
mapper,
useobject=True,
- callable_=self._class_level_loader,
+ callable_=self._load_for_state,
uselist = self.parent_property.uselist,
backref = self.parent_property.back_populates,
typecallable = self.parent_property.collection_class,
@@ -483,111 +475,20 @@ class LazyLoader(AbstractRelationshipLoader):
criterion = adapt_source(criterion)
return criterion
- def _class_level_loader(self, state):
+ def _load_for_state(self, state, passive):
if not state.key and \
(not self.parent_property.load_on_pending or not state.session_id):
- return None
-
- return LoadLazyAttribute(state, self.key)
-
- def create_row_processor(self, selectcontext, path, mapper, row, adapter):
- key = self.key
- if not self.is_class_level:
- def new_execute(state, dict_, row):
- # we are not the primary manager for this attribute
- # on this class - set up a
- # per-instance lazyloader, which will override the
- # class-level behavior.
- # this currently only happens when using a
- # "lazyload" option on a "no load"
- # attribute - "eager" attributes always have a
- # class-level lazyloader installed.
- state.set_callable(dict_, key, LoadLazyAttribute(state, key))
- else:
- def new_execute(state, dict_, row):
- # we are the primary manager for this attribute on
- # this class - reset its
- # per-instance attribute state, so that the class-level
- # lazy loader is
- # executed when next referenced on this instance.
- # this is needed in
- # populate_existing() types of scenarios to reset
- # any existing state.
- state.reset(dict_, key)
-
- return new_execute, None, None
-
- @classmethod
- def _create_lazy_clause(cls, prop, reverse_direction=False):
- binds = util.column_dict()
- lookup = util.column_dict()
- equated_columns = util.column_dict()
-
- if reverse_direction and prop.secondaryjoin is None:
- for l, r in prop.local_remote_pairs:
- _list = lookup.setdefault(r, [])
- _list.append((r, l))
- equated_columns[l] = r
- else:
- for l, r in prop.local_remote_pairs:
- _list = lookup.setdefault(l, [])
- _list.append((l, r))
- equated_columns[r] = l
-
- def col_to_bind(col):
- if col in lookup:
- for tobind, equated in lookup[col]:
- if equated in binds:
- return None
- if col not in binds:
- binds[col] = sql.bindparam(None, None, type_=col.type)
- return binds[col]
- return None
-
- lazywhere = prop.primaryjoin
-
- if prop.secondaryjoin is None or not reverse_direction:
- lazywhere = visitors.replacement_traverse(
- lazywhere, {}, col_to_bind)
-
- if prop.secondaryjoin is not None:
- secondaryjoin = prop.secondaryjoin
- if reverse_direction:
- secondaryjoin = visitors.replacement_traverse(
- secondaryjoin, {}, col_to_bind)
- lazywhere = sql.and_(lazywhere, secondaryjoin)
-
- bind_to_col = dict((binds[col].key, col) for col in binds)
+ return attributes.ATTR_EMPTY
- return lazywhere, bind_to_col, equated_columns
-
-log.class_logger(LazyLoader)
-
-class LoadLazyAttribute(object):
- """serializable loader object used by LazyLoader"""
-
- __slots__ = 'state', 'key'
-
- def __init__(self, state, key):
- self.state = state
- self.key = key
-
- def __getstate__(self):
- return self.state, self.key
-
- def __setstate__(self, state):
- self.state, self.key = state
-
- def __call__(self, passive=False):
- state, key = self.state, self.key
- instance_mapper = mapper._state_mapper(state)
- prop = instance_mapper._props[key]
- strategy = prop._get_strategy(LazyLoader)
+ instance_mapper = state.manager.mapper
+ prop = self.parent_property
+ key = self.key
+ prop_mapper = self.mapper
pending = not state.key
if (
passive is attributes.PASSIVE_NO_FETCH and
- not strategy.use_get
+ not self.use_get
) or (
passive is attributes.PASSIVE_ONLY_PERSISTENT and
pending
@@ -595,7 +496,7 @@ class LoadLazyAttribute(object):
return attributes.PASSIVE_NO_RESULT
session = sessionlib._state_session(state)
- if session is None:
+ if not session:
raise orm_exc.DetachedInstanceError(
"Parent instance %s is not bound to a Session; "
"lazy load operation of attribute '%s' cannot proceed" %
@@ -604,19 +505,20 @@ class LoadLazyAttribute(object):
# if we have a simple primary key load, check the
# identity map without generating a Query at all
- if strategy.use_get:
+ if self.use_get:
if session._flushing:
get_attr = instance_mapper._get_committed_state_attr_by_column
else:
get_attr = instance_mapper._get_state_attr_by_column
+ dict_ = state.dict
ident = [
get_attr(
state,
state.dict,
- strategy._equated_columns[pk],
+ self._equated_columns[pk],
passive=passive)
- for pk in prop.mapper.primary_key
+ for pk in prop_mapper.primary_key
]
if attributes.PASSIVE_NO_RESULT in ident:
return attributes.PASSIVE_NO_RESULT
@@ -624,14 +526,14 @@ class LoadLazyAttribute(object):
if _none_set.issuperset(ident):
return None
- ident_key = prop.mapper.identity_key_from_primary_key(ident)
+ ident_key = prop_mapper.identity_key_from_primary_key(ident)
instance = Query._get_from_identity(session, ident_key, passive)
if instance is not None:
return instance
elif passive is attributes.PASSIVE_NO_FETCH:
return attributes.PASSIVE_NO_RESULT
- q = session.query(prop.mapper)._adapt_all_clauses()
+ q = session.query(prop_mapper)._adapt_all_clauses()
# don't autoflush on pending
if pending:
@@ -643,7 +545,7 @@ class LoadLazyAttribute(object):
if state.load_options:
q = q._conditional_options(*state.load_options)
- if strategy.use_get:
+ if self.use_get:
return q._load_on_ident(ident_key)
if prop.order_by:
@@ -657,7 +559,7 @@ class LoadLazyAttribute(object):
not isinstance(rev.strategy, LazyLoader):
q = q.options(EagerLazyOption((rev.key,), lazy='select'))
- lazy_clause = strategy.lazy_clause(state)
+ lazy_clause = self.lazy_clause(state)
if pending:
bind_values = sql_util.bind_values(lazy_clause)
@@ -667,7 +569,7 @@ class LoadLazyAttribute(object):
q = q.filter(lazy_clause)
result = q.all()
- if strategy.uselist:
+ if self.uselist:
return result
else:
l = len(result)
@@ -682,6 +584,95 @@ class LoadLazyAttribute(object):
else:
return None
+ def create_row_processor(self, selectcontext, path, mapper, row, adapter):
+ key = self.key
+ if not self.is_class_level:
+ def new_execute(state, dict_, row):
+ # we are not the primary manager for this attribute
+ # on this class - set up a
+ # per-instance lazyloader, which will override the
+ # class-level behavior.
+ # this currently only happens when using a
+ # "lazyload" option on a "no load"
+ # attribute - "eager" attributes always have a
+ # class-level lazyloader installed.
+ state.set_callable(dict_, key, LoadLazyAttribute(state, key))
+ else:
+ def new_execute(state, dict_, row):
+ # we are the primary manager for this attribute on
+ # this class - reset its
+ # per-instance attribute state, so that the class-level
+ # lazy loader is
+ # executed when next referenced on this instance.
+ # this is needed in
+ # populate_existing() types of scenarios to reset
+ # any existing state.
+ state.reset(dict_, key)
+
+ return new_execute, None, None
+
+ @classmethod
+ def _create_lazy_clause(cls, prop, reverse_direction=False):
+ binds = util.column_dict()
+ lookup = util.column_dict()
+ equated_columns = util.column_dict()
+
+ if reverse_direction and prop.secondaryjoin is None:
+ for l, r in prop.local_remote_pairs:
+ _list = lookup.setdefault(r, [])
+ _list.append((r, l))
+ equated_columns[l] = r
+ else:
+ for l, r in prop.local_remote_pairs:
+ _list = lookup.setdefault(l, [])
+ _list.append((l, r))
+ equated_columns[r] = l
+
+ def col_to_bind(col):
+ if col in lookup:
+ for tobind, equated in lookup[col]:
+ if equated in binds:
+ return None
+ if col not in binds:
+ binds[col] = sql.bindparam(None, None, type_=col.type)
+ return binds[col]
+ return None
+
+ lazywhere = prop.primaryjoin
+
+ if prop.secondaryjoin is None or not reverse_direction:
+ lazywhere = visitors.replacement_traverse(
+ lazywhere, {}, col_to_bind)
+
+ if prop.secondaryjoin is not None:
+ secondaryjoin = prop.secondaryjoin
+ if reverse_direction:
+ secondaryjoin = visitors.replacement_traverse(
+ secondaryjoin, {}, col_to_bind)
+ lazywhere = sql.and_(lazywhere, secondaryjoin)
+
+ bind_to_col = dict((binds[col].key, col) for col in binds)
+
+ return lazywhere, bind_to_col, equated_columns
+
+log.class_logger(LazyLoader)
+
+class LoadLazyAttribute(object):
+ """serializable loader object used by LazyLoader"""
+
+ def __init__(self, state, key):
+ self.state = state
+ self.key = key
+
+ def __call__(self, passive=False):
+ state, key = self.state, self.key
+ instance_mapper = state.manager.mapper
+ prop = instance_mapper._props[key]
+ strategy = prop._strategies[LazyLoader]
+
+ return strategy._load_for_state(state, passive)
+
+
class ImmediateLoader(AbstractRelationshipLoader):
def init_class_attribute(self, mapper):
self.parent_property.\
diff --git a/lib/sqlalchemy/orm/unitofwork.py b/lib/sqlalchemy/orm/unitofwork.py
index ab62e5324..1e1eda4a3 100644
--- a/lib/sqlalchemy/orm/unitofwork.py
+++ b/lib/sqlalchemy/orm/unitofwork.py
@@ -166,6 +166,8 @@ class UOWTransaction(object):
self.attributes[hashkey] = (history, state_history, passive)
else:
impl = state.manager[key].impl
+ # TODO: store the history as (state, object) tuples
+ # so we don't have to keep converting here
history = impl.get_history(state, state.dict, passive=passive)
if history and impl.uses_objects:
state_history = history.as_state()
diff --git a/lib/sqlalchemy/schema.py b/lib/sqlalchemy/schema.py
index cb6b27e36..67f37e763 100644
--- a/lib/sqlalchemy/schema.py
+++ b/lib/sqlalchemy/schema.py
@@ -1591,7 +1591,11 @@ class ColumnCollectionConstraint(Constraint):
return self.columns.contains_column(col)
def __iter__(self):
- return iter(self.columns)
+ # inlining of
+ # return iter(self.columns)
+ # ColumnCollection->OrderedProperties->OrderedDict
+ ordered_dict = self.columns._data
+ return (ordered_dict[key] for key in ordered_dict._list)
def __len__(self):
return len(self.columns)
diff --git a/lib/sqlalchemy/util/_collections.py b/lib/sqlalchemy/util/_collections.py
index fd5e1449d..57ce02a1e 100644
--- a/lib/sqlalchemy/util/_collections.py
+++ b/lib/sqlalchemy/util/_collections.py
@@ -18,10 +18,9 @@ class NamedTuple(tuple):
"""
def __new__(cls, vals, labels=None):
- vals = list(vals)
t = tuple.__new__(cls, vals)
if labels:
- t.__dict__ = dict(itertools.izip(labels, vals))
+ t.__dict__.update(zip(labels, vals))
t._labels = labels
return t
diff --git a/test/aaa_profiling/test_orm.py b/test/aaa_profiling/test_orm.py
index 71a5153e6..8a92ec07c 100644
--- a/test/aaa_profiling/test_orm.py
+++ b/test/aaa_profiling/test_orm.py
@@ -54,7 +54,7 @@ class MergeTest(_base.MappedTest):
# bigger operation so using a small variance
@profiling.function_call_count(86, variance=0.05,
- versions={'2.4': 68, '2.5':94, '3': 89})
+ versions={'2.4': 64, '2.5':94, '3': 89})
def go():
return sess2.merge(p1, load=False)
p2 = go()
@@ -172,7 +172,7 @@ class LoadManyToOneFromIdentityTest(_base.MappedTest):
parents = sess.query(Parent).all()
children = sess.query(Child).all()
- @profiling.function_call_count(23979, {'2.5':28974, '3':25978})
+ @profiling.function_call_count(17987, {'3':18987})
def go():
for p in parents:
p.child
diff --git a/test/dialect/test_mssql.py b/test/dialect/test_mssql.py
index f3643c4df..68203cfea 100644
--- a/test/dialect/test_mssql.py
+++ b/test/dialect/test_mssql.py
@@ -764,19 +764,19 @@ class SchemaTest(TestBase):
return self.ddl_compiler.get_column_specification(self.column)
def test_that_mssql_default_nullability_emits_null(self):
- eq_("test_column VARCHAR NULL", self._column_spec())
+ eq_("test_column VARCHAR(max) NULL", self._column_spec())
def test_that_mssql_none_nullability_does_not_emit_nullability(self):
self.column.nullable = None
- eq_("test_column VARCHAR", self._column_spec())
+ eq_("test_column VARCHAR(max)", self._column_spec())
def test_that_mssql_specified_nullable_emits_null(self):
self.column.nullable = True
- eq_("test_column VARCHAR NULL", self._column_spec())
+ eq_("test_column VARCHAR(max) NULL", self._column_spec())
def test_that_mssql_specified_not_nullable_emits_not_null(self):
self.column.nullable = False
- eq_("test_column VARCHAR NOT NULL", self._column_spec())
+ eq_("test_column VARCHAR(max) NOT NULL", self._column_spec())
def full_text_search_missing():
@@ -1424,16 +1424,16 @@ class TypesTest(TestBase, AssertsExecutionResults, ComparesTables):
'NCHAR(1)'),
(mssql.MSNChar, [1], {'collation': 'Latin1_General_CI_AS'},
'NCHAR(1) COLLATE Latin1_General_CI_AS'),
-
+
(mssql.MSString, [], {},
- 'VARCHAR'),
+ 'VARCHAR(max)'),
(mssql.MSString, [1], {},
'VARCHAR(1)'),
(mssql.MSString, [1], {'collation': 'Latin1_General_CI_AS'},
'VARCHAR(1) COLLATE Latin1_General_CI_AS'),
(mssql.MSNVarchar, [], {},
- 'NVARCHAR'),
+ 'NVARCHAR(max)'),
(mssql.MSNVarchar, [1], {},
'NVARCHAR(1)'),
(mssql.MSNVarchar, [1], {'collation': 'Latin1_General_CI_AS'},
diff --git a/test/orm/inheritance/test_poly_linked_list.py b/test/orm/inheritance/test_poly_linked_list.py
index 8b300f06a..db622920a 100644
--- a/test/orm/inheritance/test_poly_linked_list.py
+++ b/test/orm/inheritance/test_poly_linked_list.py
@@ -113,7 +113,7 @@ class PolymorphicCircularTest(_base.MappedTest):
table3_mapper = mapper(Table3, table3, inherits=table1_mapper, polymorphic_identity='table3')
configure_mappers()
- assert table1_mapper.primary_key == [table1.c.id], table1_mapper.primary_key
+ assert table1_mapper.primary_key == (table1.c.id,), table1_mapper.primary_key
@testing.fails_on('maxdb', 'FIXME: unknown')
def testone(self):
diff --git a/test/orm/test_attributes.py b/test/orm/test_attributes.py
index b543e79a1..91f61c05c 100644
--- a/test/orm/test_attributes.py
+++ b/test/orm/test_attributes.py
@@ -49,16 +49,28 @@ class AttributesTest(_base.ORMTest):
def test_pickleness(self):
instrumentation.register_class(MyTest)
instrumentation.register_class(MyTest2)
- attributes.register_attribute(MyTest, 'user_id', uselist=False, useobject=False)
- attributes.register_attribute(MyTest, 'user_name', uselist=False, useobject=False)
- attributes.register_attribute(MyTest, 'email_address', uselist=False, useobject=False)
- attributes.register_attribute(MyTest, 'some_mutable_data', mutable_scalars=True, copy_function=list, compare_function=cmp, uselist=False, useobject=False)
- attributes.register_attribute(MyTest2, 'a', uselist=False, useobject=False)
- attributes.register_attribute(MyTest2, 'b', uselist=False, useobject=False)
+ attributes.register_attribute(MyTest, 'user_id', uselist=False,
+ useobject=False)
+ attributes.register_attribute(MyTest, 'user_name',
+ uselist=False, useobject=False)
+ attributes.register_attribute(MyTest, 'email_address',
+ uselist=False, useobject=False)
+ attributes.register_attribute(MyTest, 'some_mutable_data',
+ mutable_scalars=True, copy_function=list,
+ compare_function=cmp, uselist=False, useobject=False)
+ attributes.register_attribute(MyTest2, 'a', uselist=False,
+ useobject=False)
+ attributes.register_attribute(MyTest2, 'b', uselist=False,
+ useobject=False)
+
# shouldnt be pickling callables at the class level
- def somecallable(*args, **kw):
+
+ def somecallable(state, passive):
return None
- attributes.register_attribute(MyTest, "mt2", uselist = True, trackparent=True, callable_=somecallable, useobject=True)
+
+ attributes.register_attribute(MyTest, 'mt2', uselist=True,
+ trackparent=True, callable_=somecallable,
+ useobject=True)
o = MyTest()
o.mt2.append(MyTest2())
@@ -259,26 +271,26 @@ class AttributesTest(_base.ORMTest):
b1, b2, b3, b4 = Bar(id='b1'), Bar(id='b2'), Bar(id='b3'), Bar(id='b4')
- def loadcollection(**kw):
- if kw.get('passive') is attributes.PASSIVE_NO_FETCH:
+ def loadcollection(state, passive):
+ if passive is attributes.PASSIVE_NO_FETCH:
return attributes.PASSIVE_NO_RESULT
return [b1, b2]
- def loadscalar(**kw):
- if kw.get('passive') is attributes.PASSIVE_NO_FETCH:
+ def loadscalar(state, passive):
+ if passive is attributes.PASSIVE_NO_FETCH:
return attributes.PASSIVE_NO_RESULT
return b2
attributes.register_attribute(Foo, 'bars',
uselist=True,
useobject=True,
- callable_=lambda o:loadcollection,
+ callable_=loadcollection,
extension=[ReceiveEvents('bars')])
attributes.register_attribute(Foo, 'bar',
uselist=False,
useobject=True,
- callable_=lambda o:loadscalar,
+ callable_=loadscalar,
extension=[ReceiveEvents('bar')])
attributes.register_attribute(Foo, 'scalar',
@@ -341,14 +353,17 @@ class AttributesTest(_base.ORMTest):
instrumentation.register_class(Bar)
bar1, bar2, bar3 = [Bar(id=1), Bar(id=2), Bar(id=3)]
- def func1(**kw):
- if kw.get('passive') is attributes.PASSIVE_NO_FETCH:
+ def func1(state, passive):
+ if passive is attributes.PASSIVE_NO_FETCH:
return attributes.PASSIVE_NO_RESULT
return [bar1, bar2, bar3]
- attributes.register_attribute(Foo, 'bars', uselist=True, callable_=lambda o:func1, useobject=True, extension=[ReceiveEvents()])
- attributes.register_attribute(Bar, 'foos', uselist=True, useobject=True, backref="bars")
+ attributes.register_attribute(Foo, 'bars', uselist=True,
+ callable_=func1, useobject=True,
+ extension=[ReceiveEvents()])
+ attributes.register_attribute(Bar, 'foos', uselist=True,
+ useobject=True, backref='bars')
x = Foo()
assert_raises(AssertionError, Bar(id=4).foos.append, x)
@@ -423,9 +438,9 @@ class AttributesTest(_base.ORMTest):
b = Blog()
p1 = Post()
attributes.instance_state(b).set_callable(attributes.instance_dict(b),
- 'posts', lambda **kw:[p1])
+ 'posts', lambda passive:[p1])
attributes.instance_state(p1).set_callable(attributes.instance_dict(p1),
- 'blog', lambda **kw:b)
+ 'blog', lambda passive:b)
p1, attributes.instance_state(b).commit_all(attributes.instance_dict(b))
# no orphans (called before the lazy loaders fire off)
@@ -452,18 +467,18 @@ class AttributesTest(_base.ORMTest):
instrumentation.register_class(Foo)
instrumentation.register_class(Bar)
- def func1(**kw):
+ def func1(state, passive):
return "this is the foo attr"
- def func2(**kw):
+ def func2(state, passive):
return "this is the bar attr"
- def func3(**kw):
+ def func3(state, passive):
return "this is the shared attr"
attributes.register_attribute(Foo, 'element', uselist=False,
- callable_=lambda o:func1, useobject=True)
+ callable_=func1, useobject=True)
attributes.register_attribute(Foo, 'element2', uselist=False,
- callable_=lambda o:func3, useobject=True)
+ callable_=func3, useobject=True)
attributes.register_attribute(Bar, 'element', uselist=False,
- callable_=lambda o:func2, useobject=True)
+ callable_=func2, useobject=True)
x = Foo()
y = Bar()
@@ -525,14 +540,17 @@ class AttributesTest(_base.ORMTest):
instrumentation.register_class(Bar)
bar1, bar2, bar3, bar4 = [Bar(id=1), Bar(id=2), Bar(id=3), Bar(id=4)]
- def func1(**kw):
+ def func1(state, passive):
return "this is func 1"
- def func2(**kw):
+ def func2(state, passive):
return [bar1, bar2, bar3]
- attributes.register_attribute(Foo, 'col1', uselist=False, callable_=lambda o:func1, useobject=True)
- attributes.register_attribute(Foo, 'col2', uselist=True, callable_=lambda o:func2, useobject=True)
- attributes.register_attribute(Bar, 'id', uselist=False, useobject=True)
+ attributes.register_attribute(Foo, 'col1', uselist=False,
+ callable_=func1, useobject=True)
+ attributes.register_attribute(Foo, 'col2', uselist=True,
+ callable_=func2, useobject=True)
+ attributes.register_attribute(Bar, 'id', uselist=False,
+ useobject=True)
x = Foo()
attributes.instance_state(x).commit_all(attributes.instance_dict(x))
@@ -864,9 +882,6 @@ class BackrefTest(_base.ORMTest):
# and this condition changes.
assert c1 in p1.children
-
-
-
class PendingBackrefTest(_base.ORMTest):
def setup(self):
global Post, Blog, called, lazy_load
@@ -888,19 +903,20 @@ class PendingBackrefTest(_base.ORMTest):
called = [0]
lazy_load = []
- def lazy_posts(instance):
- def load(**kw):
- if kw['passive'] is not attributes.PASSIVE_NO_FETCH:
- called[0] += 1
- return lazy_load
- else:
- return attributes.PASSIVE_NO_RESULT
- return load
+ def lazy_posts(state, passive):
+ if passive is not attributes.PASSIVE_NO_FETCH:
+ called[0] += 1
+ return lazy_load
+ else:
+ return attributes.PASSIVE_NO_RESULT
instrumentation.register_class(Post)
instrumentation.register_class(Blog)
- attributes.register_attribute(Post, 'blog', uselist=False, backref='posts', trackparent=True, useobject=True)
- attributes.register_attribute(Blog, 'posts', uselist=True, backref='blog', callable_=lazy_posts, trackparent=True, useobject=True)
+ attributes.register_attribute(Post, 'blog', uselist=False,
+ backref='posts', trackparent=True, useobject=True)
+ attributes.register_attribute(Blog, 'posts', uselist=True,
+ backref='blog', callable_=lazy_posts, trackparent=True,
+ useobject=True)
def test_lazy_add(self):
global lazy_load
@@ -1384,15 +1400,16 @@ class HistoryTest(_base.ORMTest):
pass
lazy_load = []
- def lazyload(instance):
- def load(**kw):
- return lazy_load
- return load
+ def lazyload(state, passive):
+ return lazy_load
instrumentation.register_class(Foo)
instrumentation.register_class(Bar)
- attributes.register_attribute(Foo, 'bars', uselist=True, backref='foo', trackparent=True, callable_=lazyload, useobject=True)
- attributes.register_attribute(Bar, 'foo', uselist=False, backref='bars', trackparent=True, useobject=True)
+ attributes.register_attribute(Foo, 'bars', uselist=True,
+ backref='foo', trackparent=True, callable_=lazyload,
+ useobject=True)
+ attributes.register_attribute(Bar, 'foo', uselist=False,
+ backref='bars', trackparent=True, useobject=True)
bar1, bar2, bar3, bar4 = [Bar(id=1), Bar(id=2), Bar(id=3), Bar(id=4)]
lazy_load = [bar1, bar2, bar3]
@@ -1419,14 +1436,13 @@ class HistoryTest(_base.ORMTest):
pass
lazy_load = []
- def lazyload(instance):
- def load(**kw):
- return lazy_load
- return load
+ def lazyload(state, passive):
+ return lazy_load
instrumentation.register_class(Foo)
instrumentation.register_class(Bar)
- attributes.register_attribute(Foo, 'bars', uselist=True, callable_=lazyload, trackparent=True, useobject=True)
+ attributes.register_attribute(Foo, 'bars', uselist=True,
+ callable_=lazyload, trackparent=True, useobject=True)
bar1, bar2, bar3, bar4 = [Bar(id=1), Bar(id=2), Bar(id=3), Bar(id=4)]
lazy_load = [bar1, bar2, bar3]
@@ -1459,14 +1475,13 @@ class HistoryTest(_base.ORMTest):
pass
lazy_load = None
- def lazyload(instance):
- def load(**kw):
- return lazy_load
- return load
+ def lazyload(state, passive):
+ return lazy_load
instrumentation.register_class(Foo)
- attributes.register_attribute(Foo, 'bar', uselist=False, callable_=lazyload, useobject=False)
- lazy_load = "hi"
+ attributes.register_attribute(Foo, 'bar', uselist=False,
+ callable_=lazyload, useobject=False)
+ lazy_load = 'hi'
# with scalar non-object and active_history=False, the lazy callable is only executed on gets, not history
# operations
@@ -1497,14 +1512,14 @@ class HistoryTest(_base.ORMTest):
pass
lazy_load = None
- def lazyload(instance):
- def load(**kw):
- return lazy_load
- return load
+ def lazyload(state, passive):
+ return lazy_load
instrumentation.register_class(Foo)
- attributes.register_attribute(Foo, 'bar', uselist=False, callable_=lazyload, useobject=False, active_history=True)
- lazy_load = "hi"
+ attributes.register_attribute(Foo, 'bar', uselist=False,
+ callable_=lazyload, useobject=False,
+ active_history=True)
+ lazy_load = 'hi'
# active_history=True means the lazy callable is executed on set as well as get,
# causing the old value to appear in the history
@@ -1537,14 +1552,13 @@ class HistoryTest(_base.ORMTest):
pass
lazy_load = None
- def lazyload(instance):
- def load(**kw):
- return lazy_load
- return load
+ def lazyload(state, passive):
+ return lazy_load
instrumentation.register_class(Foo)
instrumentation.register_class(Bar)
- attributes.register_attribute(Foo, 'bar', uselist=False, callable_=lazyload, trackparent=True, useobject=True)
+ attributes.register_attribute(Foo, 'bar', uselist=False,
+ callable_=lazyload, trackparent=True, useobject=True)
bar1, bar2 = [Bar(id=1), Bar(id=2)]
lazy_load = bar1
diff --git a/test/orm/test_extendedattr.py b/test/orm/test_extendedattr.py
index ec7963c29..2eca1ac38 100644
--- a/test/orm/test_extendedattr.py
+++ b/test/orm/test_extendedattr.py
@@ -197,18 +197,21 @@ class UserDefinedExtensionTest(_base.ORMTest):
instrumentation.register_class(Foo)
instrumentation.register_class(Bar)
- def func1(**kw):
- print "func1"
+ def func1(state, passive):
return "this is the foo attr"
- def func2(**kw):
- print "func2"
+ def func2(state, passive):
return "this is the bar attr"
- def func3(**kw):
- print "func3"
+ def func3(state, passive):
return "this is the shared attr"
- attributes.register_attribute(Foo, 'element', uselist=False, callable_=lambda o:func1, useobject=True)
- attributes.register_attribute(Foo, 'element2', uselist=False, callable_=lambda o:func3, useobject=True)
- attributes.register_attribute(Bar, 'element', uselist=False, callable_=lambda o:func2, useobject=True)
+ attributes.register_attribute(Foo, 'element',
+ uselist=False, callable_=func1,
+ useobject=True)
+ attributes.register_attribute(Foo, 'element2',
+ uselist=False, callable_=func3,
+ useobject=True)
+ attributes.register_attribute(Bar, 'element',
+ uselist=False, callable_=func2,
+ useobject=True)
x = Foo()
y = Bar()
@@ -224,8 +227,10 @@ class UserDefinedExtensionTest(_base.ORMTest):
instrumentation.register_class(Post)
instrumentation.register_class(Blog)
- attributes.register_attribute(Post, 'blog', uselist=False, backref='posts', trackparent=True, useobject=True)
- attributes.register_attribute(Blog, 'posts', uselist=True, backref='blog', trackparent=True, useobject=True)
+ attributes.register_attribute(Post, 'blog', uselist=False,
+ backref='posts', trackparent=True, useobject=True)
+ attributes.register_attribute(Blog, 'posts', uselist=True,
+ backref='blog', trackparent=True, useobject=True)
b = Blog()
(p1, p2, p3) = (Post(), Post(), Post())
b.posts.append(p1)
diff --git a/test/orm/test_unitofworkv2.py b/test/orm/test_unitofworkv2.py
index 766addc05..10049175a 100644
--- a/test/orm/test_unitofworkv2.py
+++ b/test/orm/test_unitofworkv2.py
@@ -223,6 +223,74 @@ class RudimentaryFlushTest(UOWTest):
{'id':u1.id}
),
)
+
+ def test_many_to_one_delete_unloaded(self):
+ mapper(User, users)
+ mapper(Address, addresses, properties={
+ 'parent':relationship(User)
+ })
+
+ parent = User(name='p1')
+ c1, c2 = Address(email_address='c1', parent=parent), \
+ Address(email_address='c2', parent=parent)
+
+ session = Session()
+ session.add_all([c1, c2])
+ session.add(parent)
+
+ session.flush()
+
+ pid = parent.id
+ c1id = c1.id
+ c2id = c2.id
+
+ session.expire(parent)
+ session.expire(c1)
+ session.expire(c2)
+
+ session.delete(c1)
+ session.delete(c2)
+ session.delete(parent)
+
+ # testing that relationships
+ # are loaded even if all ids/references are
+ # expired
+ self.assert_sql_execution(
+ testing.db,
+ session.flush,
+ AllOf(
+ # ensure all three m2os are loaded.
+ # the selects here are in fact unexpiring
+ # each row - the m2o comes from the identity map.
+ CompiledSQL(
+ "SELECT addresses.id AS addresses_id, addresses.user_id AS "
+ "addresses_user_id, addresses.email_address AS "
+ "addresses_email_address FROM addresses WHERE addresses.id = "
+ ":param_1",
+ lambda ctx: {'param_1': c1id}
+ ),
+ CompiledSQL(
+ "SELECT addresses.id AS addresses_id, addresses.user_id AS "
+ "addresses_user_id, addresses.email_address AS "
+ "addresses_email_address FROM addresses WHERE addresses.id = "
+ ":param_1",
+ lambda ctx: {'param_1': c2id}
+ ),
+ CompiledSQL(
+ "SELECT users.id AS users_id, users.name AS users_name "
+ "FROM users WHERE users.id = :param_1",
+ lambda ctx: {'param_1': pid}
+ ),
+ ),
+ CompiledSQL(
+ "DELETE FROM addresses WHERE addresses.id = :id",
+ lambda ctx: [{'id': c1id}, {'id': c2id}]
+ ),
+ CompiledSQL(
+ "DELETE FROM users WHERE users.id = :id",
+ lambda ctx: {'id': pid}
+ ),
+ )
def test_many_to_many(self):
mapper(Item, items, properties={
@@ -502,17 +570,52 @@ class SingleCycleTest(UOWTest):
sess = create_session()
n1 = Node(data='n1')
n1.children.append(Node(data='n11'))
- n1.children.append(Node(data='n12'))
+ n12 = Node(data='n12')
+ n1.children.append(n12)
n1.children.append(Node(data='n13'))
n1.children[1].children.append(Node(data='n121'))
n1.children[1].children.append(Node(data='n122'))
n1.children[1].children.append(Node(data='n123'))
sess.add(n1)
- sess.flush()
-# self.assert_sql_execution(
-# testing.db,
- # sess.flush,
- # )
+ self.assert_sql_execution(
+ testing.db,
+ sess.flush,
+ CompiledSQL(
+ "INSERT INTO nodes (parent_id, data) VALUES "
+ "(:parent_id, :data)",
+ lambda ctx:{'parent_id':None, 'data':'n1'}
+ ),
+ CompiledSQL(
+ "INSERT INTO nodes (parent_id, data) VALUES "
+ "(:parent_id, :data)",
+ lambda ctx:{'parent_id':n1.id, 'data':'n11'}
+ ),
+ CompiledSQL(
+ "INSERT INTO nodes (parent_id, data) VALUES "
+ "(:parent_id, :data)",
+ lambda ctx:{'parent_id':n1.id, 'data':'n12'}
+ ),
+ CompiledSQL(
+ "INSERT INTO nodes (parent_id, data) VALUES "
+ "(:parent_id, :data)",
+ lambda ctx:{'parent_id':n1.id, 'data':'n13'}
+ ),
+ CompiledSQL(
+ "INSERT INTO nodes (parent_id, data) VALUES "
+ "(:parent_id, :data)",
+ lambda ctx:{'parent_id':n12.id, 'data':'n121'}
+ ),
+ CompiledSQL(
+ "INSERT INTO nodes (parent_id, data) VALUES "
+ "(:parent_id, :data)",
+ lambda ctx:{'parent_id':n12.id, 'data':'n122'}
+ ),
+ CompiledSQL(
+ "INSERT INTO nodes (parent_id, data) VALUES "
+ "(:parent_id, :data)",
+ lambda ctx:{'parent_id':n12.id, 'data':'n123'}
+ ),
+ )
def test_singlecycle_flush_size(self):
mapper(Node, nodes, properties={
@@ -548,6 +651,76 @@ class SingleCycleTest(UOWTest):
n1.children
self._assert_uow_size(sess, 2)
+ def test_delete_unloaded_m2o(self):
+ mapper(Node, nodes, properties={
+ 'parent':relationship(Node, remote_side=nodes.c.id)
+ })
+
+ parent = Node()
+ c1, c2 = Node(parent=parent), Node(parent=parent)
+
+ session = Session()
+ session.add_all([c1, c2])
+ session.add(parent)
+
+ session.flush()
+
+ pid = parent.id
+ c1id = c1.id
+ c2id = c2.id
+
+ session.expire(parent)
+ session.expire(c1)
+ session.expire(c2)
+
+ session.delete(c1)
+ session.delete(c2)
+ session.delete(parent)
+
+ # testing that relationships
+ # are loaded even if all ids/references are
+ # expired
+ self.assert_sql_execution(
+ testing.db,
+ session.flush,
+ AllOf(
+ # ensure all three m2os are loaded.
+ # the selects here are in fact unexpiring
+ # each row - the m2o comes from the identity map.
+ CompiledSQL(
+ "SELECT nodes.id AS nodes_id, nodes.parent_id AS "
+ "nodes_parent_id, "
+ "nodes.data AS nodes_data FROM nodes "
+ "WHERE nodes.id = :param_1",
+ lambda ctx: {'param_1': pid}
+ ),
+ CompiledSQL(
+ "SELECT nodes.id AS nodes_id, nodes.parent_id AS "
+ "nodes_parent_id, "
+ "nodes.data AS nodes_data FROM nodes "
+ "WHERE nodes.id = :param_1",
+ lambda ctx: {'param_1': c1id}
+ ),
+ CompiledSQL(
+ "SELECT nodes.id AS nodes_id, nodes.parent_id AS "
+ "nodes_parent_id, "
+ "nodes.data AS nodes_data FROM nodes "
+ "WHERE nodes.id = :param_1",
+ lambda ctx: {'param_1': c2id}
+ ),
+ ),
+ CompiledSQL(
+ "DELETE FROM nodes WHERE nodes.id = :id",
+ lambda ctx: [{'id': c1id}, {'id': c2id}]
+ ),
+ CompiledSQL(
+ "DELETE FROM nodes WHERE nodes.id = :id",
+ lambda ctx: {'id': pid}
+ ),
+ )
+
+
+
class SingleCyclePlusAttributeTest(_base.MappedTest,
testing.AssertsExecutionResults, AssertsUOW):
@classmethod
diff --git a/test/sql/test_types.py b/test/sql/test_types.py
index 49065a590..700f7b7de 100644
--- a/test/sql/test_types.py
+++ b/test/sql/test_types.py
@@ -567,13 +567,11 @@ class UnicodeTest(TestBase, AssertsExecutionResults):
s = String()
uni = s.dialect_impl(unicode_engine.dialect).bind_processor(unicode_engine.dialect)
- # Py3K
# this is not the unicode type - no warning
+ # Py3K
#uni(b'x')
- #assert_raises(exc.SAWarning, uni, b'x')
#assert isinstance(uni(unicodedata), bytes)
# Py2K
- # this is not the unicode type - no warning
uni('x')
assert isinstance(uni(unicodedata), str)
# end Py2K