diff options
| author | Mike Bayer <mike_mp@zzzcomputing.com> | 2012-04-24 18:06:27 -0400 |
|---|---|---|
| committer | Mike Bayer <mike_mp@zzzcomputing.com> | 2012-04-24 18:06:27 -0400 |
| commit | 9cf10db8aa4692dc615f1a03db5ffe342c321586 (patch) | |
| tree | bc175ec68845968fd307317c062db44319d1688b /lib/sqlalchemy | |
| parent | cfe56e3735d2ba34923c36e9f015253e535ed1bd (diff) | |
| download | sqlalchemy-9cf10db8aa4692dc615f1a03db5ffe342c321586.tar.gz | |
- [feature] Calling rollback() within a
session.begin_nested() will now only expire
those objects that had net changes within the
scope of that transaction — that is, objects which
were dirty or were modified on a flush. This
allows the typical use case for begin_nested(),
that of altering a small subset of objects, to
leave in place the data from the larger enclosing
set of objects that weren't modified in
that sub-transaction. [ticket:2452]
- Inline the "register_newly_XYZ" functions to operate
upon collections, reducing the number of method calls
Diffstat (limited to 'lib/sqlalchemy')
| -rw-r--r-- | lib/sqlalchemy/orm/query.py | 34 | ||||
| -rw-r--r-- | lib/sqlalchemy/orm/session.py | 98 | ||||
| -rw-r--r-- | lib/sqlalchemy/orm/unitofwork.py | 15 |
3 files changed, 84 insertions, 63 deletions
diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py index dda231e0c..5cf9ea5cf 100644 --- a/lib/sqlalchemy/orm/query.py +++ b/lib/sqlalchemy/orm/query.py @@ -2424,7 +2424,7 @@ class Query(object): try: state(passive) except orm_exc.ObjectDeletedError: - session._remove_newly_deleted(state) + session._remove_newly_deleted([state]) return None return instance else: @@ -2650,18 +2650,20 @@ class Query(object): result = session.execute(delete_stmt, params=self._params) if synchronize_session == 'evaluate': - for obj in objs_to_expunge: - session._remove_newly_deleted(attributes.instance_state(obj)) + session._remove_newly_deleted([attributes.instance_state(obj) + for obj in objs_to_expunge]) elif synchronize_session == 'fetch': target_mapper = self._mapper_zero() for primary_key in matched_rows: + # TODO: inline this and call remove_newly_deleted + # once identity_key = target_mapper.identity_key_from_primary_key( list(primary_key)) if identity_key in session.identity_map: session._remove_newly_deleted( - attributes.instance_state( + [attributes.instance_state( session.identity_map[identity_key] - ) + )] ) session.dispatch.after_bulk_delete(session, self, context, result) @@ -2788,7 +2790,7 @@ class Query(object): if synchronize_session == 'evaluate': target_cls = self._mapper_zero().class_ - + states = set() for obj in matched_objects: state, dict_ = attributes.instance_state(obj),\ attributes.instance_dict(obj) @@ -2806,18 +2808,24 @@ class Query(object): state.expire_attributes(dict_, set(evaluated_keys). 
difference(to_evaluate)) + states.add(state) + session._register_altered(states) elif synchronize_session == 'fetch': target_mapper = self._mapper_zero() - for primary_key in matched_rows: - identity_key = target_mapper.identity_key_from_primary_key( + states = set([ + attributes.instance_state(session.identity_map[identity_key]) + for identity_key in [ + target_mapper.identity_key_from_primary_key( list(primary_key)) - if identity_key in session.identity_map: - session.expire( - session.identity_map[identity_key], - [_attr_as_key(k) for k in values] - ) + for primary_key in matched_rows + ] + ]) + attrib = [_attr_as_key(k) for k in values] + for state in states: + session._expire_state(state, attrib) + session._register_altered(states) session.dispatch.after_bulk_update(session, self, context, result) diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py index 7c2cd8f0e..eb15e033e 100644 --- a/lib/sqlalchemy/orm/session.py +++ b/lib/sqlalchemy/orm/session.py @@ -211,6 +211,7 @@ class SessionTransaction(object): if not self._is_transaction_boundary: self._new = self._parent._new self._deleted = self._parent._deleted + self._dirty = self._parent._dirty return if not self.session._flushing: @@ -218,8 +219,9 @@ class SessionTransaction(object): self._new = weakref.WeakKeyDictionary() self._deleted = weakref.WeakKeyDictionary() + self._dirty = weakref.WeakKeyDictionary() - def _restore_snapshot(self): + def _restore_snapshot(self, dirty_only=False): assert self._is_transaction_boundary for s in set(self._new).union(self.session._new): @@ -236,7 +238,8 @@ class SessionTransaction(object): assert not self.session._deleted for s in self.session.identity_map.all_states(): - s.expire(s.dict, self.session.identity_map._modified) + if not dirty_only or s.modified or s in self._dirty: + s.expire(s.dict, self.session.identity_map._modified) def _remove_snapshot(self): assert self._is_transaction_boundary @@ -351,7 +354,7 @@ class SessionTransaction(object): 
"Session's state has been changed on " "a non-active transaction - this state " "will be discarded.") - self._restore_snapshot() + self._restore_snapshot(dirty_only=self.nested) self.close() if self._parent and _capture_exception: @@ -366,7 +369,7 @@ class SessionTransaction(object): t[1].rollback() if self.session._enable_transaction_accounting: - self._restore_snapshot() + self._restore_snapshot(dirty_only=self.nested) self.session.dispatch.after_rollback(self.session) @@ -1185,53 +1188,62 @@ class Session(object): elif self.transaction: self.transaction._deleted.pop(state, None) - def _register_newly_persistent(self, state): - mapper = _state_mapper(state) - - # prevent against last minute dereferences of the object - obj = state.obj() - if obj is not None: - - instance_key = mapper._identity_key_from_state(state) - - if _none_set.issubset(instance_key[1]) and \ - not mapper.allow_partial_pks or \ - _none_set.issuperset(instance_key[1]): - raise exc.FlushError( - "Instance %s has a NULL identity key. If this is an " - "auto-generated value, check that the database table " - "allows generation of new primary key values, and that " - "the mapped Column object is configured to expect these " - "generated values. Ensure also that this flush() is " - "not occurring at an inappropriate time, such as within " - "a load() event." % mapperutil.state_str(state) - ) + def _register_newly_persistent(self, states): + for state in states: + mapper = _state_mapper(state) + + # prevent against last minute dereferences of the object + obj = state.obj() + if obj is not None: + + instance_key = mapper._identity_key_from_state(state) + + if _none_set.issubset(instance_key[1]) and \ + not mapper.allow_partial_pks or \ + _none_set.issuperset(instance_key[1]): + raise exc.FlushError( + "Instance %s has a NULL identity key. 
If this is an " + "auto-generated value, check that the database table " + "allows generation of new primary key values, and that " + "the mapped Column object is configured to expect these " + "generated values. Ensure also that this flush() is " + "not occurring at an inappropriate time, such as within " + "a load() event." % mapperutil.state_str(state) + ) - if state.key is None: - state.key = instance_key - elif state.key != instance_key: - # primary key switch. use discard() in case another - # state has already replaced this one in the identity - # map (see test/orm/test_naturalpks.py ReversePKsTest) - self.identity_map.discard(state) - state.key = instance_key + if state.key is None: + state.key = instance_key + elif state.key != instance_key: + # primary key switch. use discard() in case another + # state has already replaced this one in the identity + # map (see test/orm/test_naturalpks.py ReversePKsTest) + self.identity_map.discard(state) + state.key = instance_key - self.identity_map.replace(state) - state.commit_all(state.dict, self.identity_map) + self.identity_map.replace(state) + state.commit_all(state.dict, self.identity_map) + self._register_altered(states) # remove from new last, might be the last strong ref - if state in self._new: - if self._enable_transaction_accounting and self.transaction: - self.transaction._new[state] = True + for state in set(states).intersection(self._new): self._new.pop(state) - def _remove_newly_deleted(self, state): + def _register_altered(self, states): if self._enable_transaction_accounting and self.transaction: - self.transaction._deleted[state] = True + for state in states: + if state in self._new: + self.transaction._new[state] = True + else: + self.transaction._dirty[state] = True - self.identity_map.discard(state) - self._deleted.pop(state, None) - state.deleted = True + def _remove_newly_deleted(self, states): + for state in states: + if self._enable_transaction_accounting and self.transaction: + 
self.transaction._deleted[state] = True + + self.identity_map.discard(state) + self._deleted.pop(state, None) + state.deleted = True def add(self, instance): """Place an object in the ``Session``. diff --git a/lib/sqlalchemy/orm/unitofwork.py b/lib/sqlalchemy/orm/unitofwork.py index 3523e7d06..bc3be8b41 100644 --- a/lib/sqlalchemy/orm/unitofwork.py +++ b/lib/sqlalchemy/orm/unitofwork.py @@ -339,13 +339,14 @@ class UOWTransaction(object): execute() method has succeeded and the transaction has been committed. """ - for state, (isdelete, listonly) in self.states.iteritems(): - if isdelete: - self.session._remove_newly_deleted(state) - else: - # if listonly: - # debug... would like to see how many do this - self.session._register_newly_persistent(state) + states = set(self.states) + isdel = set( + s for (s, (isdelete, listonly)) in self.states.iteritems() + if isdelete + ) + other = states.difference(isdel) + self.session._remove_newly_deleted(isdel) + self.session._register_newly_persistent(other) class IterateMappersMixin(object): def _mappers(self, uow): |
