| author | Diana Clarke <diana.joan.clarke@gmail.com> | 2012-11-19 17:19:24 -0500 |
|---|---|---|
| committer | Diana Clarke <diana.joan.clarke@gmail.com> | 2012-11-19 17:19:24 -0500 |
| commit | aba75454d064b157b546dbc379043f58c83a2b6d (patch) | |
| tree | 9921c6114bcccca3b72c50b150f42d3ae6f2c76d /lib/sqlalchemy/orm/loading.py | |
| parent | dc81c91882a24e12679ff978aeb10be8b651a582 (diff) | |
| download | sqlalchemy-aba75454d064b157b546dbc379043f58c83a2b6d.tar.gz | |
just a pep8 pass of lib/sqlalchemy/orm/
Diffstat (limited to 'lib/sqlalchemy/orm/loading.py')
-rw-r--r-- | lib/sqlalchemy/orm/loading.py | 10 |
1 file changed, 8 insertions, 2 deletions
diff --git a/lib/sqlalchemy/orm/loading.py b/lib/sqlalchemy/orm/loading.py
index df1477210..a5d156a1f 100644
--- a/lib/sqlalchemy/orm/loading.py
+++ b/lib/sqlalchemy/orm/loading.py
@@ -23,6 +23,7 @@ sessionlib = util.importlater("sqlalchemy.orm", "session")
 
 _new_runid = util.counter()
 
+
 def instances(query, cursor, context):
     """Return an ORM result as an iterator."""
     session = query.session
@@ -96,6 +97,7 @@ def instances(query, cursor, context):
             if not query._yield_per:
                 break
 
+
 def merge_result(query, iterator, load=True):
     """Merge a result into this :class:`.Query` object's Session."""
 
@@ -137,6 +139,7 @@ def merge_result(query, iterator, load=True):
     finally:
         session.autoflush = autoflush
 
+
 def get_from_identity(session, key, passive):
     """Look up the given key in the given session's identity map,
     check the object for expired state if found.
@@ -165,6 +168,7 @@ def get_from_identity(session, key, passive):
         else:
             return None
 
+
 def load_on_ident(query, key,
                     refresh_state=None, lockmode=None,
                     only_load_props=None):
@@ -222,6 +226,7 @@ def load_on_ident(query, key,
     except orm_exc.NoResultFound:
         return None
 
+
 def instance_processor(mapper, context, path, adapter,
                             polymorphic_from=None,
                             only_load_props=None,
@@ -475,7 +480,6 @@ def instance_processor(mapper, context, path, adapter,
             if isnew:
                 state.manager.dispatch.refresh(state, context, attrs)
 
-
         if result is not None:
             if append_result:
                 for fn in append_result:
@@ -491,6 +495,7 @@ def instance_processor(mapper, context, path, adapter,
             return instance
     return _instance
 
+
 def _populators(mapper, context, path, row, adapter,
                     new_populators, existing_populators, eager_populators):
     """Produce a collection of attribute level row processor
@@ -509,6 +514,7 @@ def _populators(mapper, context, path, row, adapter,
     if delayed_populators:
         new_populators.extend(delayed_populators)
 
+
 def _configure_subclass_mapper(mapper, context, path, adapter):
     """Produce a mapper level row processor callable factory for mappers
     inheriting this one."""
@@ -538,6 +544,7 @@ def _configure_subclass_mapper(mapper, context, path, adapter):
                 polymorphic_from=mapper)
     return configure_subclass_mapper
 
+
 def load_scalar_attributes(mapper, state, attribute_names):
     """initiate a column-based attribute refresh operation."""
 
@@ -599,4 +606,3 @@ def load_scalar_attributes(mapper, state, attribute_names):
     # may not complete (even if PK attributes are assigned)
     if has_key and result is None:
         raise orm_exc.ObjectDeletedError(state)
-
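The changes above are whitespace-only: each `+` hunk inserts a second blank line before a top-level `def`, and the two `-` hunks drop a surplus blank line inside `instance_processor` and the trailing blank line at the end of the file. This matches PEP 8's rule of surrounding top-level function and class definitions with two blank lines. A minimal sketch of the convention, using hypothetical functions rather than anything from `loading.py`:

```python
# Hypothetical module illustrating the blank-line convention applied in
# this commit; helper_one and helper_two are not SQLAlchemy functions.

def helper_one():
    """Top-level definitions are surrounded by two blank lines (PEP 8)."""
    return 1


def helper_two():
    """Exactly two blank lines separate this from helper_one above."""
    return 2
# The module ends without a trailing blank line, matching the final hunk.
```

A checker in the `pep8`/`pycodestyle` family flags these patterns as E302 (expected 2 blank lines), E303 (too many blank lines), and W391 (blank line at end of file), which is presumably what this pass was run against.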