| author | Mike Bayer <mike_mp@zzzcomputing.com> | 2015-03-01 16:09:11 -0500 |
|---|---|---|
| committer | Mike Bayer <mike_mp@zzzcomputing.com> | 2015-03-01 16:09:11 -0500 |
| commit | 79cbd377a62d291103aa0e378f0665e2e09185b2 (patch) | |
| tree | a9aaebe8daeef5f0b4bb4f229a75ffeaad007051 /lib/sqlalchemy/orm/loading.py | |
| parent | c5edbc6fdc611d3c812735d83fe056fbb7d113f5 (diff) | |
| download | sqlalchemy-79cbd377a62d291103aa0e378f0665e2e09185b2.tar.gz | |
- Squash-merge the final row_proc integration branch. This is
  a much more modest outcome than what we started with: the
  work of create_row_processor() for ColumnProperty objects is
  now essentially done at query setup time, combined with some
  lookups in _instance_processor().
- To allow this change for deferred columns, deferred columns
  no longer search for themselves in the result. If they've been
  set up as deferred without any explicit directive to undefer
  them, then that is what was asked for. If we didn't do this,
  we'd be stuck with the performance penalty for all deferred
  columns, which in the vast majority of typical use cases
  (e.g. loading large legacy tables, or tables with many large,
  very seldom used values) won't be present in the result and
  won't be accessed at all (see the usage sketch below).
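
A minimal usage sketch of the deferred-column behavior described in the second
bullet, using a hypothetical Report model (illustrative only, not part of this
commit): a column mapped with deferred() is no longer searched for in the
result row at all, and is only loaded up front when an undefer() option
explicitly asks for it.

    from sqlalchemy import Column, Integer, Text, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import Session, deferred, undefer

    Base = declarative_base()

    class Report(Base):
        __tablename__ = 'report'
        id = Column(Integer, primary_key=True)
        # large, seldom-used value: stays deferred unless explicitly undeferred
        data = deferred(Column(Text))

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = Session(bind=engine)

    # Default load: Report.data is skipped entirely at row-processing time;
    # touching report.data later emits its own SELECT.
    reports = session.query(Report).all()

    # Explicit opt-in: undefer() brings the column back into the main query
    # and it is populated from the row like any other column.
    reports = session.query(Report).options(undefer(Report.data)).all()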
Diffstat (limited to 'lib/sqlalchemy/orm/loading.py')
-rw-r--r-- | lib/sqlalchemy/orm/loading.py | 95 |
1 file changed, 85 insertions, 10 deletions
diff --git a/lib/sqlalchemy/orm/loading.py b/lib/sqlalchemy/orm/loading.py
index c59257039..64c7e171c 100644
--- a/lib/sqlalchemy/orm/loading.py
+++ b/lib/sqlalchemy/orm/loading.py
@@ -18,6 +18,7 @@ from .. import util
 from . import attributes, exc as orm_exc
 from ..sql import util as sql_util
 from .util import _none_set, state_str
+from .base import _SET_DEFERRED_EXPIRED, _DEFER_FOR_STATE
 from .. import exc as sa_exc
 
 import collections
@@ -218,10 +219,56 @@ def load_on_ident(query, key,
         return None
 
 
-def instance_processor(mapper, context, result, path, adapter,
-                       only_load_props=None, refresh_state=None,
-                       polymorphic_discriminator=None,
-                       _polymorphic_from=None):
+def _setup_entity_query(
+        context, mapper, query_entity,
+        path, adapter, column_collection,
+        with_polymorphic=None, only_load_props=None,
+        polymorphic_discriminator=None, **kw):
+
+    if with_polymorphic:
+        poly_properties = mapper._iterate_polymorphic_properties(
+            with_polymorphic)
+    else:
+        poly_properties = mapper._polymorphic_properties
+
+    quick_populators = {}
+
+    path.set(
+        context.attributes,
+        "memoized_setups",
+        quick_populators)
+
+    for value in poly_properties:
+        if only_load_props and \
+                value.key not in only_load_props:
+            continue
+        value.setup(
+            context,
+            query_entity,
+            path,
+            adapter,
+            only_load_props=only_load_props,
+            column_collection=column_collection,
+            memoized_populators=quick_populators,
+            **kw
+        )
+
+    if polymorphic_discriminator is not None and \
+            polymorphic_discriminator \
+            is not mapper.polymorphic_on:
+
+        if adapter:
+            pd = adapter.columns[polymorphic_discriminator]
+        else:
+            pd = polymorphic_discriminator
+        column_collection.append(pd)
+
+
+def _instance_processor(
+        mapper, context, result, path, adapter,
+        only_load_props=None, refresh_state=None,
+        polymorphic_discriminator=None,
+        _polymorphic_from=None):
     """Produce a mapper level row processor callable
        which processes rows into mapped instances."""
 
@@ -240,13 +287,41 @@ def instance_processor(mapper, context, result, path, adapter,
 
     populators = collections.defaultdict(list)
 
-    props = mapper._props.values()
+    props = mapper._prop_set
     if only_load_props is not None:
-        props = (p for p in props if p.key in only_load_props)
+        props = props.intersection(
+            mapper._props[k] for k in only_load_props)
+
+    quick_populators = path.get(
+        context.attributes, "memoized_setups", _none_set)
 
     for prop in props:
-        prop.create_row_processor(
-            context, path, mapper, result, adapter, populators)
+        if prop in quick_populators:
+            # this is an inlined path just for column-based attributes.
+            col = quick_populators[prop]
+            if col is _DEFER_FOR_STATE:
+                populators["new"].append(
+                    (prop.key, prop._deferred_column_loader))
+            elif col is _SET_DEFERRED_EXPIRED:
+                # note that in this path, we are no longer
+                # searching in the result to see if the column might
+                # be present in some unexpected way.
+                populators["expire"].append((prop.key, False))
+            else:
+                if adapter:
+                    col = adapter.columns[col]
+                getter = result._getter(col)
+                if getter:
+                    populators["quick"].append((prop.key, getter))
+                else:
+                    # fall back to the ColumnProperty itself, which
+                    # will iterate through all of its columns
+                    # to see if one fits
+                    prop.create_row_processor(
+                        context, path, mapper, result, adapter, populators)
+        else:
+            prop.create_row_processor(
+                context, path, mapper, result, adapter, populators)
 
     propagate_options = context.propagate_options
     if propagate_options:
@@ -388,7 +463,7 @@ def instance_processor(mapper, context, result, path, adapter,
 
         return instance
 
-    if not _polymorphic_from and not refresh_state:
+    if mapper.polymorphic_map and not _polymorphic_from and not refresh_state:
         # if we are doing polymorphic, dispatch to a different _instance()
         # method specific to the subclass mapper
         _instance = _decorate_polymorphic_switch(
@@ -503,7 +578,7 @@ def _decorate_polymorphic_switch(
         if sub_mapper is mapper:
             return None
 
-        return instance_processor(
+        return _instance_processor(
             sub_mapper, context, result, path, adapter,
             _polymorphic_from=mapper)
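
For readers outside the SQLAlchemy internals, the following standalone sketch
illustrates the quick-populator pattern the diff introduces. All names here
(setup_quick_populators, process_row, _SKIP) are hypothetical and are not the
library's API: the point is only that the per-attribute decision, use a
precomputed positional getter or skip the attribute as deferred, is made once
at query setup, so per-row processing performs no searching.

    # Hypothetical, self-contained illustration of the quick-populator idea;
    # none of these names are SQLAlchemy APIs.

    _SKIP = object()  # sentinel: attribute is deferred, never consult the row


    def setup_quick_populators(column_order, deferred_keys):
        """Run once at query setup: map each attribute key to either a
        positional getter or the _SKIP sentinel."""
        quick = {}
        for index, key in enumerate(column_order):
            if key in deferred_keys:
                quick[key] = _SKIP
            else:
                quick[key] = lambda row, i=index: row[i]
        return quick


    def process_row(quick_populators, row):
        """Run per row: apply the precomputed decisions, no per-row lookups."""
        instance = {}
        for key, populate in quick_populators.items():
            if populate is _SKIP:
                continue  # deferred column: loaded later or left expired
            instance[key] = populate(row)
        return instance


    quick = setup_quick_populators(["id", "name", "data"], deferred_keys={"data"})
    print(process_row(quick, (1, "report-1", "...large, seldom-used blob...")))
    # -> {'id': 1, 'name': 'report-1'}

In the real code above, the analogous map is built and stored via
path.set(context.attributes, "memoized_setups", ...) inside
_setup_entity_query() and consulted again in _instance_processor().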