author     Mike Bayer <mike_mp@zzzcomputing.com>  2015-02-28 13:30:22 -0500
committer  Mike Bayer <mike_mp@zzzcomputing.com>  2015-02-28 13:30:22 -0500
commit     c81a8143535cb9005a88969f2d828af8b628c10e (patch)
tree       ae752c105203a60eb92cdb7be789f1a259e0134b
parent     50fcf349fb0afab78af8bb05066143aea7519359 (diff)
download   sqlalchemy-c81a8143535cb9005a88969f2d828af8b628c10e.tar.gz
- another approach: put row_processor back; don't do the thing with ad-hoc callables anymore, but at least make the path by which row_processor gets called more local to loading.py
-rw-r--r--  lib/sqlalchemy/engine/result.py      8
-rw-r--r--  lib/sqlalchemy/orm/base.py          12
-rw-r--r--  lib/sqlalchemy/orm/interfaces.py    43
-rw-r--r--  lib/sqlalchemy/orm/loading.py      106
-rw-r--r--  lib/sqlalchemy/orm/query.py        101
-rw-r--r--  lib/sqlalchemy/orm/strategies.py    83
6 files changed, 145 insertions, 208 deletions
diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py
index 7a8067f1e..70f13f713 100644
--- a/lib/sqlalchemy/engine/result.py
+++ b/lib/sqlalchemy/engine/result.py
@@ -126,6 +126,9 @@ class RowProxy(BaseRowProxy):
__hash__ = None
+ def _getter(self, key):
+ return self._parent._getter(key)
+
def _op(self, other, op):
return op(tuple(self), tuple(other)) \
if isinstance(other, RowProxy) \
@@ -355,7 +358,10 @@ class ResultMetaData(object):
def _getter(self, key):
index = self._index_of(key)
- return operator.itemgetter(index)
+ if index is None:
+ return None
+ else:
+ return operator.itemgetter(index)
def __getstate__(self):
return {
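The result.py change above gives callers a way to probe whether a column is actually present: ResultMetaData._getter() now returns None for a key it cannot locate instead of building an itemgetter, and RowProxy exposes the same hook. A minimal usage sketch (the function name and arguments are placeholders, not names from the patch):

    def value_for(row, column):
        getter = row._getter(column)    # RowProxy delegates to its ResultMetaData
        if getter is None:
            return None                 # column is not part of this result set
        return getter(row)              # a plain operator.itemgetter(index)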
diff --git a/lib/sqlalchemy/orm/base.py b/lib/sqlalchemy/orm/base.py
index 7330caa3d..ec7598931 100644
--- a/lib/sqlalchemy/orm/base.py
+++ b/lib/sqlalchemy/orm/base.py
@@ -352,10 +352,16 @@ def _is_aliased_class(entity):
class _IdxLoader(object):
- __slots__ = ('__call__',)
+ __slots__ = ('initial_key', '__call__',)
- def setup(self, index):
- self.__call__ = operator.itemgetter(index)
+ def __init__(self, initial_key):
+ self.initial_key = initial_key
+ self.__call__ = self._initial_get
+
+ def _initial_get(self, row):
+ getter = row._getter(self.initial_key)
+ self.__call__ = getter
+ return self(row)
def _entity_descriptor(entity, key):
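_IdxLoader now takes its key up front and resolves the real getter lazily from the first row it sees, then rebinds itself so later rows pay only the cost of an itemgetter. A sketch of the same idea follows; it stores the callable under a plain attribute name rather than a __call__ slot, since obj(row) dispatches through type(obj).__call__ and would bypass an instance attribute (the patched class is presumably invoked through its stored attribute):

    class LazyIdxLoader(object):
        # illustrative stand-in for _IdxLoader, not the class in the patch
        __slots__ = ('initial_key', 'get')

        def __init__(self, initial_key):
            self.initial_key = initial_key
            self.get = self._initial_get          # bootstrap callable

        def _initial_get(self, row):
            # first row seen: ask the row where initial_key lives,
            # then replace ourselves with the plain getter
            self.get = row._getter(self.initial_key)
            return self.get(row)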
diff --git a/lib/sqlalchemy/orm/interfaces.py b/lib/sqlalchemy/orm/interfaces.py
index 45ab9b48f..b1b73144a 100644
--- a/lib/sqlalchemy/orm/interfaces.py
+++ b/lib/sqlalchemy/orm/interfaces.py
@@ -109,7 +109,7 @@ class MapperProperty(_MappedAttribute, InspectionAttr, util.MemoizedSlots):
def setup(
self, context, query_entity, path, mapper,
- adapter, column_collection, populators, **kw):
+ adapter, column_collection, **kw):
"""Called by Query for the purposes of constructing a SQL statement.
Each MapperProperty associated with the target mapper processes the
@@ -118,14 +118,10 @@ class MapperProperty(_MappedAttribute, InspectionAttr, util.MemoizedSlots):
"""
- def setup_for_missing_attribute(
- self, context, query_entity, path, mapper, populators, **kw):
- """Setup a strategy for a Query where this property was not yet
- included.
-
- This function can do everything that setup() does, *except* attempt
- to modify the SQL query; the method may be called after the query
- has already been emitted and results are being received.
+ def create_row_processor(self, context, path,
+ mapper, result, adapter, populators):
+ """Produce row processing functions and append to the given
+ set of populators lists.
"""
@@ -496,7 +492,7 @@ class StrategizedProperty(MapperProperty):
def setup(
self, context, query_entity, path, mapper,
- adapter, column_collection, populators, **kw):
+ adapter, column_collection, **kw):
loadopt = self._get_context_loader(context, path)
if loadopt and loadopt.strategy:
@@ -506,19 +502,20 @@ class StrategizedProperty(MapperProperty):
strat.setup_query(
context, query_entity, path, mapper,
- adapter, column_collection, populators, loadopt, **kw)
+ adapter, column_collection, loadopt, **kw)
- def setup_for_missing_attribute(
- self, context, query_entity, path, mapper,
- populators, **kw):
+ def create_row_processor(
+ self, context, path, mapper,
+ adapter, result, populators, **kw):
loadopt = self._get_context_loader(context, path)
if loadopt and loadopt.strategy:
strat = self._get_strategy(loadopt.strategy)
else:
strat = self.strategy
- strat.setup_for_missing_attribute(
- context, query_entity, path, mapper, populators, loadopt, **kw)
+ strat.create_row_processor(
+ context, path,
+ mapper, adapter, result, populators, loadopt, **kw)
def do_init(self):
self._strategies = {}
@@ -618,7 +615,7 @@ class LoaderStrategy(object):
def setup_query(
self, context, query_entity, path, mapper,
- adapter, column_collection, populators, loadopt, **kw):
+ adapter, column_collection, loadopt, **kw):
"""Establish column and other state for a given QueryContext.
This method fulfills the contract specified by MapperProperty.setup().
@@ -628,14 +625,10 @@ class LoaderStrategy(object):
"""
- def setup_for_missing_attribute(
- self, context, query_entity, path, mapper,
- populators, loadopt, **kw):
- """Establish loader behavior for an attribute that's not accommodated
- by the query.
-
- This is used for polymorphic loading when a subclass load is detected.
- """
+ def create_row_processor(
+ self, context, path,
+ mapper, adapter, result, populators, loadopt, **kw):
+ pass
def __str__(self):
return str(self.parent_property)
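Under the reshaped LoaderStrategy contract above, a strategy no longer reaches back into the SQL statement at load time; it only appends row-handling callables into the shared populators dict of lists. A hypothetical skeleton under that signature (the class and its body are placeholders; the "new" bucket name matches the strategies further down):

    class ExampleStrategy(LoaderStrategy):
        # sketch only; a real strategy also implements setup_query(),
        # init_class_attribute(), etc.
        def create_row_processor(
                self, context, path,
                mapper, adapter, result, populators, loadopt, **kw):
            def populate(state, dict_, row):
                dict_[self.key] = None            # placeholder logic
            # callables in the "new" bucket are invoked as (state, dict_, row),
            # as the strategies in strategies.py below do
            populators["new"].append((self.key, populate))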
diff --git a/lib/sqlalchemy/orm/loading.py b/lib/sqlalchemy/orm/loading.py
index add2ae43b..f3edc9e29 100644
--- a/lib/sqlalchemy/orm/loading.py
+++ b/lib/sqlalchemy/orm/loading.py
@@ -44,15 +44,13 @@ def instances(query, cursor, context):
return tuple(fn(x) for x, fn in zip(row, filter_fns))
if context._predefined_statement:
- # if the Query didn't actually build the statement,
- # we use the result set to determine where the columns
- # we're looking for are located.
- context._setup_column_processors(
- [
- (cursor._index_of(col), col)
- for col in context.primary_columns + context.secondary_columns
- ]
- )
+ for query_entity in query._entities:
+ query_entity.setup_context(query, context, cursor)
+ else:
+ for create_row_processor, path, mapper, adapter, populators in \
+ context.todo:
+ create_row_processor(
+ context, path, mapper, adapter, cursor, populators)
try:
(labels, process) = list(zip(*context.loaders))
@@ -227,43 +225,20 @@ def load_on_ident(query, key,
def _instance_processor(
- mapper, props_toload, context, column_collection,
+ mapper, props_toload, context, result, column_collection,
query_entity, path, adapter,
only_load_props=None, refresh_state=None,
polymorphic_discriminator=None,
_polymorphic_from=None,
- _polymorphic_pk_getters=None,
- _polymorphic_from_populators=None):
+ _polymorphic_pk_getters=None):
"""Produce a mapper level row processor callable
which processes rows into mapped instances."""
load_is_polymorphic = mapper.polymorphic_on is not None
- if _polymorphic_from_populators is not None:
- # if we are a subclass loader, then we use the collection
- # of populators that were already set up for us.
-
- populators = _polymorphic_from_populators
-
+ if result is None:
for prop in props_toload:
- prop.setup_for_missing_attribute(
- context, query_entity, path, mapper,
- _polymorphic_from_populators
- )
- else:
- populators = collections.defaultdict(list)
-
- if load_is_polymorphic:
- per_mapper_populators = collections.defaultdict(
- lambda: collections.defaultdict(list))
- per_mapper_populators[mapper] = populators
-
- # establish all columns and column loaders up front
- # across subclass mappers as well. Categorize loaders
- # into mapper-specific buckets.
-
- for prop in props_toload:
prop.setup(
context,
query_entity,
@@ -271,9 +246,6 @@ def _instance_processor(
mapper,
adapter,
column_collection,
- populators=per_mapper_populators[prop.parent]
- if load_is_polymorphic and not mapper.isa(prop.parent)
- else populators,
only_load_props=only_load_props,
)
@@ -285,18 +257,24 @@ def _instance_processor(
if adapter:
pk_cols = [adapter.columns[c] for c in pk_cols]
- pk_getters = [_IdxLoader()] * len(pk_cols)
- context.column_processors.extend(
- (pk_col, pk_getter.setup)
- for pk_col, pk_getter in zip(pk_cols, pk_getters)
- )
+ pk_getters = [_IdxLoader(pk_col) for pk_col in pk_cols]
identity_class = mapper._identity_class
+ populators = collections.defaultdict(list)
props = mapper._props.values()
if only_load_props is not None:
props = (p for p in props if p.key in only_load_props)
+ if result is None:
+ for prop in props:
+ context.todo.append(
+ (prop.create_row_processor, path, mapper, adapter, populators))
+ else:
+ for prop in props:
+ prop.create_row_processor(
+ context, path, mapper, adapter, result, populators)
+
propagate_options = context.propagate_options
if propagate_options:
load_path = context.query._current_path + path \
@@ -440,8 +418,7 @@ def _instance_processor(
# if we are doing polymorphic, dispatch to a different _instance()
# method specific to the subclass mapper
_instance = _decorate_polymorphic_switch(
- _instance, context, mapper, props_toload,
- per_mapper_populators, path,
+ _instance, context, mapper, query_entity, path,
polymorphic_discriminator, adapter, pk_getters)
return _instance
@@ -529,9 +506,8 @@ def _validate_version_id(mapper, state, dict_, row, adapter):
def _decorate_polymorphic_switch(
- instance_fn, context, mapper, props_toload,
- per_mapper_populators, path,
- polymorphic_discriminator, adapter, pk_getters):
+ instance_fn, context, mapper, query_entity,
+ path, polymorphic_discriminator, adapter, pk_getters):
if polymorphic_discriminator is not None:
polymorphic_on = polymorphic_discriminator
@@ -543,13 +519,12 @@ def _decorate_polymorphic_switch(
if adapter:
polymorphic_on = adapter.columns[polymorphic_on]
- polymorphic_getter = _IdxLoader()
if polymorphic_discriminator is not mapper.polymorphic_on:
context.primary_columns.append(polymorphic_on)
- context.column_processors.append(
- (polymorphic_on, polymorphic_getter.setup))
- props_setup = set(props_toload)
+ polymorphic_getter = _IdxLoader(polymorphic_on)
+
+ result = []
def configure_subclass_mapper(discriminator):
try:
@@ -562,35 +537,18 @@ def _decorate_polymorphic_switch(
if sub_mapper is mapper:
return None
- populators = collections.defaultdict(list)
- for super_mapper in sub_mapper.iterate_to_root():
- mapper_populators = per_mapper_populators[super_mapper]
- for k, collection in mapper_populators.items():
- populators[k].extend(collection)
- if super_mapper is mapper:
- break
-
- keys_setup = set(p.key for p in props_setup)
-
- props_needed = set(
- prop for prop in sub_mapper._props.values()
- ).difference(props_setup)
-
- props_needed = props_needed.difference(
- p for p in props_needed if p.key in keys_setup
- )
-
return _instance_processor(
- sub_mapper, props_needed, context, None, None,
- path, adapter, _polymorphic_from=mapper,
- _polymorphic_pk_getters=pk_getters,
- _polymorphic_from_populators=populators)
+ sub_mapper, None, context, result[0], None,
+ query_entity, path, adapter, _polymorphic_from=mapper,
+ _polymorphic_pk_getters=pk_getters)
polymorphic_instances = util.PopulateDict(
configure_subclass_mapper
)
def polymorphic_instance(row):
+ if not result:
+ result.append(row)
discriminator = polymorphic_getter(row)
if discriminator is not None:
_instance = polymorphic_instances[discriminator]
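Two mechanics above are worth noting. First, context.todo is what keeps row_processor calls local to loading.py: with a predefined statement, per-property setup is queued during compilation and replayed in instances() once the live cursor exists. Second, the polymorphic switch builds subclass processors lazily per discriminator, so it captures the first row it sees in a one-element result list and hands that row to _instance_processor() as the result against which getters resolve. The capture trick in isolation, with hypothetical names:

    def make_polymorphic_switch(build_processor):
        first_row = []                    # filled exactly once, shared by closures
        processors = {}

        def switch(row):
            if not first_row:
                first_row.append(row)     # subclass processors will resolve
                                          # their getters against this row
            disc = row[0]                 # stand-in for the discriminator getter
            if disc not in processors:
                processors[disc] = build_processor(disc, first_row[0])
            return processors[disc](row)

        return switch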
diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py
index 147b4cf0c..0191c38c1 100644
--- a/lib/sqlalchemy/orm/query.py
+++ b/lib/sqlalchemy/orm/query.py
@@ -20,7 +20,7 @@ database to return iterable result sets.
"""
from __future__ import absolute_import
-import collections
+import operator
from itertools import chain
from . import (
@@ -2936,6 +2936,10 @@ class Query(object):
def _compile_context(self, labels=True):
context = QueryContext(self)
+ if context.statement is not None:
+ context._predefined_statement = True
+ return context
+
context.labels = labels
context._for_update_arg = self._for_update_arg
@@ -2943,10 +2947,6 @@ class Query(object):
for entity in self._entities:
entity.setup_context(self, context)
- if context.statement is not None:
- context._predefined_statement = True
- return context
-
for rec in context.create_eager_joins:
strategy = rec[0]
strategy(*rec[1:])
@@ -3018,7 +3018,8 @@ class Query(object):
equivs = self.__all_equivs()
- outer_adapter = sql_util.ColumnAdapter(inner, equivs)
+ outer_adapter = context.outer_adapter = \
+ sql_util.ColumnAdapter(inner, equivs)
statement = sql.select(
[inner] + context.secondary_columns,
@@ -3046,22 +3047,6 @@ class Query(object):
statement.append_order_by(*context.eager_order_by)
- context._setup_column_processors(
- enumerate(
- col for (label, col) in statement._columns_plus_names
- ),
- outer_adapter
- )
-
- context._setup_column_processors(
- enumerate(context.primary_columns, 0)
- )
- context._setup_column_processors(
- enumerate(
- context.secondary_columns,
- len(context.primary_columns) + len(order_by_col_expr))
- )
-
return statement
def _simple_statement(self, context):
@@ -3099,14 +3084,6 @@ class Query(object):
if context.eager_order_by:
statement.append_order_by(*context.eager_order_by)
- # initiate indexes for column processor functions
- # that have been established
- context._setup_column_processors(
- enumerate(
- col for (label, col) in statement._columns_plus_names
- )
- )
-
return statement
def _adjust_for_single_inheritance(self, context):
@@ -3284,30 +3261,22 @@ class _MapperEntity(_QueryEntity):
return ret
- def setup_context(self, query, context):
+ def setup_context(self, query, context, result=None):
adapter = self._get_entity_clauses(query, context)
- # TODO: this was part of concrete, how does this apply
- # now? At least for textual, something is needed.
- # (or do we?)
- # if not adapter and self.mapper._requires_row_aliasing:
- # adapter = sql_util.ColumnAdapter(
- # self.selectable,
- # self.mapper._equivalent_columns)
-
- # if self._adapted_selectable is None:
- context.froms += (self.selectable,)
-
- if context.order_by is False and self.mapper.order_by:
- context.order_by = self.mapper.order_by
-
- # apply adaptation to the mapper's order_by if needed.
- if adapter:
- context.order_by = adapter.adapt_list(
- util.to_list(
- context.order_by
+ if result is None:
+ context.froms += (self.selectable,)
+
+ if context.order_by is False and self.mapper.order_by:
+ context.order_by = self.mapper.order_by
+
+ # apply adaptation to the mapper's order_by if needed.
+ if adapter:
+ context.order_by = adapter.adapt_list(
+ util.to_list(
+ context.order_by
+ )
)
- )
if self._with_polymorphic:
poly_properties = self.mapper._iterate_polymorphic_properties(
@@ -3331,6 +3300,7 @@ class _MapperEntity(_QueryEntity):
self.mapper,
props_toload,
context,
+ result,
context.primary_columns,
self,
self.path,
@@ -3516,11 +3486,12 @@ class _BundleEntity(_QueryEntity):
for ent in self._entities:
ent.setup_entity(ext_info, aliased_adapter)
- def setup_context(self, query, context, loaders=None):
+ def setup_context(self, query, context, result=None, loaders=None):
our_loaders = []
for ent in self._entities:
- ent.setup_context(query, context, loaders=our_loaders)
+ ent.setup_context(
+ query, context, result=result, loaders=our_loaders)
labels, procs = zip(
*our_loaders
@@ -3664,14 +3635,13 @@ class _ColumnEntity(_QueryEntity):
def _resolve_expr_against_query_aliases(self, query, expr, context):
return query._adapt_clause(expr, False, True)
- def setup_context(self, query, context, loaders=None):
+ def setup_context(self, query, context, result=None, loaders=None):
column = self._resolve_expr_against_query_aliases(
query, self.column, context)
context.froms += tuple(self.froms)
context.primary_columns.append(column)
- loader = _IdxLoader()
- context.column_processors.append((column, loader.setup))
+ loader = _IdxLoader(column)
if loaders is not None:
loaders.append((self._label_name, loader))
@@ -3684,7 +3654,7 @@ class _ColumnEntity(_QueryEntity):
class QueryContext(object):
multi_row_eager_loaders = False
- adapter = None
+ outer_adapter = None
froms = ()
for_update = None
order_by = False
@@ -3713,7 +3683,6 @@ class QueryContext(object):
self.refresh_state = query._refresh_state
self.primary_columns = []
self.secondary_columns = []
- self.column_processors = []
self.eager_order_by = []
self.eager_joins = {}
self.create_eager_joins = []
@@ -3721,16 +3690,14 @@ class QueryContext(object):
o.propagate_to_loaders)
self.attributes = query._attributes.copy()
self.loaders = []
+ self.todo = []
- def _setup_column_processors(self, cols, adapter=None):
- d = collections.defaultdict(
- lambda: -1,
- [(col, idx) for idx, col in cols]
- )
- for col, fn in self.column_processors:
- if adapter:
- col = adapter.columns[col]
- fn(d[col])
+ def _getter(self, column, result):
+
+ if self.outer_adapter:
+ column = self.outer_adapter.columns[column]
+
+ return result._getter(column)
class AliasOption(interfaces.MapperOption):
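QueryContext._getter() is now the single place where a mapped column becomes a result getter: if context.outer_adapter is set (the wrapped-subquery path that assigns it), the column is first run through that adapter, then the result is asked for an itemgetter, which may be None. Strategies consume it along these lines (the function and argument names are placeholders):

    def classify(context, result, col, key, populators):
        getter = context._getter(col, result)
        if getter is not None:
            populators["quick"].append((key, getter))   # copy value straight off the row
        else:
            populators["expire"].append((key, True))    # column absent; leave it expired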
diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py
index aab455771..c5d54bab3 100644
--- a/lib/sqlalchemy/orm/strategies.py
+++ b/lib/sqlalchemy/orm/strategies.py
@@ -24,7 +24,6 @@ from .interfaces import (
)
from .session import _state_session
import itertools
-import operator
def _register_attribute(
@@ -135,7 +134,7 @@ class ColumnLoader(LoaderStrategy):
def setup_query(
self, context, query_entity, path, mapper,
- adapter, column_collection, populators, loadopt, **kw):
+ adapter, column_collection, loadopt, **kw):
for c in self.columns:
if adapter:
c = adapter.columns[c]
@@ -148,19 +147,21 @@ class ColumnLoader(LoaderStrategy):
# should be added here so that we need not bother
# querying out every column.
- def quick_populate(index):
- if index == -1:
- populators["expire"].append((self.key, True))
- else:
- populators["quick"].append(
- (self.key, operator.itemgetter(index))
- )
- context.column_processors.append((self.columns[0], quick_populate))
+ def create_row_processor(
+ self, context, path,
+ mapper, adapter, result, populators, loadopt, **kw):
- def setup_for_missing_attribute(
- self, context, query_entity, path, mapper,
- populators, loadopt, **kw):
- populators["expire"].append((self.key, True))
+ # look through list of columns represented here
+ # to see which, if any, is present in the row.
+ for col in self.columns:
+ if adapter:
+ col = adapter.columns[col]
+ getter = context._getter(col, result)
+ if getter:
+ populators["quick"].append((self.key, getter))
+ break
+ else:
+ populators["expire"].append((self.key, True))
def init_class_attribute(self, mapper):
self.is_class_level = True
@@ -204,7 +205,7 @@ class DeferredColumnLoader(LoaderStrategy):
def setup_query(
self, context, query_entity, path, mapper,
- adapter, column_collection, populators, loadopt,
+ adapter, column_collection, loadopt,
only_load_props=None, **kw):
if (
@@ -226,22 +227,28 @@ class DeferredColumnLoader(LoaderStrategy):
)
):
self.parent_property._get_strategy_by_cls(ColumnLoader).\
- setup_query(
- context, query_entity,
- path, mapper, adapter,
- populators, loadopt, **kw)
- else:
- self.setup_for_missing_attribute(
- context, query_entity, path, mapper, populators, loadopt, **kw
- )
+ setup_query(context, query_entity,
+ path, mapper, adapter, column_collection, loadopt,
+ **kw)
- def setup_for_missing_attribute(
- self, context, query_entity, path, mapper, populators,
- loadopt, **kw):
- if not self.is_class_level:
+ def create_row_processor(
+ self, context, path,
+ mapper, adapter, result, populators, loadopt, **kw):
+ col = self.columns[0]
+ if adapter:
+ col = adapter.columns[col]
+
+ getter = context._getter(col, result)
+ if getter:
+ self.parent_property._get_strategy_by_cls(ColumnLoader).\
+ create_row_processor(
+ context, path, loadopt, mapper, result,
+ adapter, populators)
+
+ elif not self.is_class_level:
set_deferred_for_local_state = \
InstanceState._instance_level_callable_processor(
- self.parent.class_manager,
+ mapper.class_manager,
LoadDeferredColumns(self.key), self.key)
populators["new"].append((self.key, set_deferred_for_local_state))
else:
@@ -336,8 +343,8 @@ class NoLoader(AbstractRelationshipLoader):
)
def create_row_processor(
- self, context, path, loadopt, mapper,
- result, adapter, populators):
+ self, context, path,
+ mapper, adapter, result, populators, loadopt, **kw):
def invoke_no_load(state, dict_, row):
state._initialize(self.key)
populators["new"].append((self.key, invoke_no_load))
@@ -625,8 +632,8 @@ class LazyLoader(AbstractRelationshipLoader):
return None
def create_row_processor(
- self, context, path, loadopt,
- mapper, result, adapter, populators):
+ self, context, path,
+ mapper, adapter, result, populators, loadopt, **kw):
key = self.key
if not self.is_class_level:
# we are not the primary manager for this attribute
@@ -688,8 +695,8 @@ class ImmediateLoader(AbstractRelationshipLoader):
pass
def create_row_processor(
- self, context, path, loadopt,
- mapper, result, adapter, populators):
+ self, context, path,
+ mapper, adapter, result, populators, loadopt, **kw):
def load_immediate(state, dict_, row):
state.get_impl(self.key).get(state, dict_)
@@ -1007,8 +1014,8 @@ class SubqueryLoader(AbstractRelationshipLoader):
self._load()
def create_row_processor(
- self, context, path, loadopt,
- mapper, result, adapter, populators):
+ self, context, path,
+ mapper, adapter, result, populators, loadopt, **kw):
if not self.parent.class_manager[self.key].impl.supports_population:
raise sa_exc.InvalidRequestError(
"'%s' does not support object "
@@ -1432,8 +1439,8 @@ class JoinedLoader(AbstractRelationshipLoader):
return False
def create_row_processor(
- self, context, path, loadopt, mapper,
- result, adapter, populators):
+ self, context, path,
+ mapper, adapter, result, populators, loadopt, **kw):
if not self.parent.class_manager[self.key].impl.supports_population:
raise sa_exc.InvalidRequestError(
"'%s' does not support object "