author    Mike Bayer <mike_mp@zzzcomputing.com>  2015-02-26 13:40:09 -0500
committer Mike Bayer <mike_mp@zzzcomputing.com>  2015-02-26 13:40:09 -0500
commit    84a894c635b7b47731a7ae1def0090d57e9b9e79 (patch)
tree      62249f851b2f9bbd0b55ca764ec5a93e2d8d4f24
parent    a60ff8f91bf7d995781e6029f9a2a3148e187553 (diff)
- the start of the polymorphic loading approach.
-rw-r--r--  lib/sqlalchemy/orm/loading.py  121
-rw-r--r--  test/orm/test_loading.py        24
2 files changed, 114 insertions, 31 deletions
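
Note: this work-in-progress patch starts routing instance_processor() through per-mapper populator buckets and a discriminator-keyed dispatch: the base mapper's _instance() callable reads the row's discriminator value and delegates to an _instance() built for the matching subclass mapper, sharing the primary-key getters already set up for the base. The standalone sketch below models only that dispatch pattern; the names are illustrative and are not SQLAlchemy internals.

# Standalone sketch of discriminator-keyed dispatch, loosely modeled on
# _decorate_polymorphic_switch(); all names here are illustrative, not
# SQLAlchemy internals.

def make_instance_fn(class_name):
    # stands in for instance_processor(): build a per-class row processor
    def _instance(row):
        return (class_name, row["name"])
    return _instance


def decorate_polymorphic_switch(base_instance_fn, polymorphic_map,
                                discriminator_key):
    # wrap the base _instance() so each row dispatches on its discriminator
    def configure_subclass(discriminator):
        cls = polymorphic_map.get(discriminator)
        if cls is None:
            return None  # unknown or base discriminator: use the base fn
        return make_instance_fn(cls)

    # one cached subclass processor per discriminator, built on first use;
    # util.PopulateDict plays this role in the real code
    cache = {}

    def polymorphic_instance(row):
        discriminator = row[discriminator_key]
        if discriminator is not None:
            if discriminator not in cache:
                cache[discriminator] = configure_subclass(discriminator)
            sub_fn = cache[discriminator]
            if sub_fn is not None:
                return sub_fn(row)
        return base_instance_fn(row)

    return polymorphic_instance


dispatch = decorate_polymorphic_switch(
    make_instance_fn("Person"),
    {"engineer": "Engineer", "manager": "Manager", "boss": "Boss"},
    "type",
)
rows = [{"type": "engineer", "name": "dilbert"},
        {"type": "boss", "name": "pointy haired boss"}]
print([dispatch(r) for r in rows])
# [('Engineer', 'dilbert'), ('Boss', 'pointy haired boss')]

In the patch itself the cache is a util.PopulateDict, and the per-subclass callable comes from re-entering instance_processor() with _polymorphic_from=mapper, handing down the shared pk_getters and the merged populators.
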
diff --git a/lib/sqlalchemy/orm/loading.py b/lib/sqlalchemy/orm/loading.py
index b3ccefd5f..3d5aa4fb4 100644
--- a/lib/sqlalchemy/orm/loading.py
+++ b/lib/sqlalchemy/orm/loading.py
@@ -227,33 +227,57 @@ def instance_processor(mapper, props_toload, context, column_collection,
query_entity, path, adapter,
only_load_props=None, refresh_state=None,
polymorphic_discriminator=None,
- _polymorphic_from=None):
+ _polymorphic_from=None,
+ _polymorphic_pk_getters=None,
+ _polymorphic_from_populators=None):
"""Produce a mapper level row processor callable
which processes rows into mapped instances."""
- populators = collections.defaultdict(list)
-
- for prop in props_toload:
- prop.setup(
- context,
- query_entity,
- path,
- adapter,
- only_load_props=only_load_props,
- column_collection=column_collection,
- populators=populators
- )
+ load_is_polymorphic = mapper.polymorphic_on is not None
- pk_cols = mapper.primary_key
+ if _polymorphic_from_populators is not None:
+ # if we are a subclass loader, then we use the collection
+ # of populators that were already set up for us.
- if adapter:
- pk_cols = [adapter.columns[c] for c in pk_cols]
+ populators = _polymorphic_from_populators
+ else:
+ populators = collections.defaultdict(list)
+
+ if load_is_polymorphic:
+ per_mapper_populators = collections.defaultdict(
+ lambda: collections.defaultdict(list))
+ per_mapper_populators[mapper] = populators
+
+ # establish all columns and column loaders up front
+ # across subclass mappers as well. Categorize loaders
+ # into mapper-specific buckets.
+
+ for prop in props_toload:
+ prop.setup(
+ context,
+ query_entity,
+ path,
+ adapter,
+ only_load_props=only_load_props,
+ column_collection=column_collection,
+ populators=per_mapper_populators[prop.parent]
+ if load_is_polymorphic
+ else populators
+ )
- pk_getters = [_IdxLoader()] * len(pk_cols)
- context.column_processors.extend(
- (pk_col, pk_getter.setup)
- for pk_col, pk_getter in zip(pk_cols, pk_getters)
- )
+ if _polymorphic_pk_getters:
+ pk_getters = _polymorphic_pk_getters
+ else:
+ pk_cols = mapper.primary_key
+
+ if adapter:
+ pk_cols = [adapter.columns[c] for c in pk_cols]
+
+ pk_getters = [_IdxLoader()] * len(pk_cols)
+ context.column_processors.extend(
+ (pk_col, pk_getter.setup)
+ for pk_col, pk_getter in zip(pk_cols, pk_getters)
+ )
identity_class = mapper._identity_class
@@ -400,13 +424,12 @@ def instance_processor(mapper, props_toload, context, column_collection,
return instance
- # TODO: this has to be reworked (again)
- # if not _polymorphic_from and not refresh_state:
+ if load_is_polymorphic and not _polymorphic_from and not refresh_state:
# if we are doing polymorphic, dispatch to a different _instance()
# method specific to the subclass mapper
- # _instance = _decorate_polymorphic_switch(
- # _instance, context, mapper, path,
- # polymorphic_discriminator, adapter)
+ _instance = _decorate_polymorphic_switch(
+ _instance, context, mapper, per_mapper_populators, path,
+ polymorphic_discriminator, adapter, pk_getters)
return _instance
@@ -493,8 +516,9 @@ def _validate_version_id(mapper, state, dict_, row, adapter):
def _decorate_polymorphic_switch(
- instance_fn, context, mapper, result, path,
- polymorphic_discriminator, adapter):
+ instance_fn, context, mapper, per_mapper_populators, path,
+ polymorphic_discriminator, adapter, pk_getters):
+
if polymorphic_discriminator is not None:
polymorphic_on = polymorphic_discriminator
else:
@@ -505,6 +529,12 @@ def _decorate_polymorphic_switch(
if adapter:
polymorphic_on = adapter.columns[polymorphic_on]
+ polymorphic_getter = _IdxLoader()
+ if polymorphic_discriminator is not mapper.polymorphic_on:
+ context.primary_columns.append(polymorphic_on)
+ context.column_processors.append(
+ (polymorphic_on, polymorphic_getter.setup))
+
def configure_subclass_mapper(discriminator):
try:
sub_mapper = mapper.polymorphic_map[discriminator]
@@ -516,16 +546,45 @@ def _decorate_polymorphic_switch(
if sub_mapper is mapper:
return None
+ populators = collections.defaultdict(list)
+ for super_mapper in sub_mapper.iterate_to_root():
+ mapper_populators = per_mapper_populators[super_mapper]
+ for k, collection in mapper_populators.items():
+ populators[k].extend(collection)
+ if super_mapper is mapper:
+ break
+
+ # TODO!
+ # big problems:
+ # 1. "quick" is being multiply populated with redundant
+ # populators
+ # 2. columns like "golf_swing", which are not rendered in
+ # setup(), therefore have no populator at all, we normally
+ # are expecting an "expire" populator to set up for a deferred
+ # load. We need to either make it so these populators aren't
+ # needed or
+ # that we in here do actually add more non-column populators,
+ # which may mean that we need some version of
+ # row_processor() again for this case. It would be
+ # along the lines of missing_attribute_populator() and would be
+ # specific to those cases where we have to produce a subclass
+ # against a query that did not specify this class in its
+ # entities.
+ # if discriminator == 'boss':
+ # import pdb
+ # pdb.set_trace()
return instance_processor(
- sub_mapper, context, result,
- path, adapter, _polymorphic_from=mapper)
+ sub_mapper, None, context, None, None,
+ path, adapter, _polymorphic_from=mapper,
+ _polymorphic_pk_getters=pk_getters,
+ _polymorphic_from_populators=populators)
polymorphic_instances = util.PopulateDict(
configure_subclass_mapper
)
def polymorphic_instance(row):
- discriminator = row[polymorphic_on]
+ discriminator = polymorphic_getter(row)
if discriminator is not None:
_instance = polymorphic_instances[discriminator]
if _instance:
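
For a subclass row, the new configure_subclass_mapper() assembles populators by walking the subclass mapper toward the root via iterate_to_root() and merging each mapper's bucket until it reaches the querying mapper. A toy model of that merge follows; the inheritance chain, bucket contents, and helper names are made up for illustration and are not the real mapper API.

# Toy model of merging per-mapper populator buckets up the inheritance
# chain, as configure_subclass_mapper() does above; not SQLAlchemy's
# actual structures.
import collections

# hypothetical inheritance chain: Boss -> Manager -> Person
PARENT = {"Boss": "Manager", "Manager": "Person", "Person": None}


def iterate_to_root(mapper):
    # yield the mapper, then each parent up to (and including) the root
    while mapper is not None:
        yield mapper
        mapper = PARENT[mapper]


# per-mapper buckets keyed by populator kind ("quick", "new", "expire", ...)
per_mapper_populators = {
    "Person": {"quick": ["person_id", "name", "type"]},
    "Manager": {"quick": ["manager_name"]},
    "Boss": {"quick": ["golf_swing"]},
}


def merge_populators(sub_mapper, base_mapper):
    populators = collections.defaultdict(list)
    for super_mapper in iterate_to_root(sub_mapper):
        for kind, items in per_mapper_populators.get(super_mapper, {}).items():
            populators[kind].extend(items)
        if super_mapper == base_mapper:
            break
    return populators


print(dict(merge_populators("Boss", "Person")))
# {'quick': ['golf_swing', 'manager_name', 'person_id', 'name', 'type']}

The in-diff TODO above notes the open problems with exactly this merge: redundant "quick" populators, and subclass-only columns such as golf_swing that were never set up and therefore have no populator at all.
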
diff --git a/test/orm/test_loading.py b/test/orm/test_loading.py
index 69afdb10a..2ba741b53 100644
--- a/test/orm/test_loading.py
+++ b/test/orm/test_loading.py
@@ -1,4 +1,5 @@
from . import _fixtures
+from .inheritance import _poly_fixtures
from sqlalchemy import text
from sqlalchemy.orm import loading, Session, aliased, Bundle
from sqlalchemy.testing.assertions import eq_, assert_raises
@@ -123,6 +124,29 @@ class InstancesTest(_fixtures.FixtureTest):
)
+class PolymorphicInstancesTest(_poly_fixtures._Polymorphic):
+ def test_query_load_entity(self):
+ Person, Engineer, Manager, Boss = (
+ _poly_fixtures.Person, _poly_fixtures.Engineer,
+ _poly_fixtures.Manager, _poly_fixtures.Boss)
+
+ s = Session()
+
+ q = s.query(Person).order_by(Person.person_id)
+ rows = q.all()
+
+ eq_(
+ rows,
+ [
+ Engineer(name='dilbert'),
+ Engineer(name='wally'),
+ Boss(name='pointy haired boss', golf_swing='fore!'),
+ Manager(manager_name='dogbert'),
+ Engineer(name='vlad')
+ ]
+ )
+
+
class MergeResultTest(_fixtures.FixtureTest):
run_setup_mappers = 'once'
run_inserts = 'once'
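
The new PolymorphicInstancesTest exercises the shared _poly_fixtures._Polymorphic joined-table inheritance fixture: querying the base Person entity is expected to return Engineer, Manager, and Boss instances. A minimal declarative mapping in the same spirit is sketched below; the schema and column details are assumed for illustration and are not the fixture's exact tables.

# Minimal joined-table inheritance mapping in the spirit of the
# _Polymorphic fixture; schema and names are illustrative only.
from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
from sqlalchemy.orm import Session
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()


class Person(Base):
    __tablename__ = 'people'
    person_id = Column(Integer, primary_key=True)
    name = Column(String(50))
    type = Column(String(30))
    __mapper_args__ = {
        'polymorphic_on': type,
        'polymorphic_identity': 'person',
    }


class Engineer(Person):
    __tablename__ = 'engineers'
    person_id = Column(Integer, ForeignKey('people.person_id'),
                       primary_key=True)
    primary_language = Column(String(50))
    __mapper_args__ = {'polymorphic_identity': 'engineer'}


class Manager(Person):
    __tablename__ = 'managers'
    person_id = Column(Integer, ForeignKey('people.person_id'),
                       primary_key=True)
    manager_name = Column(String(50))
    __mapper_args__ = {'polymorphic_identity': 'manager'}


class Boss(Manager):
    __tablename__ = 'boss'
    person_id = Column(Integer, ForeignKey('managers.person_id'),
                       primary_key=True)
    golf_swing = Column(String(30))
    __mapper_args__ = {'polymorphic_identity': 'boss'}


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)

s = Session(engine)
s.add_all([Engineer(name='dilbert', primary_language='java'),
           Boss(name='pointy haired boss', golf_swing='fore!')])
s.commit()

# querying the base class returns instances of the mapped subclasses,
# which is the behavior the new instance-processor dispatch must preserve
print(s.query(Person).order_by(Person.person_id).all())
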