Diffstat (limited to 'examples')
-rw-r--r--  examples/adjacency_list/__init__.py | 2
-rw-r--r--  examples/adjacency_list/adjacency_list.py | 29
-rw-r--r--  examples/association/__init__.py | 14
-rw-r--r--  examples/association/basic_association.py | 6
-rw-r--r--  examples/association/dict_of_sets_with_default.py | 7
-rw-r--r--  examples/association/proxied_association.py | 8
-rw-r--r--  examples/custom_attributes/__init__.py | 14
-rw-r--r--  examples/dogpile_caching/__init__.py | 45
-rw-r--r--  examples/dogpile_caching/advanced.py | 8
-rw-r--r--  examples/dogpile_caching/caching_query.py | 5
-rw-r--r--  examples/dogpile_caching/environment.py | 9
-rw-r--r--  examples/dogpile_caching/helloworld.py | 12
-rw-r--r--  examples/dogpile_caching/local_session_caching.py | 5
-rw-r--r--  examples/dogpile_caching/model.py | 7
-rw-r--r--  examples/dogpile_caching/relationship_caching.py (renamed from examples/dogpile_caching/relation_caching.py) | 3
-rw-r--r--  examples/dynamic_dict/__init__.py | 2
-rw-r--r--  examples/elementtree/__init__.py | 23
-rw-r--r--  examples/elementtree/adjacency_list.py | 10
-rw-r--r--  examples/elementtree/optimized_al.py | 10
-rw-r--r--  examples/generic_associations/__init__.py | 15
-rw-r--r--  examples/generic_associations/discriminator_on_association.py | 84
-rw-r--r--  examples/generic_associations/generic_fk.py | 140
-rw-r--r--  examples/generic_associations/table_per_association.py | 11
-rw-r--r--  examples/generic_associations/table_per_related.py | 24
-rw-r--r--  examples/graphs/__init__.py | 2
-rw-r--r--  examples/inheritance/__init__.py | 2
-rw-r--r--  examples/inheritance/concrete.py | 2
-rw-r--r--  examples/inheritance/joined.py | 3
-rw-r--r--  examples/inheritance/single.py | 2
-rw-r--r--  examples/join_conditions/__init__.py | 7
-rw-r--r--  examples/join_conditions/cast.py | 95
-rw-r--r--  examples/join_conditions/threeway.py | 108
-rw-r--r--  examples/large_collection/__init__.py | 2
-rw-r--r--  examples/nested_sets/__init__.py | 2
-rw-r--r--  examples/nested_sets/nested_sets.py | 4
-rw-r--r--  examples/postgis/__init__.py | 2
-rw-r--r--  examples/sharding/__init__.py | 2
-rw-r--r--  examples/versioned_history/__init__.py (renamed from examples/versioning/__init__.py) | 6
-rw-r--r--  examples/versioned_history/history_meta.py (renamed from examples/versioning/history_meta.py) | 11
-rw-r--r--  examples/versioned_history/test_versioning.py (renamed from examples/versioning/test_versioning.py) | 165
-rw-r--r--  examples/versioned_rows/__init__.py | 7
-rw-r--r--  examples/versioned_rows/versioned_map.py | 284
-rw-r--r--  examples/versioned_rows/versioned_rows.py | 105
-rw-r--r--  examples/versioning/_lib.py | 96
-rw-r--r--  examples/vertical/__init__.py | 1
-rw-r--r--  examples/vertical/dictlike-polymorphic.py | 214
-rw-r--r--  examples/vertical/dictlike.py | 179
47 files changed, 1211 insertions, 583 deletions
diff --git a/examples/adjacency_list/__init__.py b/examples/adjacency_list/__init__.py
index 44f27090b..5d80363e4 100644
--- a/examples/adjacency_list/__init__.py
+++ b/examples/adjacency_list/__init__.py
@@ -12,5 +12,7 @@ E.g.::
dump_tree(node)
+.. autosource::
+
"""
diff --git a/examples/adjacency_list/adjacency_list.py b/examples/adjacency_list/adjacency_list.py
index a0683ea0c..9e62bc0be 100644
--- a/examples/adjacency_list/adjacency_list.py
+++ b/examples/adjacency_list/adjacency_list.py
@@ -1,7 +1,5 @@
-from sqlalchemy import MetaData, Table, Column, Sequence, ForeignKey,\
- Integer, String, create_engine
-
-from sqlalchemy.orm import sessionmaker, relationship, backref,\
+from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
+from sqlalchemy.orm import Session, relationship, backref,\
joinedload_all
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm.collections import attribute_mapped_collection
@@ -18,7 +16,7 @@ class TreeNode(Base):
children = relationship("TreeNode",
# cascade deletions
- cascade="all",
+ cascade="all, delete-orphan",
# many to one + adjacency list - remote_side
# is required to reference the 'remote'
@@ -42,11 +40,10 @@ class TreeNode(Base):
)
def dump(self, _indent=0):
-
return " " * _indent + repr(self) + \
"\n" + \
"".join([
- c.dump(_indent +1)
+ c.dump(_indent + 1)
for c in self.children.values()]
)
@@ -63,10 +60,7 @@ if __name__ == '__main__':
Base.metadata.create_all(engine)
- # session. using expire_on_commit=False
- # so that the session's contents are not expired
- # after each transaction commit.
- session = sessionmaker(engine, expire_on_commit=False)()
+ session = Session(engine)
node = TreeNode('rootnode')
TreeNode('node1', parent=node)
@@ -91,16 +85,13 @@ if __name__ == '__main__':
TreeNode('subnode4', parent=node.children['node4'])
TreeNode('subsubnode1', parent=node.children['node4'].children['subnode3'])
- # mark node1 as deleted and remove
- session.delete(node.children['node1'])
+ # remove node1 from the parent, which will trigger a delete
+ # via the delete-orphan cascade.
+ del node.children['node1']
msg("Removed node1. flush + commit:")
session.commit()
- # expire the "children" collection so that
- # it reflects the deletion of "node1".
- session.expire(node, ['children'])
-
msg("Tree after save:\n %s", node.dump())
msg("Emptying out the session entirely, "
@@ -109,12 +100,12 @@ if __name__ == '__main__':
node = session.query(TreeNode).\
options(joinedload_all("children", "children",
"children", "children")).\
- filter(TreeNode.name=="rootnode").\
+ filter(TreeNode.name == "rootnode").\
first()
msg("Full Tree:\n%s", node.dump())
- msg( "Marking root node as deleted, flush + commit:" )
+ msg("Marking root node as deleted, flush + commit:")
session.delete(node)
session.commit()
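
The change above leans on the ``delete-orphan`` cascade: removing a child
from the parent's collection is enough to have the child deleted at flush
time, with no explicit ``session.delete()`` or ``expire()`` needed.  A
condensed, self-contained sketch of that pattern (abbreviated from the
example; column set trimmed)::

    from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
    from sqlalchemy.orm import Session, backref, relationship
    from sqlalchemy.orm.collections import attribute_mapped_collection
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class TreeNode(Base):
        __tablename__ = 'tree'
        id = Column(Integer, primary_key=True)
        parent_id = Column(Integer, ForeignKey('tree.id'))
        name = Column(String(50), nullable=False)

        children = relationship(
            "TreeNode",
            # children de-associated from a parent become orphans
            # and are deleted at flush time
            cascade="all, delete-orphan",
            backref=backref("parent", remote_side=id),
            collection_class=attribute_mapped_collection('name'),
        )

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = Session(engine)

    root = TreeNode(name='root')
    TreeNode(name='child1', parent=root)
    session.add(root)
    session.commit()

    # removing the child from the collection is enough; commit emits a DELETE
    del root.children['child1']
    session.commit()
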
diff --git a/examples/association/__init__.py b/examples/association/__init__.py
index df736f4fb..4cd64c22f 100644
--- a/examples/association/__init__.py
+++ b/examples/association/__init__.py
@@ -1,20 +1,8 @@
"""
-
Examples illustrating the usage of the "association object" pattern,
where an intermediary class mediates the relationship between two
classes that are associated in a many-to-many pattern.
-This directory includes the following examples:
-
-* basic_association.py - illustrate a many-to-many relationship between an
- "Order" and a collection of "Item" objects, associating a purchase price
- with each via an association object called "OrderItem"
-* proxied_association.py - same example as basic_association, adding in
- usage of :mod:`sqlalchemy.ext.associationproxy` to make explicit references
- to "OrderItem" optional.
-* dict_of_sets_with_default.py - an advanced association proxy example which
- illustrates nesting of association proxies to produce multi-level Python
- collections, in this case a dictionary with string keys and sets of integers
- as values, which conceal the underlying mapped classes.
+.. autosource::
""" \ No newline at end of file
diff --git a/examples/association/basic_association.py b/examples/association/basic_association.py
index a175b1b89..8a8a54ad1 100644
--- a/examples/association/basic_association.py
+++ b/examples/association/basic_association.py
@@ -1,4 +1,8 @@
-"""A basic example of using the association object pattern.
+"""basic_association.py
+
+illustrate a many-to-many relationship between an
+"Order" and a collection of "Item" objects, associating a purchase price
+with each via an association object called "OrderItem"
The association object pattern is a form of many-to-many which
associates additional data with each association between parent/child.
diff --git a/examples/association/dict_of_sets_with_default.py b/examples/association/dict_of_sets_with_default.py
index f541727e7..fc4aebfca 100644
--- a/examples/association/dict_of_sets_with_default.py
+++ b/examples/association/dict_of_sets_with_default.py
@@ -1,4 +1,9 @@
-"""Illustrate a 'dict of sets of integers' model.
+"""dict_of_sets_with_default.py
+
+an advanced association proxy example which
+illustrates nesting of association proxies to produce multi-level Python
+collections, in this case a dictionary with string keys and sets of integers
+as values, which conceal the underlying mapped classes.
This is a three table model which represents a parent table referencing a
dictionary of string keys and sets as values, where each set stores a
diff --git a/examples/association/proxied_association.py b/examples/association/proxied_association.py
index 4cf1c51be..7cb4c9338 100644
--- a/examples/association/proxied_association.py
+++ b/examples/association/proxied_association.py
@@ -1,5 +1,9 @@
-"""An extension to the basic_association.py example, which illustrates
-the usage of sqlalchemy.ext.associationproxy.
+"""proxied_association.py
+
+same example as basic_association, adding in
+usage of :mod:`sqlalchemy.ext.associationproxy` to make explicit references
+to ``OrderItem`` optional.
+
"""
diff --git a/examples/custom_attributes/__init__.py b/examples/custom_attributes/__init__.py
index b28e97d95..2072c051f 100644
--- a/examples/custom_attributes/__init__.py
+++ b/examples/custom_attributes/__init__.py
@@ -2,18 +2,6 @@
Two examples illustrating modifications to SQLAlchemy's attribute management
system.
-``listen_for_events.py`` illustrates the usage of
-:class:`~sqlalchemy.orm.interfaces.AttributeExtension` to intercept attribute
-events. It additionally illustrates a way to automatically attach these
-listeners to all class attributes using a
-:class:`.InstrumentationManager`.
-
-``custom_management.py`` illustrates much deeper usage of
-:class:`.InstrumentationManager` as well as
-collection adaptation, to completely change the underlying method used to
-store state on an object. This example was developed to illustrate
-techniques which would be used by other third party object instrumentation
-systems to interact with SQLAlchemy's event system and is only intended for
-very intricate framework integrations.
+.. autosource::
""" \ No newline at end of file
diff --git a/examples/dogpile_caching/__init__.py b/examples/dogpile_caching/__init__.py
index 00c386bda..bf67eeb17 100644
--- a/examples/dogpile_caching/__init__.py
+++ b/examples/dogpile_caching/__init__.py
@@ -40,45 +40,20 @@ exactly one SQL statement against two tables will be emitted - the
displayed result however will utilize dozens of lazyloads that all
pull from cache.
-The demo scripts themselves, in order of complexity, are run as follows::
+The demo scripts themselves, in order of complexity, are run as Python
+modules so that relative imports work::
- python examples/dogpile_caching/helloworld.py
+ python -m examples.dogpile_caching.helloworld
- python examples/dogpile_caching/relationship_caching.py
+ python -m examples.dogpile_caching.relationship_caching
- python examples/dogpile_caching/advanced.py
+ python -m examples.dogpile_caching.advanced
- python examples/dogpile_caching/local_session_caching.py
+ python -m examples.dogpile_caching.local_session_caching
-
-Listing of files:
-
- environment.py - Establish the Session, a dictionary
- of "regions", a sample cache region against a .dbm
- file, data / cache file paths, and configurations,
- bootstrap fixture data if necessary.
-
- caching_query.py - Represent functions and classes
- which allow the usage of Dogpile caching with SQLAlchemy.
- Introduces a query option called FromCache.
-
- model.py - The datamodel, which represents Person that has multiple
- Address objects, each with PostalCode, City, Country
-
- fixture_data.py - creates demo PostalCode, Address, Person objects
- in the database.
-
- helloworld.py - the basic idea.
-
- relationship_caching.py - Illustrates how to add cache options on
- relationship endpoints, so that lazyloads load from cache.
-
- advanced.py - Further examples of how to use FromCache. Combines
- techniques from the first two scripts.
-
- local_session_caching.py - Grok everything so far ? This example
- creates a new dogpile.cache backend that will persist data in a dictionary
- which is local to the current session. remove() the session
- and the cache is gone.
+.. autosource::
+ :files: environment.py, caching_query.py, model.py, fixture_data.py, \
+ helloworld.py, relationship_caching.py, advanced.py, \
+ local_session_caching.py
"""
diff --git a/examples/dogpile_caching/advanced.py b/examples/dogpile_caching/advanced.py
index f1a18a4d7..feccaa3ba 100644
--- a/examples/dogpile_caching/advanced.py
+++ b/examples/dogpile_caching/advanced.py
@@ -1,15 +1,13 @@
"""advanced.py
Illustrate usage of Query combined with the FromCache option,
-including front-end loading, cache invalidation, namespace techniques
-and collection caching.
+including front-end loading, cache invalidation and collection caching.
"""
from .environment import Session
-from .model import Person, Address, cache_address_bits
+from .model import Person, cache_address_bits
from .caching_query import FromCache, RelationshipCache
-from sqlalchemy.orm import joinedload
def load_name_range(start, end, invalidate=False):
"""Load Person objects on a range of names.
@@ -23,7 +21,7 @@ def load_name_range(start, end, invalidate=False):
The `Person.addresses` collections are also cached. Its basically
another level of tuning here, as that particular cache option
can be transparently replaced with joinedload(Person.addresses).
- The effect is that each Person and his/her Address collection
+ The effect is that each Person and their Address collection
is cached either together or separately, affecting the kind of
SQL that emits for unloaded Person objects as well as the distribution
of data within the cache.
diff --git a/examples/dogpile_caching/caching_query.py b/examples/dogpile_caching/caching_query.py
index 7fe84bede..9ac0d431a 100644
--- a/examples/dogpile_caching/caching_query.py
+++ b/examples/dogpile_caching/caching_query.py
@@ -1,7 +1,8 @@
"""caching_query.py
-Represent persistence structures which allow the usage of
-dogpile.cache caching with SQLAlchemy.
+Represent functions and classes
+which allow the usage of Dogpile caching with SQLAlchemy.
+Introduces a query option called FromCache.
The three new concepts introduced here are:
diff --git a/examples/dogpile_caching/environment.py b/examples/dogpile_caching/environment.py
index 36b9585b2..aeba65e19 100644
--- a/examples/dogpile_caching/environment.py
+++ b/examples/dogpile_caching/environment.py
@@ -10,7 +10,12 @@ from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from dogpile.cache.region import make_region
import os
-import md5
+from hashlib import md5
+import sys
+py2k = sys.version_info < (3, 0)
+
+if py2k:
+ input = raw_input
# dogpile cache regions. A home base for cache configurations.
regions = {}
@@ -47,7 +52,7 @@ def md5_key_mangler(key):
distill them into an md5 hash.
"""
- return md5.md5(key).hexdigest()
+ return md5(key.encode('ascii')).hexdigest()
# configure the "default" cache region.
regions['default'] = make_region(
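
For reference, a minimal standalone sketch of the key-mangling hook wired up
above; the in-memory backend here is an assumption chosen for brevity, where
the example itself configures a dbm file::

    from hashlib import md5
    from dogpile.cache import make_region

    def md5_key_mangler(key):
        """Distill long, SQL-derived cache keys into fixed-length md5 hashes."""
        return md5(key.encode('ascii')).hexdigest()

    region = make_region(
        key_mangler=md5_key_mangler
    ).configure(
        'dogpile.cache.memory'   # stand-in backend; the example uses a dbm file
    )

    region.set("SELECT ... some very long statement ...", "cached value")
    print(region.get("SELECT ... some very long statement ..."))
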
diff --git a/examples/dogpile_caching/helloworld.py b/examples/dogpile_caching/helloworld.py
index 4561097b6..22d7f97be 100644
--- a/examples/dogpile_caching/helloworld.py
+++ b/examples/dogpile_caching/helloworld.py
@@ -8,27 +8,27 @@ from .environment import Session
from .model import Person
from .caching_query import FromCache
-# load Person objects. cache the result under the namespace "all_people".
+# load Person objects. cache the result in the "default" cache region
print("loading people....")
people = Session.query(Person).options(FromCache("default")).all()
# remove the Session. next query starts from scratch.
Session.remove()
-# load again, using the same FromCache option. now they're cached
-# under "all_people", no SQL is emitted.
+# load again, using the same FromCache option. now they're cached,
+# so no SQL is emitted.
print("loading people....again!")
people = Session.query(Person).options(FromCache("default")).all()
-# want to load on some different kind of query ? change the namespace
-# you send to FromCache
+# Specifying a different query produces a different cache key, so
+# these results are independently cached.
print("loading people two through twelve")
people_two_through_twelve = Session.query(Person).\
options(FromCache("default")).\
filter(Person.name.between("person 02", "person 12")).\
all()
-# the data is cached under the "namespace" you send to FromCache, *plus*
+# the data is cached under the string structure of the SQL statement, *plus*
# the bind parameters of the query. So this query, having
# different literal parameters under "Person.name.between()" than the
# previous one, issues new SQL...
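
The keying scheme the comments above describe - statement text plus bound
parameter values - can be sketched independently of the ``FromCache`` option
using a plain dogpile region; this illustrates the idea only and is not the
recipe's actual mechanism::

    from dogpile.cache import make_region

    region = make_region().configure('dogpile.cache.memory')

    def cached_rows(statement, params):
        # the key combines the SQL string with its parameter values, so the
        # same statement with different parameters gets its own cache entry
        key = "%s|%r" % (statement, sorted(params.items()))

        def run_query():
            print("... hitting the database ...")
            return ["row one", "row two"]    # stand-in for real results

        return region.get_or_create(key, run_query)

    stmt = "SELECT * FROM person WHERE name BETWEEN :x AND :y"
    cached_rows(stmt, {"x": "person 02", "y": "person 12"})   # runs the query
    cached_rows(stmt, {"x": "person 02", "y": "person 12"})   # served from cache
    cached_rows(stmt, {"x": "person 05", "y": "person 15"})   # new key, runs again
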
diff --git a/examples/dogpile_caching/local_session_caching.py b/examples/dogpile_caching/local_session_caching.py
index cf0083d2e..e6c712b4a 100644
--- a/examples/dogpile_caching/local_session_caching.py
+++ b/examples/dogpile_caching/local_session_caching.py
@@ -1,5 +1,10 @@
"""local_session_caching.py
+Grok everything so far? This example
+creates a new dogpile.cache backend that will persist data in a dictionary
+which is local to the current session. remove() the session
+and the cache is gone.
+
Create a new Dogpile cache backend that will store
cached data local to the current Session.
diff --git a/examples/dogpile_caching/model.py b/examples/dogpile_caching/model.py
index 622d31e6a..75c0ad28a 100644
--- a/examples/dogpile_caching/model.py
+++ b/examples/dogpile_caching/model.py
@@ -1,6 +1,7 @@
-"""Model. We are modeling Person objects with a collection
-of Address objects. Each Address has a PostalCode, which
-in turn references a City and then a Country:
+"""model.py
+
+The datamodel, which represents Person that has multiple
+Address objects, each with PostalCode, City, Country.
Person --(1..n)--> Address
Address --(has a)--> PostalCode
diff --git a/examples/dogpile_caching/relation_caching.py b/examples/dogpile_caching/relationship_caching.py
index d40752e48..320ced48a 100644
--- a/examples/dogpile_caching/relation_caching.py
+++ b/examples/dogpile_caching/relationship_caching.py
@@ -1,5 +1,8 @@
"""relationship_caching.py
+Illustrates how to add cache options on
+relationship endpoints, so that lazyloads load from cache.
+
Load a set of Person and Address objects, specifying that
related PostalCode, City, Country objects should be pulled from long
term cache.
diff --git a/examples/dynamic_dict/__init__.py b/examples/dynamic_dict/__init__.py
index 7f7b0691d..e592ea200 100644
--- a/examples/dynamic_dict/__init__.py
+++ b/examples/dynamic_dict/__init__.py
@@ -3,4 +3,6 @@
string keys) can operate upon a large collection without loading the
full collection at once.
+.. autosource::
+
""" \ No newline at end of file
diff --git a/examples/elementtree/__init__.py b/examples/elementtree/__init__.py
index 6462dd562..66e9cfbbe 100644
--- a/examples/elementtree/__init__.py
+++ b/examples/elementtree/__init__.py
@@ -7,26 +7,6 @@ native cElementTree as well as lxml, and can be adapted to
suit any kind of DOM representation system. Querying along
xpath-like strings is illustrated as well.
-In order of complexity:
-
-* ``pickle.py`` - Quick and dirty, serialize the whole DOM into a BLOB
- column. While the example is very brief, it has very limited
- functionality.
-
-* ``adjacency_list.py`` - Each DOM node is stored in an individual
- table row, with attributes represented in a separate table. The
- nodes are associated in a hierarchy using an adjacency list
- structure. A query function is introduced which can search for nodes
- along any path with a given structure of attributes, basically a
- (very narrow) subset of xpath.
-
-* ``optimized_al.py`` - Uses the same strategy as
- ``adjacency_list.py``, but associates each DOM row with its owning
- document row, so that a full document of DOM nodes can be loaded
- using O(1) queries - the construction of the "hierarchy" is performed
- after the load in a non-recursive fashion and is much more
- efficient.
-
E.g.::
# parse an XML file and persist in the database
@@ -39,4 +19,7 @@ E.g.::
# dump the XML
print document
+.. autosource::
+ :files: pickle.py, adjacency_list.py, optimized_al.py
+
""" \ No newline at end of file
diff --git a/examples/elementtree/adjacency_list.py b/examples/elementtree/adjacency_list.py
index a3ad42778..5e27ba9ca 100644
--- a/examples/elementtree/adjacency_list.py
+++ b/examples/elementtree/adjacency_list.py
@@ -1,9 +1,17 @@
-"""illustrates an explicit way to persist an XML document expressed using ElementTree.
+"""Illustrates an explicit way to persist an XML document expressed using ElementTree.
+
+Each DOM node is stored in an individual
+table row, with attributes represented in a separate table. The
+nodes are associated in a hierarchy using an adjacency list
+structure. A query function is introduced which can search for nodes
+along any path with a given structure of attributes, basically a
+(very narrow) subset of xpath.
This example explicitly marshals/unmarshals the ElementTree document into
mapped entities which have their own tables. Compare to pickle.py which
uses pickle to accomplish the same task. Note that the usage of both
styles of persistence are identical, as is the structure of the main Document class.
+
"""
################################# PART I - Imports/Coniguration ####################################
diff --git a/examples/elementtree/optimized_al.py b/examples/elementtree/optimized_al.py
index 1dbad0943..e13f5b0ee 100644
--- a/examples/elementtree/optimized_al.py
+++ b/examples/elementtree/optimized_al.py
@@ -1,7 +1,9 @@
-"""This script duplicates adjacency_list.py, but optimizes the loading
-of XML nodes to be based on a "flattened" datamodel. Any number of XML documents,
-each of arbitrary complexity, can be loaded in their entirety via a single query
-which joins on only three tables.
+"""Uses the same strategy as
+ ``adjacency_list.py``, but associates each DOM row with its owning
+ document row, so that a full document of DOM nodes can be loaded
+ using O(1) queries - the construction of the "hierarchy" is performed
+ after the load in a non-recursive fashion and is more
+ efficient.
"""
diff --git a/examples/generic_associations/__init__.py b/examples/generic_associations/__init__.py
index b6cb24088..b6593b4f4 100644
--- a/examples/generic_associations/__init__.py
+++ b/examples/generic_associations/__init__.py
@@ -9,17 +9,10 @@ subclassing the ``HasAddresses`` mixin, which ensures that the
parent class is provided with an ``addresses`` collection
which contains ``Address`` objects.
-The configurations include:
+The :viewsource:`.discriminator_on_association` and :viewsource:`.generic_fk` scripts
+are modernized versions of recipes presented in the 2007 blog post
+`Polymorphic Associations with SQLAlchemy <http://techspot.zzzeek.org/2007/05/29/polymorphic-associations-with-sqlalchemy/>`_.
-* ``table_per_related.py`` - illustrates a distinct table per related collection.
-* ``table_per_association.py`` - illustrates a shared collection table, using a
- table per association.
-* ``discriminator_on_association.py`` - shared collection table and shared
- association table, including a discriminator column.
-
-The ``discriminator_on_association.py`` script in particular is a modernized
-version of the "polymorphic associations" example present in older versions of
-SQLAlchemy, originally from the blog post at
-http://techspot.zzzeek.org/2007/05/29/polymorphic-associations-with-sqlalchemy/.
+.. autosource::
""" \ No newline at end of file
diff --git a/examples/generic_associations/discriminator_on_association.py b/examples/generic_associations/discriminator_on_association.py
index 7b4565a85..e03cfec00 100644
--- a/examples/generic_associations/discriminator_on_association.py
+++ b/examples/generic_associations/discriminator_on_association.py
@@ -1,28 +1,29 @@
"""discriminator_on_related.py
-The HasAddresses mixin will provide a relationship
-to the fixed Address table based on a fixed association table.
-
-The association table will also contain a "discriminator"
-which determines what type of parent object associates to the
-Address row.
-
-This is a "polymorphic association". Even though a "discriminator"
-that refers to a particular table is present, the extra association
-table is used so that traditional foreign key constraints may be used.
-
-This configuration has the advantage that a fixed set of tables
-are used, with no extra-table-per-parent needed. The individual
-Address record can also locate its parent with no need to scan
-amongst many tables.
+Illustrates a mixin which provides a generic association
+using a single target table and a single association table,
+referred to by all parent tables. The association table
+contains a "discriminator" column which determines what type of
+parent object associates to each particular row in the association
+table.
+
+SQLAlchemy's single-table-inheritance feature is used
+to target different association types.
+
+This configuration attempts to simulate a so-called "generic foreign key"
+as closely as possible without actually foregoing the use of real
+foreign keys. Unlike table-per-related and table-per-association,
+it uses a fixed number of tables to serve any number of potential parent
+objects, but is also slightly more complex.
"""
-from sqlalchemy.ext.declarative import declarative_base, declared_attr
+from sqlalchemy.ext.declarative import as_declarative, declared_attr
from sqlalchemy import create_engine, Integer, Column, \
- String, ForeignKey, Table
+ String, ForeignKey
from sqlalchemy.orm import Session, relationship, backref
from sqlalchemy.ext.associationproxy import association_proxy
+@as_declarative()
class Base(object):
"""Base class which provides automated table name
and surrogate primary key column.
@@ -32,7 +33,6 @@ class Base(object):
def __tablename__(cls):
return cls.__name__.lower()
id = Column(Integer, primary_key=True)
-Base = declarative_base(cls=Base)
class AddressAssociation(Base):
"""Associates a collection of Address objects
@@ -41,22 +41,10 @@ class AddressAssociation(Base):
"""
__tablename__ = "address_association"
- @classmethod
- def creator(cls, discriminator):
- """Provide a 'creator' function to use with
- the association proxy."""
-
- return lambda addresses:AddressAssociation(
- addresses=addresses,
- discriminator=discriminator)
-
discriminator = Column(String)
"""Refers to the type of parent."""
- @property
- def parent(self):
- """Return the parent object."""
- return getattr(self, "%s_parent" % self.discriminator)
+ __mapper_args__ = {"polymorphic_on": discriminator}
class Address(Base):
"""The Address class.
@@ -65,15 +53,11 @@ class Address(Base):
single table.
"""
- association_id = Column(Integer,
- ForeignKey("address_association.id")
- )
+ association_id = Column(Integer, ForeignKey("address_association.id"))
street = Column(String)
city = Column(String)
zip = Column(String)
- association = relationship(
- "AddressAssociation",
- backref="addresses")
+ association = relationship("AddressAssociation", backref="addresses")
parent = association_proxy("association", "parent")
@@ -89,19 +73,29 @@ class HasAddresses(object):
"""
@declared_attr
def address_association_id(cls):
- return Column(Integer,
- ForeignKey("address_association.id"))
+ return Column(Integer, ForeignKey("address_association.id"))
@declared_attr
def address_association(cls):
- discriminator = cls.__name__.lower()
- cls.addresses= association_proxy(
+ name = cls.__name__
+ discriminator = name.lower()
+
+ assoc_cls = type(
+ "%sAddressAssociation" % name,
+ (AddressAssociation, ),
+ dict(
+ __mapper_args__={
+ "polymorphic_identity": discriminator
+ }
+ )
+ )
+
+ cls.addresses = association_proxy(
"address_association", "addresses",
- creator=AddressAssociation.creator(discriminator)
+ creator=lambda addresses: assoc_cls(addresses=addresses)
)
- return relationship("AddressAssociation",
- backref=backref("%s_parent" % discriminator,
- uselist=False))
+ return relationship(assoc_cls,
+ backref=backref("parent", uselist=False))
class Customer(HasAddresses, Base):
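
Several of these scripts switch from ``declarative_base(cls=Base)`` to the
:func:`.as_declarative` decorator, under which the decorated class itself
becomes the declarative base.  A minimal standalone sketch of that idiom::

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.ext.declarative import as_declarative, declared_attr
    from sqlalchemy.orm import Session

    @as_declarative()
    class Base(object):
        """The decorated class itself becomes the declarative base."""

        @declared_attr
        def __tablename__(cls):
            return cls.__name__.lower()

        id = Column(Integer, primary_key=True)

    class Customer(Base):
        name = Column(String)

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)

    session = Session(engine)
    session.add(Customer(name='customer 1'))
    session.commit()
    print(session.query(Customer).one().name)
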
diff --git a/examples/generic_associations/generic_fk.py b/examples/generic_associations/generic_fk.py
new file mode 100644
index 000000000..e228c6ba4
--- /dev/null
+++ b/examples/generic_associations/generic_fk.py
@@ -0,0 +1,140 @@
+"""generic_fk.py
+
+Illustrates a so-called "generic foreign key", in a similar fashion
+to that of popular frameworks such as Django, ROR, etc. This
+approach bypasses standard referential integrity
+practices, in that the "foreign key" column is not actually
+constrained to refer to any particular table; instead,
+in-application logic is used to determine which table is referenced.
+
+This approach is not in line with SQLAlchemy's usual style, as foregoing
+foreign key integrity means that the tables can easily contain invalid
+references and also have no ability to use in-database cascade functionality.
+
+However, due to the popularity of these systems, as well as the fact that
+it uses the fewest tables (which doesn't really offer any "advantage",
+though seems to be comforting to many) this recipe remains in
+high demand, so in the interests of having an easy StackOverflow answer
+queued up, here it is. The author recommends "table_per_related"
+or "table_per_association" instead of this approach.
+
+.. versionadded:: 0.8.3
+
+"""
+from sqlalchemy.ext.declarative import as_declarative, declared_attr
+from sqlalchemy import create_engine, Integer, Column, \
+ String, and_
+from sqlalchemy.orm import Session, relationship, foreign, remote, backref
+from sqlalchemy import event
+
+
+@as_declarative()
+class Base(object):
+ """Base class which provides automated table name
+ and surrogate primary key column.
+
+ """
+ @declared_attr
+ def __tablename__(cls):
+ return cls.__name__.lower()
+ id = Column(Integer, primary_key=True)
+
+class Address(Base):
+ """The Address class.
+
+ This represents all address records in a
+ single table.
+
+ """
+ street = Column(String)
+ city = Column(String)
+ zip = Column(String)
+
+ discriminator = Column(String)
+ """Refers to the type of parent."""
+
+ parent_id = Column(Integer)
+ """Refers to the primary key of the parent.
+
+ This could refer to any table.
+ """
+
+ @property
+ def parent(self):
+ """Provides in-Python access to the "parent" by choosing
+ the appropriate relationship.
+
+ """
+ return getattr(self, "parent_%s" % self.discriminator)
+
+ def __repr__(self):
+ return "%s(street=%r, city=%r, zip=%r)" % \
+ (self.__class__.__name__, self.street,
+ self.city, self.zip)
+
+class HasAddresses(object):
+ """HasAddresses mixin, creates a relationship to
+ the address_association table for each parent.
+
+ """
+
+@event.listens_for(HasAddresses, "mapper_configured", propagate=True)
+def setup_listener(mapper, class_):
+ name = class_.__name__
+ discriminator = name.lower()
+ class_.addresses = relationship(Address,
+ primaryjoin=and_(
+ class_.id == foreign(remote(Address.parent_id)),
+ Address.discriminator == discriminator
+ ),
+ backref=backref(
+ "parent_%s" % discriminator,
+ primaryjoin=remote(class_.id) == foreign(Address.parent_id)
+ )
+ )
+ @event.listens_for(class_.addresses, "append")
+ def append_address(target, value, initiator):
+ value.discriminator = discriminator
+
+class Customer(HasAddresses, Base):
+ name = Column(String)
+
+class Supplier(HasAddresses, Base):
+ company_name = Column(String)
+
+engine = create_engine('sqlite://', echo=True)
+Base.metadata.create_all(engine)
+
+session = Session(engine)
+
+session.add_all([
+ Customer(
+ name='customer 1',
+ addresses=[
+ Address(
+ street='123 anywhere street',
+ city="New York",
+ zip="10110"),
+ Address(
+ street='40 main street',
+ city="San Francisco",
+ zip="95732")
+ ]
+ ),
+ Supplier(
+ company_name="Ace Hammers",
+ addresses=[
+ Address(
+ street='2569 west elm',
+ city="Detroit",
+ zip="56785")
+ ]
+ ),
+])
+
+session.commit()
+
+for customer in session.query(Customer):
+ for address in customer.addresses:
+ print(address)
+ print(address.parent) \ No newline at end of file
diff --git a/examples/generic_associations/table_per_association.py b/examples/generic_associations/table_per_association.py
index 84e85de2f..4993492a4 100644
--- a/examples/generic_associations/table_per_association.py
+++ b/examples/generic_associations/table_per_association.py
@@ -1,8 +1,9 @@
"""table_per_association.py
-The HasAddresses mixin will provide a new "address_association" table for
-each parent class. The "address" table will be shared
-for all parents.
+Illustrates a mixin which provides a generic association
+via individually generated association tables for each parent class.
+The associated objects themselves are persisted in a single table
+shared among all parents.
This configuration has the advantage that all Address
rows are in one table, so that the definition of "Address"
@@ -12,11 +13,12 @@ has no dependency on the system.
"""
-from sqlalchemy.ext.declarative import declarative_base, declared_attr
+from sqlalchemy.ext.declarative import as_declarative, declared_attr
from sqlalchemy import create_engine, Integer, Column, \
String, ForeignKey, Table
from sqlalchemy.orm import Session, relationship
+@as_declarative()
class Base(object):
"""Base class which provides automated table name
and surrogate primary key column.
@@ -26,7 +28,6 @@ class Base(object):
def __tablename__(cls):
return cls.__name__.lower()
id = Column(Integer, primary_key=True)
-Base = declarative_base(cls=Base)
class Address(Base):
"""The Address class.
diff --git a/examples/generic_associations/table_per_related.py b/examples/generic_associations/table_per_related.py
index 0ec5f29b0..aff6e40ce 100644
--- a/examples/generic_associations/table_per_related.py
+++ b/examples/generic_associations/table_per_related.py
@@ -1,7 +1,8 @@
"""table_per_related.py
-The HasAddresses mixin will provide a new "address" table for
-each parent class, as well as a distinct "Address" subclass.
+Illustrates a generic association which persists association
+objects within individual tables, each one generated to persist
+those objects on behalf of a particular parent class.
This configuration has the advantage that each type of parent
maintains its "Address" rows separately, so that collection
@@ -9,11 +10,19 @@ size for one type of parent will have no impact on other types
of parent. Navigation between parent and "Address" is simple,
direct, and bidirectional.
+This recipe is the most efficient (speed-wise and storage-wise)
+and simplest of them all.
+
+The creation of many related tables may seem at first like an issue
+but there really isn't any - the management and targeting of these tables
+is completely automated.
+
"""
-from sqlalchemy.ext.declarative import declarative_base, declared_attr
+from sqlalchemy.ext.declarative import as_declarative, declared_attr
from sqlalchemy import create_engine, Integer, Column, String, ForeignKey
from sqlalchemy.orm import Session, relationship
+@as_declarative()
class Base(object):
"""Base class which provides automated table name
and surrogate primary key column.
@@ -23,7 +32,6 @@ class Base(object):
def __tablename__(cls):
return cls.__name__.lower()
id = Column(Integer, primary_key=True)
-Base = declarative_base(cls=Base)
class Address(object):
"""Define columns that will be present in each
@@ -54,11 +62,11 @@ class HasAddresses(object):
"%sAddress" % cls.__name__,
(Address, Base,),
dict(
- __tablename__ = "%s_address" %
+ __tablename__="%s_address" %
cls.__tablename__,
- parent_id = Column(Integer,
- ForeignKey("%s.id" % cls.__tablename__)),
- parent = relationship(cls)
+ parent_id=Column(Integer,
+ ForeignKey("%s.id" % cls.__tablename__)),
+ parent=relationship(cls)
)
)
return relationship(cls.Address)
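
A condensed, self-contained sketch of the "table per related" technique
patched above: a distinct ``Address`` subclass and table are generated for
each parent class via :func:`.declared_attr` (column set trimmed for
brevity)::

    from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
    from sqlalchemy.ext.declarative import as_declarative, declared_attr
    from sqlalchemy.orm import Session, relationship

    @as_declarative()
    class Base(object):
        @declared_attr
        def __tablename__(cls):
            return cls.__name__.lower()
        id = Column(Integer, primary_key=True)

    class Address(object):
        """Columns shared by every generated address table."""
        street = Column(String)

    class HasAddresses(object):
        @declared_attr
        def addresses(cls):
            # generate a new mapped Address subclass and table per parent
            cls.Address = type(
                "%sAddress" % cls.__name__,
                (Address, Base),
                dict(
                    __tablename__="%s_address" % cls.__tablename__,
                    parent_id=Column(Integer,
                                     ForeignKey("%s.id" % cls.__tablename__)),
                    parent=relationship(cls),
                ),
            )
            return relationship(cls.Address)

    class Customer(HasAddresses, Base):
        name = Column(String)

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = Session(engine)
    session.add(Customer(name='c1',
                         addresses=[Customer.Address(street='123 anywhere')]))
    session.commit()
    print(session.query(Customer).one().addresses)
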
diff --git a/examples/graphs/__init__.py b/examples/graphs/__init__.py
index 629808abe..57d41453b 100644
--- a/examples/graphs/__init__.py
+++ b/examples/graphs/__init__.py
@@ -8,4 +8,6 @@ and querying for lower- and upper- neighbors are illustrated::
n2.add_neighbor(n5)
print n2.higher_neighbors()
+.. autosource::
+
""" \ No newline at end of file
diff --git a/examples/inheritance/__init__.py b/examples/inheritance/__init__.py
index 09519a679..eb3e843ca 100644
--- a/examples/inheritance/__init__.py
+++ b/examples/inheritance/__init__.py
@@ -1,4 +1,6 @@
"""Working examples of single-table, joined-table, and concrete-table
inheritance as described in :ref:`datamapping_inheritance`.
+.. autosource::
+
""" \ No newline at end of file
diff --git a/examples/inheritance/concrete.py b/examples/inheritance/concrete.py
index b05afa5ea..f9bdc81b4 100644
--- a/examples/inheritance/concrete.py
+++ b/examples/inheritance/concrete.py
@@ -1,3 +1,5 @@
+"""Concrete (table-per-class) inheritance example."""
+
from sqlalchemy import create_engine, MetaData, Table, Column, Integer, \
String
from sqlalchemy.orm import mapper, sessionmaker, polymorphic_union
diff --git a/examples/inheritance/joined.py b/examples/inheritance/joined.py
index c6ce37146..6e0205e04 100644
--- a/examples/inheritance/joined.py
+++ b/examples/inheritance/joined.py
@@ -1,4 +1,4 @@
-"""this example illustrates a polymorphic load of two classes"""
+"""Joined-table (table-per-subclass) inheritance example."""
from sqlalchemy import Table, Column, Integer, String, \
ForeignKey, create_engine, inspect, or_
@@ -133,3 +133,4 @@ print(session.query(Company).\
session.commit()
+
diff --git a/examples/inheritance/single.py b/examples/inheritance/single.py
index b445f74a6..22a6fe027 100644
--- a/examples/inheritance/single.py
+++ b/examples/inheritance/single.py
@@ -1,3 +1,5 @@
+"""Single-table inheritance example."""
+
from sqlalchemy import MetaData, Table, Column, Integer, String, \
ForeignKey, create_engine
from sqlalchemy.orm import mapper, relationship, sessionmaker
diff --git a/examples/join_conditions/__init__.py b/examples/join_conditions/__init__.py
new file mode 100644
index 000000000..3a561d084
--- /dev/null
+++ b/examples/join_conditions/__init__.py
@@ -0,0 +1,7 @@
+"""Examples of various :func:`.orm.relationship` configurations,
+which make use of the ``primaryjoin`` argument to compose special types
+of join conditions.
+
+.. autosource::
+
+""" \ No newline at end of file
diff --git a/examples/join_conditions/cast.py b/examples/join_conditions/cast.py
new file mode 100644
index 000000000..246bc1d57
--- /dev/null
+++ b/examples/join_conditions/cast.py
@@ -0,0 +1,95 @@
+"""Illustrate a :func:`.relationship` that joins two columns where those
+columns are not of the same type, and a CAST must be used on the SQL
+side in order to match them.
+
+When complete, we'd like to see a load of the relationship to look like::
+
+ -- load the primary row, a_id is a string
+ SELECT a.id AS a_id_1, a.a_id AS a_a_id
+ FROM a
+ WHERE a.a_id = '2'
+
+ -- then load the collection using CAST, b.a_id is an integer
+ SELECT b.id AS b_id, b.a_id AS b_a_id
+ FROM b
+ WHERE CAST('2' AS INTEGER) = b.a_id
+
+The relationship is essentially configured as follows::
+
+ class B(Base):
+ # ...
+
+ a = relationship(A,
+ primaryjoin=cast(A.a_id, Integer) == foreign(B.a_id),
+ backref="bs")
+
+Where above, we are making use of the :func:`.cast` function in order
+to produce CAST, as well as the :func:`.foreign` :term:`annotation` function
+in order to note to the ORM that ``B.a_id`` should be treated like the
+"foreign key" column.
+
+"""
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from sqlalchemy.ext.declarative import declarative_base
+
+Base = declarative_base()
+
+class StringAsInt(TypeDecorator):
+ """Coerce string->integer type.
+
+ This is needed only if the relationship() from
+ int to string is writable, as SQLAlchemy will copy
+ the string parent values into the integer attribute
+ on the child during a flush.
+
+ """
+ impl = Integer
+ def process_bind_param(self, value, dialect):
+ if value is not None:
+ value = int(value)
+ return value
+
+class A(Base):
+ """Parent. The referenced column is a string type."""
+
+ __tablename__ = 'a'
+
+ id = Column(Integer, primary_key=True)
+ a_id = Column(String)
+
+class B(Base):
+ """Child. The column we reference 'A' with is an integer."""
+
+ __tablename__ = 'b'
+
+ id = Column(Integer, primary_key=True)
+ a_id = Column(StringAsInt)
+ a = relationship("A",
+ # specify primaryjoin. The string form is optional
+ # here, but note that Declarative makes available all
+ # of the built-in functions we might need, including
+ # cast() and foreign().
+ primaryjoin="cast(A.a_id, Integer) == foreign(B.a_id)",
+ backref="bs")
+
+# we demonstrate with SQLite, but the important part
+# is the CAST rendered in the SQL output.
+
+e = create_engine('sqlite://', echo=True)
+Base.metadata.create_all(e)
+
+s = Session(e)
+
+s.add_all([
+ A(a_id="1"),
+ A(a_id="2", bs=[B(), B()]),
+ A(a_id="3", bs=[B()]),
+])
+s.commit()
+
+b1 = s.query(B).filter_by(a_id="2").first()
+print(b1.a)
+
+a1 = s.query(A).filter_by(a_id="2").first()
+print(a1.bs) \ No newline at end of file
diff --git a/examples/join_conditions/threeway.py b/examples/join_conditions/threeway.py
new file mode 100644
index 000000000..13df0f349
--- /dev/null
+++ b/examples/join_conditions/threeway.py
@@ -0,0 +1,108 @@
+"""Illustrate a "three way join" - where a primary table joins to a remote
+table via an association table, but then the primary table also needs
+to refer to some columns in the remote table directly.
+
+E.g.::
+
+ first.first_id -> second.first_id
+ second.other_id --> partitioned.other_id
+ first.partition_key ---------------------> partitioned.partition_key
+
+For a relationship like this, "second" is a lot like a "secondary" table,
+but the mechanics aren't present within the "secondary" feature to allow
+for the join directly between first and partitioned. Instead, we
+will derive a selectable from partitioned and second combined together, then
+link first to that derived selectable.
+
+If we define the derived selectable as::
+
+ second JOIN partitioned ON second.other_id = partitioned.other_id
+
+A JOIN from first to this derived selectable is then::
+
+ first JOIN (second JOIN partitioned
+ ON second.other_id = partitioned.other_id)
+ ON first.first_id = second.first_id AND
+ first.partition_key = partitioned.partition_key
+
+We will use the "non primary mapper" feature in order to produce this.
+A non primary mapper is essentially an "extra" :func:`.mapper` that we can
+use to associate a particular class with some selectable that is
+not its usual mapped table. It is used only when called upon within
+a Query (or a :func:`.relationship`).
+
+
+"""
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from sqlalchemy.ext.declarative import declarative_base
+
+Base = declarative_base()
+
+class First(Base):
+ __tablename__ = 'first'
+
+ first_id = Column(Integer, primary_key=True)
+ partition_key = Column(String)
+
+ def __repr__(self):
+ return ("First(%s, %s)" % (self.first_id, self.partition_key))
+
+class Second(Base):
+ __tablename__ = 'second'
+
+ first_id = Column(Integer, primary_key=True)
+ other_id = Column(Integer, primary_key=True)
+
+class Partitioned(Base):
+ __tablename__ = 'partitioned'
+
+ other_id = Column(Integer, primary_key=True)
+ partition_key = Column(String, primary_key=True)
+
+ def __repr__(self):
+ return ("Partitioned(%s, %s)" % (self.other_id, self.partition_key))
+
+
+j = join(Partitioned, Second, Partitioned.other_id == Second.other_id)
+
+partitioned_second = mapper(Partitioned, j, non_primary=True, properties={
+ # note we need to disambiguate columns here - the join()
+ # will provide them as j.c.<tablename>_<colname> for access,
+ # but they retain their real names in the mapping
+ "other_id": [j.c.partitioned_other_id, j.c.second_other_id],
+ })
+
+First.partitioned = relationship(
+ partitioned_second,
+ primaryjoin=and_(
+ First.partition_key == partitioned_second.c.partition_key,
+ First.first_id == foreign(partitioned_second.c.first_id)
+ ), innerjoin=True)
+
+# when using any database other than SQLite, we will get a nested
+# join, e.g. "first JOIN (partitioned JOIN second ON ..) ON ..".
+# On SQLite, SQLAlchemy needs to render a full subquery.
+e = create_engine("sqlite://", echo=True)
+
+Base.metadata.create_all(e)
+s = Session(e)
+s.add_all([
+ First(first_id=1, partition_key='p1'),
+ First(first_id=2, partition_key='p1'),
+ First(first_id=3, partition_key='p2'),
+ Second(first_id=1, other_id=1),
+ Second(first_id=2, other_id=1),
+ Second(first_id=3, other_id=2),
+ Partitioned(partition_key='p1', other_id=1),
+ Partitioned(partition_key='p1', other_id=2),
+ Partitioned(partition_key='p2', other_id=2),
+])
+s.commit()
+
+for row in s.query(First, Partitioned).join(First.partitioned):
+ print(row)
+
+for f in s.query(First):
+ for p in f.partitioned:
+ print(f.partition_key, p.partition_key)
diff --git a/examples/large_collection/__init__.py b/examples/large_collection/__init__.py
index 4098cd53a..432d9196f 100644
--- a/examples/large_collection/__init__.py
+++ b/examples/large_collection/__init__.py
@@ -9,4 +9,6 @@ objects is very large, including:
``passive_deletes=True`` to greatly improve the performance of
related collection deletion.
+.. autosource::
+
"""
diff --git a/examples/nested_sets/__init__.py b/examples/nested_sets/__init__.py
index 1a97b9aef..3e73bb13e 100644
--- a/examples/nested_sets/__init__.py
+++ b/examples/nested_sets/__init__.py
@@ -1,4 +1,6 @@
""" Illustrates a rudimentary way to implement the "nested sets"
pattern for hierarchical data using the SQLAlchemy ORM.
+.. autosource::
+
""" \ No newline at end of file
diff --git a/examples/nested_sets/nested_sets.py b/examples/nested_sets/nested_sets.py
index 8225a09f2..c64b15b61 100644
--- a/examples/nested_sets/nested_sets.py
+++ b/examples/nested_sets/nested_sets.py
@@ -88,13 +88,13 @@ session.commit()
print(session.query(Employee).all())
-# 1. Find an employee and all his/her supervisors, no matter how deep the tree.
+# 1. Find an employee and all their supervisors, no matter how deep the tree.
ealias = aliased(Employee)
print(session.query(Employee).\
filter(ealias.left.between(Employee.left, Employee.right)).\
filter(ealias.emp == 'Eddie').all())
-#2. Find the employee and all his/her subordinates.
+#2. Find the employee and all their subordinates.
# (This query has a nice symmetry with the first query.)
print(session.query(Employee).\
filter(Employee.left.between(ealias.left, ealias.right)).\
diff --git a/examples/postgis/__init__.py b/examples/postgis/__init__.py
index cec5ad48a..250d9ce87 100644
--- a/examples/postgis/__init__.py
+++ b/examples/postgis/__init__.py
@@ -33,5 +33,7 @@ E.g.::
print session.query(Road).filter(Road.road_geom.intersects(r1.road_geom)).all()
+.. autosource::
+
"""
diff --git a/examples/sharding/__init__.py b/examples/sharding/__init__.py
index dacc815f9..59d26a217 100644
--- a/examples/sharding/__init__.py
+++ b/examples/sharding/__init__.py
@@ -27,4 +27,6 @@ is a simple method of assigning objects to different tables (and potentially
database nodes) in an explicit way - described on the wiki at
`EntityName <http://www.sqlalchemy.org/trac/wiki/UsageRecipes/EntityName>`_.
+.. autosource::
+
"""
diff --git a/examples/versioning/__init__.py b/examples/versioned_history/__init__.py
index 4621fae3b..d630b994b 100644
--- a/examples/versioning/__init__.py
+++ b/examples/versioned_history/__init__.py
@@ -1,8 +1,6 @@
"""
Illustrates an extension which creates version tables for entities and stores
-records for each change. The same idea as Elixir's versioned extension, but
-more efficient (uses attribute API to get history) and handles class
-inheritance. The given extensions generate an anonymous "history" class which
+records for each change. The given extensions generate an anonymous "history" class which
represents historical versions of the target object.
Usage is illustrated via a unit test module ``test_versioning.py``, which can
@@ -57,4 +55,6 @@ can be applied::
SomeHistoryClass = SomeClass.__history_mapper__.class_
+.. autosource::
+
""" \ No newline at end of file
diff --git a/examples/versioning/history_meta.py b/examples/versioned_history/history_meta.py
index deef67a0a..8cb523434 100644
--- a/examples/versioning/history_meta.py
+++ b/examples/versioned_history/history_meta.py
@@ -1,3 +1,5 @@
+"""Versioned mixin class and other utilities."""
+
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.orm import mapper, class_mapper, attributes, object_mapper
from sqlalchemy.orm.exc import UnmappedClassError, UnmappedColumnError
@@ -25,6 +27,7 @@ def _history_mapper(local_mapper):
polymorphic_on = None
super_fks = []
+
if not super_mapper or local_mapper.local_table is not super_mapper.local_table:
cols = []
for column in local_mapper.local_table.c:
@@ -43,10 +46,10 @@ def _history_mapper(local_mapper):
polymorphic_on = col
if super_mapper:
- super_fks.append(('version', super_history_mapper.base_mapper.local_table.c.version))
- cols.append(Column('version', Integer, primary_key=True))
+ super_fks.append(('version', super_history_mapper.local_table.c.version))
+ cols.append(Column('version', Integer, primary_key=True, autoincrement=False))
else:
- cols.append(Column('version', Integer, primary_key=True))
+ cols.append(Column('version', Integer, primary_key=True, autoincrement=False))
if super_fks:
cols.append(ForeignKeyConstraint(*zip(*super_fks)))
@@ -181,4 +184,4 @@ def versioned_session(session):
for obj in versioned_objects(session.dirty):
create_version(obj, session)
for obj in versioned_objects(session.deleted):
- create_version(obj, session, deleted = True) \ No newline at end of file
+ create_version(obj, session, deleted = True)
diff --git a/examples/versioning/test_versioning.py b/examples/versioned_history/test_versioning.py
index 297d9e47e..906280555 100644
--- a/examples/versioning/test_versioning.py
+++ b/examples/versioned_history/test_versioning.py
@@ -1,40 +1,46 @@
+"""Unit tests illustrating usage of the ``history_meta.py`` module functions."""
+
from unittest import TestCase
from sqlalchemy.ext.declarative import declarative_base
from .history_meta import Versioned, versioned_session
from sqlalchemy import create_engine, Column, Integer, String, ForeignKey
-from sqlalchemy.orm import clear_mappers, sessionmaker, deferred, relationship
-from ._lib import ComparableEntity, eq_
+from sqlalchemy.orm import clear_mappers, Session, deferred, relationship
+from sqlalchemy.testing import AssertsCompiledSQL, eq_, assert_raises
+from sqlalchemy.testing.entities import BasicEntity, ComparableEntity
+from sqlalchemy.orm import exc as orm_exc
-engine = Session = None
+engine = None
def setup():
global engine
engine = create_engine('sqlite://', echo=True)
-class TestVersioning(TestCase):
+class TestVersioning(TestCase, AssertsCompiledSQL):
+ __dialect__ = 'default'
+
def setUp(self):
- global Base, Session, Versioned
- Base = declarative_base()
- Session = sessionmaker(engine)
- versioned_session(Session)
+ self.session = Session(engine)
+ self.Base = declarative_base()
+ versioned_session(self.session)
def tearDown(self):
+ self.session.close()
clear_mappers()
- Base.metadata.drop_all(engine)
+ self.Base.metadata.drop_all(engine)
def create_tables(self):
- Base.metadata.create_all(engine)
+ self.Base.metadata.create_all(engine)
def test_plain(self):
- class SomeClass(Versioned, Base, ComparableEntity):
+ class SomeClass(Versioned, self.Base, ComparableEntity):
__tablename__ = 'sometable'
id = Column(Integer, primary_key=True)
name = Column(String(50))
self.create_tables()
- sess = Session()
+ sess = self.session
sc = SomeClass(name='sc1')
sess.add(sc)
sess.commit()
@@ -90,15 +96,44 @@ class TestVersioning(TestCase):
]
)
+ def test_w_mapper_versioning(self):
+ class SomeClass(Versioned, self.Base, ComparableEntity):
+ __tablename__ = 'sometable'
+
+ id = Column(Integer, primary_key=True)
+ name = Column(String(50))
+
+ SomeClass.__mapper__.version_id_col = SomeClass.__table__.c.version
+
+ self.create_tables()
+ sess = self.session
+ sc = SomeClass(name='sc1')
+ sess.add(sc)
+ sess.commit()
+
+ s2 = Session(sess.bind)
+ sc2 = s2.query(SomeClass).first()
+ sc2.name = 'sc1modified'
+
+ sc.name = 'sc1modified_again'
+ sess.commit()
+
+ eq_(sc.version, 2)
+
+ assert_raises(
+ orm_exc.StaleDataError,
+ s2.flush
+ )
+
def test_from_null(self):
- class SomeClass(Versioned, Base, ComparableEntity):
+ class SomeClass(Versioned, self.Base, ComparableEntity):
__tablename__ = 'sometable'
id = Column(Integer, primary_key=True)
name = Column(String(50))
self.create_tables()
- sess = Session()
+ sess = self.session
sc = SomeClass()
sess.add(sc)
sess.commit()
@@ -111,7 +146,7 @@ class TestVersioning(TestCase):
def test_deferred(self):
"""test versioning of unloaded, deferred columns."""
- class SomeClass(Versioned, Base, ComparableEntity):
+ class SomeClass(Versioned, self.Base, ComparableEntity):
__tablename__ = 'sometable'
id = Column(Integer, primary_key=True)
@@ -119,7 +154,7 @@ class TestVersioning(TestCase):
data = deferred(Column(String(25)))
self.create_tables()
- sess = Session()
+ sess = self.session
sc = SomeClass(name='sc1', data='somedata')
sess.add(sc)
sess.commit()
@@ -142,7 +177,7 @@ class TestVersioning(TestCase):
def test_joined_inheritance(self):
- class BaseClass(Versioned, Base, ComparableEntity):
+ class BaseClass(Versioned, self.Base, ComparableEntity):
__tablename__ = 'basetable'
id = Column(Integer, primary_key=True)
@@ -169,7 +204,7 @@ class TestVersioning(TestCase):
__mapper_args__ = {'polymorphic_identity':'same'}
self.create_tables()
- sess = Session()
+ sess = self.session
sep1 = SubClassSeparatePk(name='sep1', subdata1='sep1subdata')
base1 = BaseClass(name='base1')
@@ -218,8 +253,82 @@ class TestVersioning(TestCase):
]
)
+ def test_joined_inheritance_multilevel(self):
+ class BaseClass(Versioned, self.Base, ComparableEntity):
+ __tablename__ = 'basetable'
+
+ id = Column(Integer, primary_key=True)
+ name = Column(String(50))
+ type = Column(String(20))
+
+ __mapper_args__ = {'polymorphic_on': type,
+ 'polymorphic_identity': 'base'}
+
+ class SubClass(BaseClass):
+ __tablename__ = 'subtable'
+
+ id = Column(Integer, primary_key=True)
+ base_id = Column(Integer, ForeignKey('basetable.id'))
+ subdata1 = Column(String(50))
+
+ __mapper_args__ = {'polymorphic_identity': 'sub'}
+
+ class SubSubClass(SubClass):
+ __tablename__ = 'subsubtable'
+
+ id = Column(Integer, ForeignKey('subtable.id'), primary_key=True)
+ subdata2 = Column(String(50))
+
+ __mapper_args__ = {'polymorphic_identity': 'subsub'}
+
+ self.create_tables()
+
+ SubSubHistory = SubSubClass.__history_mapper__.class_
+ sess = self.session
+ q = sess.query(SubSubHistory)
+ self.assert_compile(
+ q,
+ "SELECT subsubtable_history.id AS subsubtable_history_id, "
+ "subtable_history.id AS subtable_history_id, "
+ "basetable_history.id AS basetable_history_id, "
+ "basetable_history.name AS basetable_history_name, "
+ "basetable_history.type AS basetable_history_type, "
+ "subsubtable_history.version AS subsubtable_history_version, "
+ "subtable_history.version AS subtable_history_version, "
+ "basetable_history.version AS basetable_history_version, "
+ "subtable_history.base_id AS subtable_history_base_id, "
+ "subtable_history.subdata1 AS subtable_history_subdata1, "
+ "subsubtable_history.subdata2 AS subsubtable_history_subdata2 "
+ "FROM basetable_history "
+ "JOIN subtable_history "
+ "ON basetable_history.id = subtable_history.base_id "
+ "AND basetable_history.version = subtable_history.version "
+ "JOIN subsubtable_history ON subtable_history.id = "
+ "subsubtable_history.id AND subtable_history.version = subsubtable_history.version"
+ )
+
+ ssc = SubSubClass(name='ss1', subdata1='sd1', subdata2='sd2')
+ sess.add(ssc)
+ sess.commit()
+ eq_(
+ sess.query(SubSubHistory).all(),
+ []
+ )
+ ssc.subdata1 = 'sd11'
+ ssc.subdata2 = 'sd22'
+ sess.commit()
+ eq_(
+ sess.query(SubSubHistory).all(),
+ [SubSubHistory(name='ss1', subdata1='sd1',
+ subdata2='sd2', type='subsub', version=1)]
+ )
+ eq_(ssc, SubSubClass(name='ss1', subdata1='sd11',
+ subdata2='sd22', version=2))
+
+
+
def test_single_inheritance(self):
- class BaseClass(Versioned, Base, ComparableEntity):
+ class BaseClass(Versioned, self.Base, ComparableEntity):
__tablename__ = 'basetable'
id = Column(Integer, primary_key=True)
@@ -233,7 +342,7 @@ class TestVersioning(TestCase):
__mapper_args__ = {'polymorphic_identity':'sub'}
self.create_tables()
- sess = Session()
+ sess = self.session
b1 = BaseClass(name='b1')
sc = SubClass(name='s1', subname='sc1')
@@ -270,7 +379,7 @@ class TestVersioning(TestCase):
sess.flush()
def test_unique(self):
- class SomeClass(Versioned, Base, ComparableEntity):
+ class SomeClass(Versioned, self.Base, ComparableEntity):
__tablename__ = 'sometable'
id = Column(Integer, primary_key=True)
@@ -278,7 +387,7 @@ class TestVersioning(TestCase):
data = Column(String(50))
self.create_tables()
- sess = Session()
+ sess = self.session
sc = SomeClass(name='sc1', data='sc1')
sess.add(sc)
sess.commit()
@@ -295,12 +404,12 @@ class TestVersioning(TestCase):
def test_relationship(self):
- class SomeRelated(Base, ComparableEntity):
+ class SomeRelated(self.Base, ComparableEntity):
__tablename__ = 'somerelated'
id = Column(Integer, primary_key=True)
- class SomeClass(Versioned, Base, ComparableEntity):
+ class SomeClass(Versioned, self.Base, ComparableEntity):
__tablename__ = 'sometable'
id = Column(Integer, primary_key=True)
@@ -311,7 +420,7 @@ class TestVersioning(TestCase):
SomeClassHistory = SomeClass.__history_mapper__.class_
self.create_tables()
- sess = Session()
+ sess = self.session
sc = SomeClass(name='sc1')
sess.add(sc)
sess.commit()
@@ -343,7 +452,7 @@ class TestVersioning(TestCase):
def test_backref_relationship(self):
- class SomeRelated(Base, ComparableEntity):
+ class SomeRelated(self.Base, ComparableEntity):
__tablename__ = 'somerelated'
id = Column(Integer, primary_key=True)
@@ -351,13 +460,13 @@ class TestVersioning(TestCase):
related_id = Column(Integer, ForeignKey('sometable.id'))
related = relationship("SomeClass", backref='related')
- class SomeClass(Versioned, Base, ComparableEntity):
+ class SomeClass(Versioned, self.Base, ComparableEntity):
__tablename__ = 'sometable'
id = Column(Integer, primary_key=True)
self.create_tables()
- sess = Session()
+ sess = self.session
sc = SomeClass()
sess.add(sc)
sess.commit()
diff --git a/examples/versioned_rows/__init__.py b/examples/versioned_rows/__init__.py
new file mode 100644
index 000000000..7a4e89c33
--- /dev/null
+++ b/examples/versioned_rows/__init__.py
@@ -0,0 +1,7 @@
+"""
+Illustrates an extension which versions data by storing new rows for each change;
+that is, what would normally be an UPDATE becomes an INSERT.
+
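+E.g., roughly what the versioned_rows.py example in this package
+demonstrates::
+
+    e1 = Example(data='e1')
+    session.add(e1)
+    session.commit()        # INSERTs row 1
+
+    e1.data = 'e2'
+    session.commit()        # INSERTs row 2; row 1 is left intact
+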
+.. autosource::
+
+""" \ No newline at end of file
diff --git a/examples/versioned_rows/versioned_map.py b/examples/versioned_rows/versioned_map.py
new file mode 100644
index 000000000..774bfbe05
--- /dev/null
+++ b/examples/versioned_rows/versioned_map.py
@@ -0,0 +1,284 @@
+"""A variant of the versioned_rows example. Here
+we store a dictionary of key/value pairs, storing the k/v's in a
+"vertical" fashion where each key gets a row. The value is split out
+into two separate datatypes, string and int - the range of datatype
+storage can be adjusted for individual needs.
+
+Changes to the "data" attribute of a ConfigData object result in the
+ConfigData object being copied into a new one, and new associations to
+its data are created. Values which aren't changed between versions are
+referenced by both the former and the newer ConfigData object.
+Overall, only INSERT statements are emitted - no rows are UPDATed or
+DELETEd.
+
+An optional feature is also illustrated which associates individual
+key/value pairs with the ConfigData object in which they first
+originated. Since a new row is only persisted when a new value is
+created for a particular key, the recipe provides a way to query
+across the full series of changes which occurred for any particular
+key in the dictionary.
+
+The set of all ConfigData in a particular table represents a single
+series of versions. By adding additional columns to ConfigData, the
+system can be made to store multiple version streams distinguished by
+those additional values.
+
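+As a rough worked example (based on the demonstration at the bottom of
+this module): a ConfigData created with four keys, followed by a change
+to one key, produces two "config" rows, eight "config_value_association"
+rows and five "config_value" rows - the three unchanged values are
+shared by both versions.
+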
+"""
+
+from sqlalchemy import Column, String, Integer, ForeignKey, \
+ create_engine
+from sqlalchemy.orm.interfaces import SessionExtension
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.orm import attributes, relationship, backref, \
+ sessionmaker, make_transient, validates
+from sqlalchemy.ext.associationproxy import association_proxy
+from sqlalchemy.orm.collections import attribute_mapped_collection
+
+class VersionExtension(SessionExtension):
+ """Apply the new_version() method of objects which are
+ marked as dirty during a flush.
+
+ See http://www.sqlalchemy.org/trac/wiki/UsageRecipes/VersionedRows
+
+ """
+ def before_flush(self, session, flush_context, instances):
+ for instance in session.dirty:
+ if hasattr(instance, 'new_version') and \
+ session.is_modified(instance, passive=True):
+
+ # make it transient
+ instance.new_version(session)
+
+ # re-add
+ session.add(instance)
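+
+# Note: SessionExtension is a legacy hook; on SQLAlchemy 0.7 and later the
+# same behavior can be wired up with the event API instead.  A minimal
+# sketch, assuming the rest of this module stays unchanged:
+#
+#     from sqlalchemy import event
+#     from sqlalchemy.orm import Session as SessionClass
+#
+#     @event.listens_for(SessionClass, "before_flush")
+#     def before_flush(session, flush_context, instances):
+#         for instance in session.dirty:
+#             if hasattr(instance, 'new_version') and \
+#                     session.is_modified(instance, passive=True):
+#                 instance.new_version(session)
+#                 session.add(instance)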
+
+Base = declarative_base()
+
+class ConfigData(Base):
+ """Represent a series of key/value pairs.
+
+ ConfigData will generate a new version of itself
+ upon change.
+
+ The "data" dictionary provides access via
+ string name mapped to a string/int value.
+
+ """
+ __tablename__ = 'config'
+
+ id = Column(Integer, primary_key=True)
+ """Primary key column of this ConfigData."""
+
+ elements = relationship("ConfigValueAssociation",
+ collection_class=attribute_mapped_collection("name"),
+ backref=backref("config_data"),
+ lazy="subquery"
+ )
+ """Dictionary-backed collection of ConfigValueAssociation objects,
+ keyed to the name of the associated ConfigValue.
+
+ Note there's no "cascade" here. ConfigValueAssociation objects
+ are never deleted or changed.
+ """
+
+ def _new_value(name, value):
+ """Create a new entry for usage in the 'elements' dictionary."""
+ return ConfigValueAssociation(ConfigValue(name, value))
+
+ data = association_proxy("elements", "value", creator=_new_value)
+ """Proxy to the 'value' elements of each related ConfigValue,
+ via the 'elements' dictionary.
+ """
+
+ def __init__(self, data):
+ self.data = data
+
+ @validates('elements')
+ def _associate_with_element(self, key, element):
+ """Associate incoming ConfigValues with this
+ ConfigData, if not already associated.
+
+ This is an optional feature which allows
+ more comprehensive history tracking.
+
+ """
+ if element.config_value.originating_config is None:
+ element.config_value.originating_config = self
+ return element
+
+ def new_version(self, session):
+ # convert to an INSERT
+ make_transient(self)
+ self.id = None
+
+        # history of the 'elements' collection.
+ # this is a tuple of groups: (added, unchanged, deleted)
+ hist = attributes.get_history(self, 'elements')
+
+ # rewrite the 'elements' collection
+ # from scratch, removing all history
+ attributes.set_committed_value(self, 'elements', {})
+
+ # new elements in the "added" group
+ # are moved to our new collection.
+ for elem in hist.added:
+ self.elements[elem.name] = elem
+
+ # copy elements in the 'unchanged' group.
+ # the new ones associate with the new ConfigData,
+ # the old ones stay associated with the old ConfigData
+ for elem in hist.unchanged:
+ self.elements[elem.name] = ConfigValueAssociation(elem.config_value)
+
+ # we also need to expire changes on each ConfigValueAssociation
+ # that is to remain associated with the old ConfigData.
+ # Here, each one takes care of that in its new_version()
+ # method, though we could do that here as well.
+
+
+class ConfigValueAssociation(Base):
+ """Relate ConfigData objects to associated ConfigValue objects."""
+
+ __tablename__ = 'config_value_association'
+
+ config_id = Column(ForeignKey('config.id'), primary_key=True)
+ """Reference the primary key of the ConfigData object."""
+
+
+ config_value_id = Column(ForeignKey('config_value.id'), primary_key=True)
+ """Reference the primary key of hte ConfigValue object."""
+
+ config_value = relationship("ConfigValue", lazy="joined", innerjoin=True)
+ """Reference the related ConfigValue object."""
+
+ def __init__(self, config_value):
+ self.config_value = config_value
+
+ def new_version(self, session):
+ """Expire all pending state, as ConfigValueAssociation is immutable."""
+
+ session.expire(self)
+
+ @property
+ def name(self):
+ return self.config_value.name
+
+ @property
+ def value(self):
+ return self.config_value.value
+
+ @value.setter
+ def value(self, value):
+ """Intercept set events.
+
+ Create a new ConfigValueAssociation upon change,
+ replacing this one in the parent ConfigData's dictionary.
+
+ If no net change, do nothing.
+
+ """
+ if value != self.config_value.value:
+ self.config_data.elements[self.name] = \
+ ConfigValueAssociation(
+ ConfigValue(self.config_value.name, value)
+ )
+
+class ConfigValue(Base):
+ """Represent an individual key/value pair at a given point in time.
+
+ ConfigValue is immutable.
+
+ """
+ __tablename__ = 'config_value'
+
+ id = Column(Integer, primary_key=True)
+ name = Column(String(50), nullable=False)
+ originating_config_id = Column(Integer, ForeignKey('config.id'),
+ nullable=False)
+ int_value = Column(Integer)
+ string_value = Column(String(255))
+
+ def __init__(self, name, value):
+ self.name = name
+ self.value = value
+
+ originating_config = relationship("ConfigData")
+ """Reference to the originating ConfigData.
+
+ This is optional, and allows history tracking of
+ individual values.
+
+ """
+
+ def new_version(self, session):
+ raise NotImplementedError("ConfigValue is immutable.")
+
+ @property
+ def value(self):
+ for k in ('int_value', 'string_value'):
+ v = getattr(self, k)
+ if v is not None:
+ return v
+ else:
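+            # for/else: reached only when neither column holds a value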
+ return None
+
+ @value.setter
+ def value(self, value):
+ if isinstance(value, int):
+ self.int_value = value
+ self.string_value = None
+ else:
+ self.string_value = str(value)
+ self.int_value = None
+
+if __name__ == '__main__':
+ engine = create_engine('sqlite://', echo=True)
+ Base.metadata.create_all(engine)
+ Session = sessionmaker(bind=engine, extension=VersionExtension())
+
+ sess = Session()
+
+ config = ConfigData({
+ 'user_name':'twitter',
+ 'hash_id':'4fedffca37eaf',
+ 'x':27,
+ 'y':450
+ })
+
+ sess.add(config)
+ sess.commit()
+ version_one = config.id
+
+ config.data['user_name'] = 'yahoo'
+ sess.commit()
+
+ version_two = config.id
+
+ assert version_one != version_two
+
+ # two versions have been created.
+
+ assert config.data == {
+ 'user_name':'yahoo',
+ 'hash_id':'4fedffca37eaf',
+ 'x':27,
+ 'y':450
+ }
+
+ old_config = sess.query(ConfigData).get(version_one)
+ assert old_config.data == {
+ 'user_name':'twitter',
+ 'hash_id':'4fedffca37eaf',
+ 'x':27,
+ 'y':450
+ }
+
+ # the history of any key can be acquired using
+ # the originating_config_id attribute
+ history = sess.query(ConfigValue).\
+ filter(ConfigValue.name=='user_name').\
+ order_by(ConfigValue.originating_config_id).\
+ all()
+
+ assert [(h.value, h.originating_config_id) for h in history] == \
+ [('twitter', version_one), ('yahoo', version_two)]
diff --git a/examples/versioned_rows/versioned_rows.py b/examples/versioned_rows/versioned_rows.py
new file mode 100644
index 000000000..30acf4e0d
--- /dev/null
+++ b/examples/versioned_rows/versioned_rows.py
@@ -0,0 +1,105 @@
+"""Illustrates a method to intercept changes on objects, turning
+an UPDATE statement on a single row into an INSERT statement, so that a new
+row is inserted with the new data, keeping the old row intact.
+
+"""
+from sqlalchemy.orm import *
+from sqlalchemy import *
+from sqlalchemy.orm.interfaces import SessionExtension
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.orm import attributes
+
+class Versioned(object):
+ def new_version(self, session):
+ # if on SQLA 0.6.1 or earlier,
+ # make sure 'id' isn't expired.
+ # self.id
+
+ # make us transient (removes persistent
+ # identity).
+ make_transient(self)
+
+ # set 'id' to None.
+ # a new PK will be generated on INSERT.
+ self.id = None
+
+class VersionExtension(SessionExtension):
+ def before_flush(self, session, flush_context, instances):
+ for instance in session.dirty:
+ if not isinstance(instance, Versioned):
+ continue
+ if not session.is_modified(instance, passive=True):
+ continue
+
+ if not attributes.instance_state(instance).has_identity:
+ continue
+
+ # make it transient
+ instance.new_version(session)
+ # re-add
+ session.add(instance)
+
+Base = declarative_base()
+
+engine = create_engine('sqlite://', echo=True)
+
+Session = sessionmaker(engine, extension=[VersionExtension()])
+
+# example 1, simple versioning
+
+class Example(Versioned, Base):
+ __tablename__ = 'example'
+ id = Column(Integer, primary_key=True)
+ data = Column(String)
+
+Base.metadata.create_all(engine)
+
+session = Session()
+e1 = Example(data='e1')
+session.add(e1)
+session.commit()
+
+e1.data = 'e2'
+session.commit()
+
+assert session.query(Example.id, Example.data).order_by(Example.id).all() == \
+ [(1, 'e1'), (2, 'e2')]
+
+# example 2, versioning with a parent
+
+class Parent(Base):
+ __tablename__ = 'parent'
+ id = Column(Integer, primary_key=True)
+ child_id = Column(Integer, ForeignKey('child.id'))
+ child = relationship("Child", backref=backref('parent', uselist=False))
+
+class Child(Versioned, Base):
+ __tablename__ = 'child'
+
+ id = Column(Integer, primary_key=True)
+ data = Column(String)
+
+ def new_version(self, session):
+ # expire parent's reference to us
+ session.expire(self.parent, ['child'])
+
+ # create new version
+ Versioned.new_version(self, session)
+
+ # re-add ourselves to the parent
+ self.parent.child = self
+
+Base.metadata.create_all(engine)
+
+session = Session()
+
+p1 = Parent(child=Child(data='c1'))
+session.add(p1)
+session.commit()
+
+p1.child.data = 'c2'
+session.commit()
+
+assert p1.child_id == 2
+assert session.query(Child.id, Child.data).order_by(Child.id).all() == \
+ [(1, 'c1'), (2, 'c2')] \ No newline at end of file
diff --git a/examples/versioning/_lib.py b/examples/versioning/_lib.py
deleted file mode 100644
index 9132f9b35..000000000
--- a/examples/versioning/_lib.py
+++ /dev/null
@@ -1,96 +0,0 @@
-"""copy of ComparableEntity and eq_() from test.lib.
-
-This is just to support running the example outside of
-the SQLA testing environment which is no longer part of
-SQLAlchemy as of 0.7.
-
-"""
-
-import sqlalchemy as sa
-from sqlalchemy import exc as sa_exc
-
-
-def eq_(a, b, msg=None):
- """Assert a == b, with repr messaging on failure."""
- assert a == b, msg or "%r != %r" % (a, b)
-
-_repr_stack = set()
-class BasicEntity(object):
- def __init__(self, **kw):
- for key, value in kw.items():
- setattr(self, key, value)
-
- def __repr__(self):
- if id(self) in _repr_stack:
- return object.__repr__(self)
- _repr_stack.add(id(self))
- try:
- return "%s(%s)" % (
- (self.__class__.__name__),
- ', '.join(["%s=%r" % (key, getattr(self, key))
- for key in sorted(self.__dict__.keys())
- if not key.startswith('_')]))
- finally:
- _repr_stack.remove(id(self))
-
-_recursion_stack = set()
-class ComparableEntity(BasicEntity):
- def __hash__(self):
- return hash(self.__class__)
-
- def __ne__(self, other):
- return not self.__eq__(other)
-
- def __eq__(self, other):
- """'Deep, sparse compare.
-
- Deeply compare two entities, following the non-None attributes of the
- non-persisted object, if possible.
-
- """
- if other is self:
- return True
- elif not self.__class__ == other.__class__:
- return False
-
- if id(self) in _recursion_stack:
- return True
- _recursion_stack.add(id(self))
-
- try:
- # pick the entity thats not SA persisted as the source
- try:
- self_key = sa.orm.attributes.instance_state(self).key
- except sa.orm.exc.NO_STATE:
- self_key = None
-
- if other is None:
- a = self
- b = other
- elif self_key is not None:
- a = other
- b = self
- else:
- a = self
- b = other
-
- for attr in a.__dict__.keys():
- if attr.startswith('_'):
- continue
- value = getattr(a, attr)
-
- try:
- # handle lazy loader errors
- battr = getattr(b, attr)
- except (AttributeError, sa_exc.UnboundExecutionError):
- return False
-
- if hasattr(value, '__iter__'):
- if list(value) != list(battr):
- return False
- else:
- if value is not None and value != battr:
- return False
- return True
- finally:
- _recursion_stack.remove(id(self))
diff --git a/examples/vertical/__init__.py b/examples/vertical/__init__.py
index 6073da91c..0b69f32ea 100644
--- a/examples/vertical/__init__.py
+++ b/examples/vertical/__init__.py
@@ -29,5 +29,6 @@ Example::
AnimalFact.value == True))))
print 'weasel-like animals', q.all()
+.. autosource::
""" \ No newline at end of file
diff --git a/examples/vertical/dictlike-polymorphic.py b/examples/vertical/dictlike-polymorphic.py
index 872a7c52e..e3d5ba578 100644
--- a/examples/vertical/dictlike-polymorphic.py
+++ b/examples/vertical/dictlike-polymorphic.py
@@ -1,10 +1,7 @@
"""Mapping a polymorphic-valued vertical table as a dictionary.
-This example illustrates accessing and modifying a "vertical" (or
-"properties", or pivoted) table via a dict-like interface. The 'dictlike.py'
-example explains the basics of vertical tables and the general approach. This
-example adds a twist- the vertical table holds several "value" columns, one
-for each type of data that can be stored. For example::
+Builds upon the dictlike.py example to also add differently typed
+columns to the "fact" table, e.g.::
Table('properties', metadata
Column('owner_id', Integer, ForeignKey('owner.id'),
@@ -25,80 +22,43 @@ we'll use a @hybrid_property to build a smart '.value' attribute that wraps up
reading and writing those various '_value' columns and keeps the '.type' up to
date.
-Class decorators are used, so Python 2.6 or greater is required.
"""
from sqlalchemy.orm.interfaces import PropComparator
-from sqlalchemy.orm import comparable_property
from sqlalchemy.ext.hybrid import hybrid_property
-
-# Using the VerticalPropertyDictMixin from the base example
-from .dictlike import VerticalPropertyDictMixin
+from sqlalchemy import event
+from sqlalchemy import literal_column
+from .dictlike import ProxiedDictMixin
class PolymorphicVerticalProperty(object):
"""A key/value pair with polymorphic value storage.
- Supplies a smart 'value' attribute that provides convenient read/write
- access to the row's current value without the caller needing to worry
- about the 'type' attribute or multiple columns.
-
- The 'value' attribute can also be used for basic comparisons in queries,
- allowing the row's logical value to be compared without foreknowledge of
- which column it might be in. This is not going to be a very efficient
- operation on the database side, but it is possible. If you're mapping to
- an existing database and you have some rows with a value of str('1') and
- others of int(1), then this could be useful.
-
- Subclasses must provide a 'type_map' class attribute with the following
- form::
-
- type_map = {
- <python type> : ('type column value', 'column name'),
- # ...
- }
-
- For example,::
-
- type_map = {
- int: ('integer', 'integer_value'),
- str: ('varchar', 'varchar_value'),
- }
+ The class which is mapped should indicate typing information
+ within the "info" dictionary of mapped Column objects; see
+ the AnimalFact mapping below for an example.
- Would indicate that a Python int value should be stored in the
- 'integer_value' column and the .type set to 'integer'. Conversely, if the
- value of '.type' is 'integer, then the 'integer_value' column is consulted
- for the current value.
"""
- type_map = {
- type(None): (None, None),
- }
-
def __init__(self, key, value=None):
self.key = key
self.value = value
@hybrid_property
def value(self):
- for discriminator, field in self.type_map.values():
- if self.type == discriminator:
- return getattr(self, field)
- return None
+ fieldname, discriminator = self.type_map[self.type]
+ if fieldname is None:
+ return None
+ else:
+ return getattr(self, fieldname)
@value.setter
def value(self, value):
py_type = type(value)
- if py_type not in self.type_map:
- raise TypeError(py_type)
-
- for field_type in self.type_map:
- discriminator, field = self.type_map[field_type]
- field_value = None
- if py_type == field_type:
- self.type = discriminator
- field_value = value
- if field is not None:
- setattr(self, field, field_value)
+ fieldname, discriminator = self.type_map[py_type]
+
+ self.type = discriminator
+ if fieldname is not None:
+ setattr(self, fieldname, value)
@value.deleter
def value(self):
@@ -113,9 +73,14 @@ class PolymorphicVerticalProperty(object):
self.cls = cls
def _case(self):
- whens = [(text("'%s'" % p[0]), cast(getattr(self.cls, p[1]), String))
- for p in self.cls.type_map.values()
- if p[1] is not None]
+ pairs = set(self.cls.type_map.values())
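+        # for the AnimalFact mapping below, the CASE expression produced
+        # here renders roughly as (illustrative sketch; WHEN order may vary):
+        #   CASE animal_fact.type
+        #     WHEN 'integer' THEN CAST(animal_fact.int_value AS VARCHAR)
+        #     WHEN 'string' THEN CAST(animal_fact.char_value AS VARCHAR)
+        #     WHEN 'boolean' THEN CAST(animal_fact.boolean_value AS VARCHAR)
+        #   ELSE NULL END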
+ whens = [
+ (
+ literal_column("'%s'" % discriminator),
+ cast(getattr(self.cls, attribute), String)
+ ) for attribute, discriminator in pairs
+ if attribute is not None
+ ]
return case(whens, self.cls.type, null())
def __eq__(self, other):
return self._case() == cast(other, String)
@@ -125,69 +90,78 @@ class PolymorphicVerticalProperty(object):
def __repr__(self):
return '<%s %r=%r>' % (self.__class__.__name__, self.key, self.value)
+@event.listens_for(PolymorphicVerticalProperty, "mapper_configured", propagate=True)
+def on_new_class(mapper, cls_):
+ """Look for Column objects with type info in them, and work up
+ a lookup table."""
+
+ info_dict = {}
+ info_dict[type(None)] = (None, 'none')
+ info_dict['none'] = (None, 'none')
+
+ for k in mapper.c.keys():
+ col = mapper.c[k]
+ if 'type' in col.info:
+ python_type, discriminator = col.info['type']
+ info_dict[python_type] = (k, discriminator)
+ info_dict[discriminator] = (k, discriminator)
+ cls_.type_map = info_dict
if __name__ == '__main__':
- from sqlalchemy import (MetaData, Table, Column, Integer, Unicode,
- ForeignKey, UnicodeText, and_, not_, or_, String, Boolean, cast, text,
+ from sqlalchemy import (Column, Integer, Unicode,
+ ForeignKey, UnicodeText, and_, or_, String, Boolean, cast,
null, case, create_engine)
- from sqlalchemy.orm import mapper, relationship, Session
+ from sqlalchemy.orm import relationship, Session
from sqlalchemy.orm.collections import attribute_mapped_collection
+ from sqlalchemy.ext.declarative import declarative_base
+ from sqlalchemy.ext.associationproxy import association_proxy
+
+ Base = declarative_base()
- metadata = MetaData()
-
- animals = Table('animal', metadata,
- Column('id', Integer, primary_key=True),
- Column('name', Unicode(100)))
-
- chars = Table('facts', metadata,
- Column('animal_id', Integer, ForeignKey('animal.id'),
- primary_key=True),
- Column('key', Unicode(64), primary_key=True),
- Column('type', Unicode(16), default=None),
- Column('int_value', Integer, default=None),
- Column('char_value', UnicodeText, default=None),
- Column('boolean_value', Boolean, default=None))
-
- class AnimalFact(PolymorphicVerticalProperty):
- type_map = {
- int: ('integer', 'int_value'),
- str: ('char', 'char_value'),
- bool: ('boolean', 'boolean_value'),
- type(None): (None, None),
- }
-
- class Animal(VerticalPropertyDictMixin):
- """An animal.
-
- Animal facts are available via the 'facts' property or by using
- dict-like accessors on an Animal instance::
-
- cat['color'] = 'calico'
- # or, equivalently:
- cat.facts['color'] = AnimalFact('color', 'calico')
- """
- _property_type = AnimalFact
- _property_mapping = 'facts'
+ class AnimalFact(PolymorphicVerticalProperty, Base):
+ """A fact about an animal."""
+
+ __tablename__ = 'animal_fact'
+
+ animal_id = Column(ForeignKey('animal.id'), primary_key=True)
+ key = Column(Unicode(64), primary_key=True)
+ type = Column(Unicode(16))
+
+ # add information about storage for different types
+ # in the info dictionary of Columns
+ int_value = Column(Integer, info={'type': (int, 'integer')})
+ char_value = Column(UnicodeText, info={'type': (str, 'string')})
+ boolean_value = Column(Boolean, info={'type': (bool, 'boolean')})
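+
+        # given the info entries above, the "mapper_configured" listener
+        # defined earlier builds a type_map for this class roughly like
+        # (illustrative sketch):
+        #   {int: ('int_value', 'integer'), 'integer': ('int_value', 'integer'),
+        #    str: ('char_value', 'string'), 'string': ('char_value', 'string'),
+        #    bool: ('boolean_value', 'boolean'),
+        #    'boolean': ('boolean_value', 'boolean'),
+        #    type(None): (None, 'none'), 'none': (None, 'none')}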
+
+ class Animal(ProxiedDictMixin._base_class(Base)):
+ """an Animal"""
+
+ __tablename__ = 'animal'
+
+ id = Column(Integer, primary_key=True)
+ name = Column(Unicode(100))
+
+ facts = relationship("AnimalFact",
+ collection_class=attribute_mapped_collection('key'))
+
+ _proxied = association_proxy("facts", "value",
+ creator=
+ lambda key, value: AnimalFact(key=key, value=value))
def __init__(self, name):
self.name = name
def __repr__(self):
- return '<%s %r>' % (self.__class__.__name__, self.name)
-
+ return "Animal(%r)" % self.name
- mapper(Animal, animals, properties={
- 'facts': relationship(
- AnimalFact, backref='animal',
- collection_class=attribute_mapped_collection('key')),
- })
-
- mapper(AnimalFact, chars)
+        @classmethod
+        def with_characteristic(cls, key, value):
+            return cls.facts.any(key=key, value=value)
engine = create_engine('sqlite://', echo=True)
- metadata.create_all(engine)
+ Base.metadata.create_all(engine)
session = Session(engine)
stoat = Animal('stoat')
@@ -227,30 +201,24 @@ if __name__ == '__main__':
AnimalFact.value == True))))
print('weasel-like animals', q.all())
- # Save some typing by wrapping that up in a function:
- with_characteristic = lambda key, value: and_(AnimalFact.key == key,
- AnimalFact.value == value)
-
q = (session.query(Animal).
- filter(Animal.facts.any(
- with_characteristic('weasel-like', True))))
+ filter(Animal.with_characteristic('weasel-like', True)))
print('weasel-like animals again', q.all())
q = (session.query(Animal).
- filter(Animal.facts.any(with_characteristic('poisonous', False))))
+ filter(Animal.with_characteristic('poisonous', False)))
print('animals with poisonous=False', q.all())
q = (session.query(Animal).
- filter(or_(Animal.facts.any(
- with_characteristic('poisonous', False)),
- not_(Animal.facts.any(AnimalFact.key == 'poisonous')))))
+ filter(or_(
+ Animal.with_characteristic('poisonous', False),
+ ~Animal.facts.any(AnimalFact.key == 'poisonous')
+ )
+ )
+ )
print('non-poisonous animals', q.all())
q = (session.query(Animal).
filter(Animal.facts.any(AnimalFact.value == 5)))
print('any animal with a .value of 5', q.all())
- # Facts can be queried as well.
- q = (session.query(AnimalFact).
- filter(with_characteristic('cuteness', 'very cute')))
- print(q.all())
diff --git a/examples/vertical/dictlike.py b/examples/vertical/dictlike.py
index f17d1acc8..08989d8c2 100644
--- a/examples/vertical/dictlike.py
+++ b/examples/vertical/dictlike.py
@@ -30,150 +30,83 @@ accessing them like a Python dict can be very convenient. The example below
can be used with many common vertical schemas as-is or with minor adaptations.
"""
+from __future__ import unicode_literals
-class VerticalProperty(object):
- """A key/value pair.
-
- This class models rows in the vertical table.
- """
-
- def __init__(self, key, value):
- self.key = key
- self.value = value
-
- def __repr__(self):
- return '<%s %r=%r>' % (self.__class__.__name__, self.key, self.value)
-
-
-class VerticalPropertyDictMixin(object):
+class ProxiedDictMixin(object):
"""Adds obj[key] access to a mapped class.
- This is a mixin class. It can be inherited from directly, or included
- with multiple inheritence.
-
- Classes using this mixin must define two class properties::
-
- _property_type:
- The mapped type of the vertical key/value pair instances. Will be
- invoked with two positional arugments: key, value
-
- _property_mapping:
- A string, the name of the Python attribute holding a dict-based
- relationship of _property_type instances.
-
- Using the VerticalProperty class above as an example,::
-
- class MyObj(VerticalPropertyDictMixin):
- _property_type = VerticalProperty
- _property_mapping = 'props'
-
- mapper(MyObj, sometable, properties={
- 'props': relationship(VerticalProperty,
- collection_class=attribute_mapped_collection('key'))})
-
- Dict-like access to MyObj is proxied through to the 'props' relationship::
-
- myobj['key'] = 'value'
- # ...is shorthand for:
- myobj.props['key'] = VerticalProperty('key', 'value')
-
- myobj['key'] = 'updated value']
- # ...is shorthand for:
- myobj.props['key'].value = 'updated value'
-
- print myobj['key']
- # ...is shorthand for:
- print myobj.props['key'].value
+    This class proxies dictionary access to an attribute
+    called ``_proxied``.  The inheriting class must provide a
+    ``_proxied`` attribute which refers to a dictionary-like collection.
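+
+    A rough sketch of the behavior, where ``obj`` is an instance of a
+    class using this mixin::
+
+        obj["color"]            # reads obj._proxied["color"]
+        obj["color"] = "brown"  # writes obj._proxied["color"]
+        del obj["color"]        # removes the key from obj._proxied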
"""
- _property_type = VerticalProperty
- _property_mapping = None
+ def __len__(self):
+ return len(self._proxied)
- __map = property(lambda self: getattr(self, self._property_mapping))
+ def __iter__(self):
+ return iter(self._proxied)
def __getitem__(self, key):
- return self.__map[key].value
-
- def __setitem__(self, key, value):
- property = self.__map.get(key, None)
- if property is None:
- self.__map[key] = self._property_type(key, value)
- else:
- property.value = value
-
- def __delitem__(self, key):
- del self.__map[key]
+ return self._proxied[key]
def __contains__(self, key):
- return key in self.__map
-
- # Implement other dict methods to taste. Here are some examples:
- def keys(self):
- return self.__map.keys()
+ return key in self._proxied
- def values(self):
- return [prop.value for prop in self.__map.values()]
-
- def items(self):
- return [(key, prop.value) for key, prop in self.__map.items()]
+ def __setitem__(self, key, value):
+ self._proxied[key] = value
- def __iter__(self):
- return iter(self.keys())
+ def __delitem__(self, key):
+ del self._proxied[key]
if __name__ == '__main__':
- from sqlalchemy import (MetaData, Table, Column, Integer, Unicode,
- ForeignKey, UnicodeText, and_, not_, create_engine)
- from sqlalchemy.orm import mapper, relationship, Session
+ from sqlalchemy import (Column, Integer, Unicode,
+ ForeignKey, UnicodeText, and_, create_engine)
+ from sqlalchemy.orm import relationship, Session
from sqlalchemy.orm.collections import attribute_mapped_collection
+ from sqlalchemy.ext.declarative import declarative_base
+ from sqlalchemy.ext.associationproxy import association_proxy
- metadata = MetaData()
+ Base = declarative_base()
- # Here we have named animals, and a collection of facts about them.
- animals = Table('animal', metadata,
- Column('id', Integer, primary_key=True),
- Column('name', Unicode(100)))
+ class AnimalFact(Base):
+ """A fact about an animal."""
- facts = Table('facts', metadata,
- Column('animal_id', Integer, ForeignKey('animal.id'),
- primary_key=True),
- Column('key', Unicode(64), primary_key=True),
- Column('value', UnicodeText, default=None),)
+ __tablename__ = 'animal_fact'
- class AnimalFact(VerticalProperty):
- """A fact about an animal."""
+ animal_id = Column(ForeignKey('animal.id'), primary_key=True)
+ key = Column(Unicode(64), primary_key=True)
+ value = Column(UnicodeText)
- class Animal(VerticalPropertyDictMixin):
- """An animal.
+ class Animal(ProxiedDictMixin, Base):
+ """an Animal"""
- Animal facts are available via the 'facts' property or by using
- dict-like accessors on an Animal instance::
+ __tablename__ = 'animal'
- cat['color'] = 'calico'
- # or, equivalently:
- cat.facts['color'] = AnimalFact('color', 'calico')
- """
+ id = Column(Integer, primary_key=True)
+ name = Column(Unicode(100))
- _property_type = AnimalFact
- _property_mapping = 'facts'
+ facts = relationship("AnimalFact",
+ collection_class=attribute_mapped_collection('key'))
+
+ _proxied = association_proxy("facts", "value",
+ creator=
+ lambda key, value: AnimalFact(key=key, value=value))
def __init__(self, name):
self.name = name
def __repr__(self):
- return '<%s %r>' % (self.__class__.__name__, self.name)
-
+ return "Animal(%r)" % self.name
- mapper(Animal, animals, properties={
- 'facts': relationship(
- AnimalFact, backref='animal',
- collection_class=attribute_mapped_collection('key')),
- })
- mapper(AnimalFact, facts)
+        @classmethod
+        def with_characteristic(cls, key, value):
+            return cls.facts.any(key=key, value=value)
engine = create_engine("sqlite://")
- metadata.create_all(engine)
+ Base.metadata.create_all(engine)
+
session = Session(bind=engine)
stoat = Animal('stoat')
@@ -194,9 +127,6 @@ if __name__ == '__main__':
critter['cuteness'] = 'very'
print('changing cuteness:')
- engine.echo = True
- session.commit()
- engine.echo = False
marten = Animal('marten')
marten['color'] = 'brown'
@@ -212,7 +142,6 @@ if __name__ == '__main__':
loris['cuteness'] = 'fairly'
loris['poisonous-part'] = 'elbows'
session.add(loris)
- session.commit()
q = (session.query(Animal).
filter(Animal.facts.any(
@@ -220,27 +149,17 @@ if __name__ == '__main__':
AnimalFact.value == 'reddish'))))
print('reddish animals', q.all())
- # Save some typing by wrapping that up in a function:
- with_characteristic = lambda key, value: and_(AnimalFact.key == key,
- AnimalFact.value == value)
-
- q = (session.query(Animal).
- filter(Animal.facts.any(
- with_characteristic('color', 'brown'))))
+ q = session.query(Animal).\
+ filter(Animal.with_characteristic("color", 'brown'))
print('brown animals', q.all())
- q = (session.query(Animal).
- filter(not_(Animal.facts.any(
- with_characteristic('poisonous-part', 'elbows')))))
+ q = session.query(Animal).\
+ filter(~Animal.with_characteristic("poisonous-part", 'elbows'))
print('animals without poisonous-part == elbows', q.all())
q = (session.query(Animal).
- filter(Animal.facts.any(AnimalFact.value == 'somewhat')))
+ filter(Animal.facts.any(value='somewhat')))
print('any animal with any .value of "somewhat"', q.all())
- # Facts can be queried as well.
- q = (session.query(AnimalFact).
- filter(with_characteristic('cuteness', 'very')))
- print('just the facts', q.all())