summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorMike Bayer <mike_mp@zzzcomputing.com>2012-10-21 16:54:42 -0400
committerMike Bayer <mike_mp@zzzcomputing.com>2012-10-21 16:54:42 -0400
commit39d17c76df542d0040c2c8db2d2e3dc897b5cce5 (patch)
treef5aeba8be0f61c0db8d5ba0e76efdaa593cd85c4
parentf2bc0ddcb496e6a0cb0a0ad88c7c055dbf0c11a7 (diff)
downloadsqlalchemy-39d17c76df542d0040c2c8db2d2e3dc897b5cce5.tar.gz
- converted beaker demo to dogpile.cache, [ticket:2589]
-rwxr-xr-x.hgignore2
-rw-r--r--doc/build/changelog/changelog_08.rst94
-rw-r--r--doc/build/orm/examples.rst8
-rw-r--r--doc/build/orm/session.rst2
-rw-r--r--examples/beaker_caching/caching_query.py279
-rw-r--r--examples/beaker_caching/environment.py67
-rw-r--r--examples/dogpile_caching/__init__.py (renamed from examples/beaker_caching/__init__.py)38
-rw-r--r--examples/dogpile_caching/advanced.py (renamed from examples/beaker_caching/advanced.py)2
-rw-r--r--examples/dogpile_caching/caching_query.py255
-rw-r--r--examples/dogpile_caching/environment.py83
-rw-r--r--examples/dogpile_caching/fixture_data.py (renamed from examples/beaker_caching/fixture_data.py)9
-rw-r--r--examples/dogpile_caching/helloworld.py (renamed from examples/beaker_caching/helloworld.py)14
-rw-r--r--examples/dogpile_caching/local_session_caching.py (renamed from examples/beaker_caching/local_session_caching.py)69
-rw-r--r--examples/dogpile_caching/model.py (renamed from examples/beaker_caching/model.py)6
-rw-r--r--examples/dogpile_caching/relation_caching.py (renamed from examples/beaker_caching/relation_caching.py)9
15 files changed, 478 insertions, 459 deletions
diff --git a/.hgignore b/.hgignore
index b6d496222..6e70f9319 100755
--- a/.hgignore
+++ b/.hgignore
@@ -8,7 +8,7 @@ syntax:regexp
.so$
.egg-info
.*,cover
-^beaker_data/
+^dogpile_data/
.un~
\.coverage
\.DS_Store
diff --git a/doc/build/changelog/changelog_08.rst b/doc/build/changelog/changelog_08.rst
index 40487001a..660154b72 100644
--- a/doc/build/changelog/changelog_08.rst
+++ b/doc/build/changelog/changelog_08.rst
@@ -3,14 +3,28 @@
0.8 Changelog
==============
-
+
.. changelog::
:version: 0.8.0b1
- :released:
+ :released:
+
+ .. change::
+ :tags: examples
+ :tickets: 2589
+
+ The Beaker caching example has been converted
+ to use `dogpile.cache <http://dogpilecache.readthedocs.org/>`_.
+ This is a new caching library written by the same
+ creator of Beaker's caching internals, and represents a
+ vastly improved, simplified, and modernized system of caching.
+
+ .. seealso::
+
+ :ref:`examples_caching`
.. change::
:tags: general
- :tickets:
+ :tickets:
SQLAlchemy 0.8 now targets Python 2.5 and
above. Python 2.4 is no longer supported.
@@ -42,7 +56,7 @@
.. change::
:tags: orm, moved
- :tickets:
+ :tickets:
The InstrumentationManager interface
and the entire related system of alternate
@@ -193,7 +207,7 @@
.. change::
:tags: orm, feature
- :tickets:
+ :tickets:
Added support for .info dictionary argument to
column_property(), relationship(), composite().
@@ -219,7 +233,7 @@
.. change::
:tags: orm, feature
- :tickets:
+ :tickets:
The Query.update() method is now
more lenient as to the table
@@ -238,7 +252,7 @@
.. change::
:tags: orm, feature
- :tickets:
+ :tickets:
New session events after_transaction_create
and after_transaction_end
@@ -350,7 +364,7 @@
.. change::
:tags: orm, feature
- :tickets:
+ :tickets:
The Session will produce warnings
when unsupported methods are used inside the
@@ -542,7 +556,7 @@
.. change::
:tags: orm, bug
- :tickets:
+ :tickets:
An error is emitted when uselist=False
is combined with a "dynamic" loader.
@@ -550,19 +564,19 @@
.. change::
:tags: removed, orm
- :tickets:
+ :tickets:
Deprecated identifiers removed:
-
+
* allow_null_pks mapper() argument
(use allow_partial_pks)
-
+
* _get_col_to_prop() mapper method
(use get_property_by_column())
-
+
* dont_load argument to Session.merge()
(use load=True)
-
+
* sqlalchemy.orm.shard module
(use sqlalchemy.ext.horizontal_shard)
@@ -589,7 +603,7 @@
.. change::
:tags: engine, feature
- :tickets:
+ :tickets:
The libraries used by the test suite
have been moved around a bit so that they are
@@ -608,7 +622,7 @@
.. change::
:tags: engine, bug
- :tickets:
+ :tickets:
The Inspector.get_table_names()
order_by="foreign_key" feature now sorts
@@ -649,7 +663,7 @@
.. change::
:tags: engine, feature
- :tickets:
+ :tickets:
Various API tweaks to the "dialect"
API to better support highly specialized
@@ -677,7 +691,7 @@
.. change::
:tags: engine, bug
- :tickets:
+ :tickets:
The autoload_replace flag on Table,
when False, will cause any reflected foreign key
@@ -701,7 +715,7 @@
.. change::
:tags: engine, feature
- :tickets:
+ :tickets:
New C extension module "utils" has
been added for additional function speedups
@@ -709,7 +723,7 @@
.. change::
:tags: engine
- :tickets:
+ :tickets:
ResultProxy.last_inserted_ids is removed,
replaced by inserted_primary_key.
@@ -748,7 +762,7 @@
.. change::
:tags: feature, sql
- :tickets:
+ :tickets:
The Core oeprator system now includes
the `getitem` operator, i.e. the bracket
@@ -761,7 +775,7 @@
operator schemes. `lshift` (<<)
and `rshift` (>>) are also supported as
optional operators.
-
+
Note that this change has the effect that
descriptor-based __getitem__ schemes used by
the ORM in conjunction with synonym() or other
@@ -796,14 +810,14 @@
.. change::
:tags: change, sql
- :tickets:
+ :tickets:
The Text() type renders the length
given to it, if a length was specified.
.. change::
:tags: feature, sql
- :tickets:
+ :tickets:
Custom unary operators can now be
used by combining operators.custom_op() with
@@ -821,7 +835,7 @@
.. change::
:tags: feature, sql
- :tickets:
+ :tickets:
Enhanced GenericFunction and func.*
to allow for user-defined GenericFunction
@@ -845,7 +859,7 @@
.. change::
:tags: changed, sql
- :tickets:
+ :tickets:
Most classes in expression.sql
are no longer preceded with an underscore,
@@ -938,7 +952,7 @@
.. change::
:tags: feature, sql
- :tickets:
+ :tickets:
select() features a correlate_except()
method, auto correlates all selectables except those
@@ -990,7 +1004,7 @@
.. change::
:tags: feature, sql
- :tickets:
+ :tickets:
"scalar" selects now have a WHERE method
to help with generative building. Also slight adjustment
@@ -1053,7 +1067,7 @@
.. change::
:tags: access, feature
- :tickets:
+ :tickets:
the MS Access dialect has been
moved to its own project on Bitbucket,
@@ -1067,7 +1081,7 @@
.. change::
:tags: maxdb, moved
- :tickets:
+ :tickets:
The MaxDB dialect, which hasn't been
functional for several years, is
@@ -1090,7 +1104,7 @@
.. change::
:tags: mssql, feature
- :tickets:
+ :tickets:
SQL Server dialect can be given
database-qualified schema names,
@@ -1117,7 +1131,7 @@
.. change::
:tags: mssql, feature
- :tickets:
+ :tickets:
updated support for the mxodbc
driver; mxodbc 3.2.1 is recommended for full
@@ -1135,7 +1149,7 @@
.. change::
:tags: postgresql, feature
- :tickets:
+ :tickets:
postgresql.ARRAY now supports
indexing and slicing. The Python [] operator
@@ -1148,7 +1162,7 @@
.. change::
:tags: postgresql, feature
- :tickets:
+ :tickets:
Added new "array literal" construct
postgresql.array(). Basically a "tuple" that
@@ -1166,7 +1180,7 @@
.. change::
:tags: postgresql, feature
- :tickets:
+ :tickets:
The "ischema_names" dictionary of the
Postgresql dialect is "unofficially" customizable.
@@ -1176,7 +1190,7 @@
types with variable numbers of arguments.
The functionality here is "unofficial" for
three reasons:
-
+
1. this is not an "official" API. Ideally
an "official" API would allow custom type-handling
callables at the dialect or global level
@@ -1189,7 +1203,7 @@
3. The reflection code here is only tested against
simple types and probably has issues with more
compositional types.
-
+
patch courtesy Éric Lemoine.
.. change::
@@ -1211,7 +1225,7 @@
.. change::
:tags: firebird, bug
- :tickets:
+ :tickets:
Firebird now uses strict "ansi bind rules"
so that bound parameters don't render in the
@@ -1220,7 +1234,7 @@
.. change::
:tags: firebird, bug
- :tickets:
+ :tickets:
Support for passing datetime as date when
using the DateTime type with Firebird; other
@@ -1280,7 +1294,7 @@
.. change::
:tags: oracle, bug
- :tickets:
+ :tickets:
The CreateIndex construct in Oracle
will now schema-qualify the name of the index
diff --git a/doc/build/orm/examples.rst b/doc/build/orm/examples.rst
index 6560547cd..03d69cf4c 100644
--- a/doc/build/orm/examples.rst
+++ b/doc/build/orm/examples.rst
@@ -37,12 +37,12 @@ Location: /examples/custom_attributes/
.. _examples_caching:
-Beaker Caching
---------------
+Dogpile Caching
+---------------
-Location: /examples/beaker_caching/
+Location: /examples/dogpile_caching/
-.. automodule:: beaker_caching
+.. automodule:: dogpile_caching
Directed Graphs
---------------
diff --git a/doc/build/orm/session.rst b/doc/build/orm/session.rst
index 320d2fc88..79fdebae0 100644
--- a/doc/build/orm/session.rst
+++ b/doc/build/orm/session.rst
@@ -351,7 +351,7 @@ Session Frequently Asked Questions
The :class:`.Session` is not designed to be a
global object from which everyone consults as a "registry" of objects.
That's more the job of a **second level cache**. SQLAlchemy provides
- a pattern for implementing second level caching using `Beaker <http://beaker.groovie.org/>`_,
+ a pattern for implementing second level caching using `dogpile.cache <http://dogpilecache.readthedocs.org/>`_,
via the :ref:`examples_caching` example.
* How can I get the :class:`~sqlalchemy.orm.session.Session` for a certain object ?
diff --git a/examples/beaker_caching/caching_query.py b/examples/beaker_caching/caching_query.py
deleted file mode 100644
index ae0c9c903..000000000
--- a/examples/beaker_caching/caching_query.py
+++ /dev/null
@@ -1,279 +0,0 @@
-"""caching_query.py
-
-Represent persistence structures which allow the usage of
-Beaker caching with SQLAlchemy.
-
-The three new concepts introduced here are:
-
- * CachingQuery - a Query subclass that caches and
- retrieves results in/from Beaker.
- * FromCache - a query option that establishes caching
- parameters on a Query
- * RelationshipCache - a variant of FromCache which is specific
- to a query invoked during a lazy load.
- * _params_from_query - extracts value parameters from
- a Query.
-
-The rest of what's here are standard SQLAlchemy and
-Beaker constructs.
-
-"""
-from sqlalchemy.orm.interfaces import MapperOption
-from sqlalchemy.orm.query import Query
-from sqlalchemy.sql import visitors
-
-class CachingQuery(Query):
- """A Query subclass which optionally loads full results from a Beaker
- cache region.
-
- The CachingQuery stores additional state that allows it to consult
- a Beaker cache before accessing the database:
-
- * A "region", which is a cache region argument passed to a
- Beaker CacheManager, specifies a particular cache configuration
- (including backend implementation, expiration times, etc.)
- * A "namespace", which is a qualifying name that identifies a
- group of keys within the cache. A query that filters on a name
- might use the name "by_name", a query that filters on a date range
- to a joined table might use the name "related_date_range".
-
- When the above state is present, a Beaker cache is retrieved.
-
- The "namespace" name is first concatenated with
- a string composed of the individual entities and columns the Query
- requests, i.e. such as ``Query(User.id, User.name)``.
-
- The Beaker cache is then loaded from the cache manager based
- on the region and composed namespace. The key within the cache
- itself is then constructed against the bind parameters specified
- by this query, which are usually literals defined in the
- WHERE clause.
-
- The FromCache and RelationshipCache mapper options below represent
- the "public" method of configuring this state upon the CachingQuery.
-
- """
-
- def __init__(self, manager, *args, **kw):
- self.cache_manager = manager
- Query.__init__(self, *args, **kw)
-
- def __iter__(self):
- """override __iter__ to pull results from Beaker
- if particular attributes have been configured.
-
- Note that this approach does *not* detach the loaded objects from
- the current session. If the cache backend is an in-process cache
- (like "memory") and lives beyond the scope of the current session's
- transaction, those objects may be expired. The method here can be
- modified to first expunge() each loaded item from the current
- session before returning the list of items, so that the items
- in the cache are not the same ones in the current Session.
-
- """
- if hasattr(self, '_cache_parameters'):
- return self.get_value(createfunc=lambda: list(Query.__iter__(self)))
- else:
- return Query.__iter__(self)
-
- def invalidate(self):
- """Invalidate the value represented by this Query."""
-
- cache, cache_key = _get_cache_parameters(self)
- cache.remove(cache_key)
-
- def get_value(self, merge=True, createfunc=None):
- """Return the value from the cache for this query.
-
- Raise KeyError if no value present and no
- createfunc specified.
-
- """
- cache, cache_key = _get_cache_parameters(self)
- ret = cache.get_value(cache_key, createfunc=createfunc)
- if merge:
- ret = self.merge_result(ret, load=False)
- return ret
-
- def set_value(self, value):
- """Set the value in the cache for this query."""
-
- cache, cache_key = _get_cache_parameters(self)
- cache.put(cache_key, value)
-
-def query_callable(manager, query_cls=CachingQuery):
- def query(*arg, **kw):
- return query_cls(manager, *arg, **kw)
- return query
-
-def _get_cache_parameters(query):
- """For a query with cache_region and cache_namespace configured,
- return the correspoinding Cache instance and cache key, based
- on this query's current criterion and parameter values.
-
- """
- if not hasattr(query, '_cache_parameters'):
- raise ValueError("This Query does not have caching parameters configured.")
-
- region, namespace, cache_key = query._cache_parameters
-
- namespace = _namespace_from_query(namespace, query)
-
- if cache_key is None:
- # cache key - the value arguments from this query's parameters.
- args = [str(x) for x in _params_from_query(query)]
- args.extend([str(query._limit), str(query._offset)])
- cache_key = " ".join(args)
-
- assert cache_key is not None, "Cache key was None !"
-
- # get cache
- cache = query.cache_manager.get_cache_region(namespace, region)
-
- # optional - hash the cache_key too for consistent length
- # import uuid
- # cache_key= str(uuid.uuid5(uuid.NAMESPACE_DNS, cache_key))
-
- return cache, cache_key
-
-def _namespace_from_query(namespace, query):
- # cache namespace - the token handed in by the
- # option + class we're querying against
- namespace = " ".join([namespace] + [str(x) for x in query._entities])
-
- # memcached wants this
- namespace = namespace.replace(' ', '_')
-
- return namespace
-
-def _set_cache_parameters(query, region, namespace, cache_key):
-
- if hasattr(query, '_cache_parameters'):
- region, namespace, cache_key = query._cache_parameters
- raise ValueError("This query is already configured "
- "for region %r namespace %r" %
- (region, namespace)
- )
- query._cache_parameters = region, namespace, cache_key
-
-class FromCache(MapperOption):
- """Specifies that a Query should load results from a cache."""
-
- propagate_to_loaders = False
-
- def __init__(self, region, namespace, cache_key=None):
- """Construct a new FromCache.
-
- :param region: the cache region. Should be a
- region configured in the Beaker CacheManager.
-
- :param namespace: the cache namespace. Should
- be a name uniquely describing the target Query's
- lexical structure.
-
- :param cache_key: optional. A string cache key
- that will serve as the key to the query. Use this
- if your query has a huge amount of parameters (such
- as when using in_()) which correspond more simply to
- some other identifier.
-
- """
- self.region = region
- self.namespace = namespace
- self.cache_key = cache_key
-
- def process_query(self, query):
- """Process a Query during normal loading operation."""
-
- _set_cache_parameters(query, self.region, self.namespace, self.cache_key)
-
-class RelationshipCache(MapperOption):
- """Specifies that a Query as called within a "lazy load"
- should load results from a cache."""
-
- propagate_to_loaders = True
-
- def __init__(self, region, namespace, attribute):
- """Construct a new RelationshipCache.
-
- :param region: the cache region. Should be a
- region configured in the Beaker CacheManager.
-
- :param namespace: the cache namespace. Should
- be a name uniquely describing the target Query's
- lexical structure.
-
- :param attribute: A Class.attribute which
- indicates a particular class relationship() whose
- lazy loader should be pulled from the cache.
-
- """
- self.region = region
- self.namespace = namespace
- self._relationship_options = {
- ( attribute.property.parent.class_, attribute.property.key ) : self
- }
-
- def process_query_conditionally(self, query):
- """Process a Query that is used within a lazy loader.
-
- (the process_query_conditionally() method is a SQLAlchemy
- hook invoked only within lazyload.)
-
- """
- if query._current_path:
- mapper, key = query._current_path[-2:]
-
- for cls in mapper.class_.__mro__:
- if (cls, key) in self._relationship_options:
- relationship_option = self._relationship_options[(cls, key)]
- _set_cache_parameters(
- query,
- relationship_option.region,
- relationship_option.namespace,
- None)
-
- def and_(self, option):
- """Chain another RelationshipCache option to this one.
-
- While many RelationshipCache objects can be specified on a single
- Query separately, chaining them together allows for a more efficient
- lookup during load.
-
- """
- self._relationship_options.update(option._relationship_options)
- return self
-
-
-def _params_from_query(query):
- """Pull the bind parameter values from a query.
-
- This takes into account any scalar attribute bindparam set up.
-
- E.g. params_from_query(query.filter(Cls.foo==5).filter(Cls.bar==7)))
- would return [5, 7].
-
- """
- v = []
- def visit_bindparam(bind):
-
- if bind.key in query._params:
- value = query._params[bind.key]
- elif bind.callable:
- # lazyloader may dig a callable in here, intended
- # to late-evaluate params after autoflush is called.
- # convert to a scalar value.
- value = bind.callable()
- else:
- value = bind.value
-
- v.append(value)
-
- # TODO: this pulls the binds from the final compiled statement.
- # ideally, this would be a little more performant if it pulled
- # from query._criterion and others directly, however this would
- # need to be implemented not to miss anything, including
- # subqueries in the columns clause. See
- # http://stackoverflow.com/questions/9265900/sqlalchemy-how-to-traverse-bindparam-values-in-a-subquery/
- visitors.traverse(query.statement, {}, {'bindparam':visit_bindparam})
- return v
diff --git a/examples/beaker_caching/environment.py b/examples/beaker_caching/environment.py
deleted file mode 100644
index ccc625117..000000000
--- a/examples/beaker_caching/environment.py
+++ /dev/null
@@ -1,67 +0,0 @@
-"""environment.py
-
-Establish data / cache file paths, and configurations,
-bootstrap fixture data if necessary.
-
-"""
-import caching_query
-from sqlalchemy import create_engine
-from sqlalchemy.orm import scoped_session, sessionmaker
-from sqlalchemy.ext.declarative import declarative_base
-from beaker import cache
-import os
-
-# Beaker CacheManager. A home base for cache configurations.
-cache_manager = cache.CacheManager()
-
-# scoped_session. Apply our custom CachingQuery class to it,
-# using a callable that will associate the cache_manager
-# with the Query.
-Session = scoped_session(
- sessionmaker(
- query_cls=caching_query.query_callable(cache_manager)
- )
- )
-
-# global declarative base class.
-Base = declarative_base()
-
-
-root = "./beaker_data/"
-
-if not os.path.exists(root):
- raw_input("Will create datafiles in %r.\n"
- "To reset the cache + database, delete this directory.\n"
- "Press enter to continue.\n" % root
- )
- os.makedirs(root)
-
-dbfile = os.path.join(root, "beaker_demo.db")
-engine = create_engine('sqlite:///%s' % dbfile, echo=True)
-Session.configure(bind=engine)
-
-# configure the "default" cache region.
-cache_manager.regions['default'] ={
-
- # using type 'file' to illustrate
- # serialized persistence. In reality,
- # use memcached. Other backends
- # are much, much slower.
- 'type':'file',
- 'data_dir':root,
- 'expire':3600,
-
- # set start_time to current time
- # to re-cache everything
- # upon application startup
- #'start_time':time.time()
- }
-
-installed = False
-
-def bootstrap():
- global installed
- import fixture_data
- if not os.path.exists(dbfile):
- fixture_data.install()
- installed = True \ No newline at end of file
diff --git a/examples/beaker_caching/__init__.py b/examples/dogpile_caching/__init__.py
index 7e7b62791..00c386bda 100644
--- a/examples/beaker_caching/__init__.py
+++ b/examples/dogpile_caching/__init__.py
@@ -1,33 +1,38 @@
"""
-Illustrates how to embed Beaker cache functionality within
-the Query object, allowing full cache control as well as the
+Illustrates how to embed `dogpile.cache <http://dogpilecache.readthedocs.org/>`_
+functionality within
+the :class:`.Query` object, allowing full cache control as well as the
ability to pull "lazy loaded" attributes from long term cache
as well.
+.. versionchanged:: 0.8 The example was modernized to use
+ dogpile.cache, replacing Beaker as the caching library in
+ use.
+
In this demo, the following techniques are illustrated:
-* Using custom subclasses of Query
+* Using custom subclasses of :class:`.Query`
* Basic technique of circumventing Query to pull from a
custom cache source instead of the database.
-* Rudimental caching with Beaker, using "regions" which allow
+* Rudimental caching with dogpile.cache, using "regions" which allow
global control over a fixed set of configurations.
-* Using custom MapperOption objects to configure options on
+* Using custom :class:`.MapperOption` objects to configure options on
a Query, including the ability to invoke the options
deep within an object graph when lazy loads occur.
E.g.::
# query for Person objects, specifying cache
- q = Session.query(Person).options(FromCache("default", "all_people"))
+ q = Session.query(Person).options(FromCache("default"))
# specify that each Person's "addresses" collection comes from
# cache too
- q = q.options(RelationshipCache("default", "by_person", Person.addresses))
+ q = q.options(RelationshipCache(Person.addresses, "default"))
# query
print q.all()
-To run, both SQLAlchemy and Beaker (1.4 or greater) must be
+To run, both SQLAlchemy and dogpile.cache must be
installed or on the current PYTHONPATH. The demo will create a local
directory for datafiles, insert initial data, and run. Running the
demo a second time will utilize the cache files already present, and
@@ -37,23 +42,24 @@ pull from cache.
The demo scripts themselves, in order of complexity, are run as follows::
- python examples/beaker_caching/helloworld.py
+ python examples/dogpile_caching/helloworld.py
- python examples/beaker_caching/relationship_caching.py
+ python examples/dogpile_caching/relationship_caching.py
- python examples/beaker_caching/advanced.py
+ python examples/dogpile_caching/advanced.py
- python examples/beaker_caching/local_session_caching.py
+ python examples/dogpile_caching/local_session_caching.py
Listing of files:
- environment.py - Establish the Session, the Beaker cache
- manager, data / cache file paths, and configurations,
+ environment.py - Establish the Session, a dictionary
+ of "regions", a sample cache region against a .dbm
+ file, data / cache file paths, and configurations,
bootstrap fixture data if necessary.
caching_query.py - Represent functions and classes
- which allow the usage of Beaker caching with SQLAlchemy.
+ which allow the usage of Dogpile caching with SQLAlchemy.
Introduces a query option called FromCache.
model.py - The datamodel, which represents Person that has multiple
@@ -71,7 +77,7 @@ Listing of files:
techniques from the first two scripts.
local_session_caching.py - Grok everything so far ? This example
- creates a new Beaker container that will persist data in a dictionary
+ creates a new dogpile.cache backend that will persist data in a dictionary
which is local to the current session. remove() the session
and the cache is gone.
diff --git a/examples/beaker_caching/advanced.py b/examples/dogpile_caching/advanced.py
index 31beeff6f..6bfacfcf0 100644
--- a/examples/beaker_caching/advanced.py
+++ b/examples/dogpile_caching/advanced.py
@@ -35,7 +35,7 @@ def load_name_range(start, end, invalidate=False):
# have the "addresses" collection cached separately
# each lazyload of Person.addresses loads from cache.
- q = q.options(RelationshipCache("default", "by_person", Person.addresses))
+ q = q.options(RelationshipCache(Person.addresses, "default"))
# alternatively, eagerly load the "addresses" collection, so that they'd
# be cached together. This issues a bigger SQL statement and caches
diff --git a/examples/dogpile_caching/caching_query.py b/examples/dogpile_caching/caching_query.py
new file mode 100644
index 000000000..fb532fa63
--- /dev/null
+++ b/examples/dogpile_caching/caching_query.py
@@ -0,0 +1,255 @@
+"""caching_query.py
+
+Represent persistence structures which allow the usage of
+dogpile.cache caching with SQLAlchemy.
+
+The three new concepts introduced here are:
+
+ * CachingQuery - a Query subclass that caches and
+ retrieves results in/from dogpile.cache.
+ * FromCache - a query option that establishes caching
+ parameters on a Query
+ * RelationshipCache - a variant of FromCache which is specific
+ to a query invoked during a lazy load.
+ * _params_from_query - extracts value parameters from
+ a Query.
+
+The rest of what's here are standard SQLAlchemy and
+dogpile.cache constructs.
+
+"""
+from sqlalchemy.orm.interfaces import MapperOption
+from sqlalchemy.orm.query import Query
+from sqlalchemy.sql import visitors
+from dogpile.cache.api import NO_VALUE
+
+class CachingQuery(Query):
+ """A Query subclass which optionally loads full results from a dogpile
+ cache region.
+
+ The CachingQuery optionally stores additional state that allows it to consult
+ a dogpile.cache cache before accessing the database, in the form
+ of a FromCache or RelationshipCache object. Each of these objects
+ refer to the name of a :class:`dogpile.cache.Region` that's been configured
+ and stored in a lookup dictionary. When such an object has associated
+ itself with the CachingQuery, the corresponding :class:`dogpile.cache.Region`
+ is used to locate a cached result. If none is present, then the
+ Query is invoked normally, the results being cached.
+
+ The FromCache and RelationshipCache mapper options below represent
+ the "public" method of configuring this state upon the CachingQuery.
+
+ """
+
+ def __init__(self, regions, *args, **kw):
+ self.cache_regions = regions
+ Query.__init__(self, *args, **kw)
+
+ def __iter__(self):
+ """override __iter__ to pull results from dogpile
+ if particular attributes have been configured.
+
+ Note that this approach does *not* detach the loaded objects from
+ the current session. If the cache backend is an in-process cache
+ (like "memory") and lives beyond the scope of the current session's
+ transaction, those objects may be expired. The method here can be
+ modified to first expunge() each loaded item from the current
+ session before returning the list of items, so that the items
+ in the cache are not the same ones in the current Session.
+
+ """
+ if hasattr(self, '_cache_region'):
+ return self.get_value(createfunc=lambda: list(Query.__iter__(self)))
+ else:
+ return Query.__iter__(self)
+
+ def _get_cache_plus_key(self):
+ """Return a cache region plus key."""
+
+ dogpile_region = self.cache_regions[self._cache_region.region]
+ if self._cache_region.cache_key:
+ key = self._cache_region.cache_key
+ else:
+ key = _key_from_query(self)
+ return dogpile_region, key
+
+ def invalidate(self):
+ """Invalidate the cache value represented by this Query."""
+
+ dogpile_region, cache_key = self._get_cache_plus_key()
+ dogpile_region.delete(cache_key)
+
+ def get_value(self, merge=True, createfunc=None,
+ expiration_time=None, ignore_expiration=False):
+ """Return the value from the cache for this query.
+
+ Raise KeyError if no value present and no
+ createfunc specified.
+
+ """
+ dogpile_region, cache_key = self._get_cache_plus_key()
+
+ # ignore_expiration means, if the value is in the cache
+ # but is expired, return it anyway. This doesn't make sense
+ # with createfunc, which says, if the value is expired, generate
+ # a new value.
+ assert not ignore_expiration or not createfunc, \
+ "Can't ignore expiration and also provide createfunc"
+
+ if ignore_expiration or not createfunc:
+ cached_value = dogpile_region.get(cache_key,
+ expiration_time=expiration_time,
+ ignore_expiration=ignore_expiration)
+ else:
+ cached_value = dogpile_region.get_or_create(
+ cache_key,
+ createfunc,
+ expiration_time=expiration_time
+ )
+ if cached_value is NO_VALUE:
+ raise KeyError(cache_key)
+ if merge:
+ cached_value = self.merge_result(cached_value, load=False)
+ return cached_value
+
+ def set_value(self, value):
+ """Set the value in the cache for this query."""
+
+ dogpile_region, cache_key = self._get_cache_plus_key()
+ dogpile_region.set(cache_key, value)
+
+def query_callable(regions, query_cls=CachingQuery):
+ def query(*arg, **kw):
+ return query_cls(regions, *arg, **kw)
+ return query
+
+def _key_from_query(query, qualifier=None):
+ """Given a Query, create a cache key.
+
+ There are many approaches to this; here we use the simplest,
+ which is to create an md5 hash of the text of the SQL statement,
+ combined with stringified versions of all the bound parameters
+ within it. There's a bit of a performance hit with
+ compiling out "query.statement" here; other approaches include
+ setting up an explicit cache key with a particular Query,
+ then combining that with the bound parameter values.
+
+ """
+
+ v = []
+ def visit_bindparam(bind):
+
+ if bind.key in query._params:
+ value = query._params[bind.key]
+ elif bind.callable:
+ value = bind.callable()
+ else:
+ value = bind.value
+
+ v.append(unicode(value))
+
+ stmt = query.statement
+ visitors.traverse(stmt, {}, {'bindparam': visit_bindparam})
+
+ # here we return the key as a long string. our "key mangler"
+ # set up with the region will boil it down to an md5.
+ return " ".join([unicode(stmt)] + v)
+
+class FromCache(MapperOption):
+ """Specifies that a Query should load results from a cache."""
+
+ propagate_to_loaders = False
+
+ def __init__(self, region="default", cache_key=None):
+ """Construct a new FromCache.
+
+ :param region: the cache region. Should be a
+ region configured in the dictionary of dogpile
+ regions.
+
+ :param cache_key: optional. A string cache key
+ that will serve as the key to the query. Use this
+ if your query has a huge amount of parameters (such
+ as when using in_()) which correspond more simply to
+ some other identifier.
+
+ """
+ self.region = region
+ self.cache_key = cache_key
+
+ def process_query(self, query):
+ """Process a Query during normal loading operation."""
+ query._cache_region = self
+
+class RelationshipCache(MapperOption):
+ """Specifies that a Query as called within a "lazy load"
+ should load results from a cache."""
+
+ propagate_to_loaders = True
+
+ def __init__(self, attribute, region="default"):
+ self.region = region
+ self.cls_ = attribute.property.parent.class_
+ self.key = attribute.property.key
+
+ def process_query_conditionally(self, query):
+ if query._current_path:
+ mapper, key = query._current_path[-2:]
+ if issubclass(mapper.class_, self.cls_) and \
+ key == self.key:
+ query._cache_region = self
+
+class RelationshipCache(MapperOption):
+ """Specifies that a Query as called within a "lazy load"
+ should load results from a cache."""
+
+ propagate_to_loaders = True
+
+ def __init__(self, attribute, region="default", cache_key=None):
+ """Construct a new RelationshipCache.
+
+ :param attribute: A Class.attribute which
+ indicates a particular class relationship() whose
+ lazy loader should be pulled from the cache.
+
+ :param region: name of the cache region.
+
+ :param cache_key: optional. A string cache key
+ that will serve as the key to the query, bypassing
+ the usual means of forming a key from the Query itself.
+
+ """
+ self.region = region
+ self.cache_key = cache_key
+ self._relationship_options = {
+ (attribute.property.parent.class_, attribute.property.key): self
+ }
+
+ def process_query_conditionally(self, query):
+ """Process a Query that is used within a lazy loader.
+
+ (the process_query_conditionally() method is a SQLAlchemy
+ hook invoked only within lazyload.)
+
+ """
+ if query._current_path:
+ mapper, key = query._current_path[-2:]
+
+ for cls in mapper.class_.__mro__:
+ if (cls, key) in self._relationship_options:
+ relationship_option = self._relationship_options[(cls, key)]
+ query._cache_region = relationship_option
+ break
+
+ def and_(self, option):
+ """Chain another RelationshipCache option to this one.
+
+ While many RelationshipCache objects can be specified on a single
+ Query separately, chaining them together allows for a more efficient
+ lookup during load.
+
+ """
+ self._relationship_options.update(option._relationship_options)
+ return self
+
+
diff --git a/examples/dogpile_caching/environment.py b/examples/dogpile_caching/environment.py
new file mode 100644
index 000000000..f210d26ac
--- /dev/null
+++ b/examples/dogpile_caching/environment.py
@@ -0,0 +1,83 @@
+"""environment.py
+
+Establish data / cache file paths, and configurations,
+bootstrap fixture data if necessary.
+
+"""
+import caching_query
+from sqlalchemy import create_engine
+from sqlalchemy.orm import scoped_session, sessionmaker
+from sqlalchemy.ext.declarative import declarative_base
+from dogpile.cache.region import make_region
+import os
+import md5
+
+# dogpile cache regions. A home base for cache configurations.
+regions = {}
+
+
+# scoped_session. Apply our custom CachingQuery class to it,
+# using a callable that will associate the dictionary
+# of regions with the Query.
+Session = scoped_session(
+ sessionmaker(
+ query_cls=caching_query.query_callable(regions)
+ )
+ )
+
+# global declarative base class.
+Base = declarative_base()
+
+root = "./dogpile_data/"
+
+if not os.path.exists(root):
+ raw_input("Will create datafiles in %r.\n"
+ "To reset the cache + database, delete this directory.\n"
+ "Press enter to continue.\n" % root
+ )
+ os.makedirs(root)
+
+dbfile = os.path.join(root, "dogpile_demo.db")
+engine = create_engine('sqlite:///%s' % dbfile, echo=True)
+Session.configure(bind=engine)
+
+
def md5_key_mangler(key):
    """Receive cache keys as long concatenated strings;
    distill them into an md5 hash.

    :param key: cache key string (or bytes) generated from the Query.
    :return: hex digest string, suitable for the dbm backend which
      requires string-encoded keys.
    """
    # hashlib replaces the ``md5`` module, which has been deprecated
    # since Python 2.5 and does not exist on Python 3; imported locally
    # so this function stands alone regardless of module-level imports.
    from hashlib import md5
    # hashlib on Python 3 requires bytes; encoding is a no-op-equivalent
    # for the ASCII keys generated from Query SQL + parameters.
    if isinstance(key, str):
        key = key.encode("utf-8")
    return md5(key).hexdigest()
+
+# configure the "default" cache region.
+regions['default'] = make_region(
+ # the "dbm" backend needs
+ # string-encoded keys
+ key_mangler=md5_key_mangler
+ ).configure(
+ # using type 'file' to illustrate
+ # serialized persistence. Normally
+ # memcached or similar is a better choice
+ # for caching.
+ 'dogpile.cache.dbm',
+ expiration_time=3600,
+ arguments={
+ "filename": os.path.join(root, "cache.dbm")
+ }
+ )
+
+# optional; call invalidate() on the region
+# once created so that all data is fresh when
+# the app is restarted. Good for development,
+# on a production system needs to be used carefully
+# regions['default'].invalidate()
+
+
# set True once fixture data has been installed during this run
installed = False

def bootstrap():
    """Install fixture data into the database, if not already present.

    Sets the module-level ``installed`` flag when an install occurs.
    """
    global installed
    # imported here, not at module level, to avoid a circular import:
    # fixture_data itself imports from this environment module
    import fixture_data
    if not os.path.exists(dbfile):
        fixture_data.install()
        installed = True
diff --git a/examples/beaker_caching/fixture_data.py b/examples/dogpile_caching/fixture_data.py
index b77bbcb95..1db75ea05 100644
--- a/examples/beaker_caching/fixture_data.py
+++ b/examples/dogpile_caching/fixture_data.py
@@ -16,8 +16,10 @@ def install():
('Chicago', 'United States', ('60601', '60602', '60603', '60604')),
('Montreal', 'Canada', ('H2S 3K9', 'H2B 1V4', 'H7G 2T8')),
('Edmonton', 'Canada', ('T5J 1R9', 'T5J 1Z4', 'T5H 1P6')),
- ('New York', 'United States', ('10001', '10002', '10003', '10004', '10005', '10006')),
- ('San Francisco', 'United States', ('94102', '94103', '94104', '94105', '94107', '94108'))
+ ('New York', 'United States',
+ ('10001', '10002', '10003', '10004', '10005', '10006')),
+ ('San Francisco', 'United States',
+ ('94102', '94103', '94104', '94105', '94107', '94108'))
]
countries = {}
@@ -38,7 +40,8 @@ def install():
"person %.2d" % i,
Address(
street="street %.2d" % i,
- postal_code=all_post_codes[random.randint(0, len(all_post_codes) - 1)]
+ postal_code=all_post_codes[
+ random.randint(0, len(all_post_codes) - 1)]
)
)
Session.add(person)
diff --git a/examples/beaker_caching/helloworld.py b/examples/dogpile_caching/helloworld.py
index 6f696c502..e2e4d4f78 100644
--- a/examples/beaker_caching/helloworld.py
+++ b/examples/dogpile_caching/helloworld.py
@@ -10,7 +10,7 @@ from caching_query import FromCache
# load Person objects. cache the result under the namespace "all_people".
print "loading people...."
-people = Session.query(Person).options(FromCache("default", "all_people")).all()
+people = Session.query(Person).options(FromCache("default")).all()
# remove the Session. next query starts from scratch.
Session.remove()
@@ -18,13 +18,13 @@ Session.remove()
# load again, using the same FromCache option. now they're cached
# under "all_people", no SQL is emitted.
print "loading people....again!"
-people = Session.query(Person).options(FromCache("default", "all_people")).all()
+people = Session.query(Person).options(FromCache("default")).all()
# want to load on some different kind of query ? change the namespace
# you send to FromCache
print "loading people two through twelve"
people_two_through_twelve = Session.query(Person).\
- options(FromCache("default", "people_on_range")).\
+ options(FromCache("default")).\
filter(Person.name.between("person 02", "person 12")).\
all()
@@ -34,7 +34,7 @@ people_two_through_twelve = Session.query(Person).\
# previous one, issues new SQL...
print "loading people five through fifteen"
people_five_through_fifteen = Session.query(Person).\
- options(FromCache("default", "people_on_range")).\
+ options(FromCache("default")).\
filter(Person.name.between("person 05", "person 15")).\
all()
@@ -42,7 +42,7 @@ people_five_through_fifteen = Session.query(Person).\
# ... but using the same params as are already cached, no SQL
print "loading people two through twelve...again!"
people_two_through_twelve = Session.query(Person).\
- options(FromCache("default", "people_on_range")).\
+ options(FromCache("default")).\
filter(Person.name.between("person 02", "person 12")).\
all()
@@ -52,9 +52,9 @@ people_two_through_twelve = Session.query(Person).\
# same list of objects to be loaded, and the same parameters in the
# same order, then call invalidate().
print "invalidating everything"
-Session.query(Person).options(FromCache("default", "all_people")).invalidate()
+Session.query(Person).options(FromCache("default")).invalidate()
Session.query(Person).\
- options(FromCache("default", "people_on_range")).\
+ options(FromCache("default")).\
filter(Person.name.between("person 02", "person 12")).invalidate()
Session.query(Person).\
options(FromCache("default", "people_on_range")).\
diff --git a/examples/beaker_caching/local_session_caching.py b/examples/dogpile_caching/local_session_caching.py
index 2d8035578..383b31c11 100644
--- a/examples/beaker_caching/local_session_caching.py
+++ b/examples/dogpile_caching/local_session_caching.py
@@ -1,6 +1,6 @@
"""local_session_caching.py
-Create a new Beaker cache type + a local region that will store
+Create a new Dogpile cache backend that will store
cached data local to the current Session.
This is an advanced example which assumes familiarity
@@ -8,11 +8,11 @@ with the basic operation of CachingQuery.
"""
-from beaker import cache, container
-import collections
+from dogpile.cache.api import CacheBackend, NO_VALUE
+from dogpile.cache.region import register_backend
-class ScopedSessionNamespace(container.MemoryNamespaceManager):
- """A Beaker cache type which will cache objects locally on
+class ScopedSessionBackend(CacheBackend):
+ """A dogpile backend which will cache objects locally on
the current session.
When used with the query_cache system, the effect is that the objects
@@ -26,50 +26,54 @@ class ScopedSessionNamespace(container.MemoryNamespaceManager):
"""
- def __init__(self, namespace, scoped_session, **kwargs):
- """__init__ is called by Beaker itself."""
+ def __init__(self, arguments):
+ self.scoped_session = arguments['scoped_session']
- container.NamespaceManager.__init__(self, namespace)
- self.scoped_session = scoped_session
+ def get(self, key):
+ return self._cache_dictionary.get(key, NO_VALUE)
- @classmethod
- def create_session_container(cls, beaker_name, scoped_session):
- """Create a new session container for a given scoped_session."""
+ def set(self, key, value):
+ self._cache_dictionary[key] = value
- def create_namespace(namespace, **kw):
- return cls(namespace, scoped_session, **kw)
- cache.clsmap[beaker_name] = create_namespace
+ def delete(self, key):
+ self._cache_dictionary.pop(key, None)
@property
- def dictionary(self):
- """Return the cache dictionary used by this MemoryNamespaceManager."""
+ def _cache_dictionary(self):
+ """Return the cache dictionary linked to the current Session."""
sess = self.scoped_session()
try:
- nscache = sess._beaker_cache
+ cache_dict = sess._cache_dictionary
except AttributeError:
- sess._beaker_cache = nscache = collections.defaultdict(dict)
- return nscache[self.namespace]
+ sess._cache_dictionary = cache_dict = {}
+ return cache_dict
+
+register_backend("sqlalchemy.session", __name__, "ScopedSessionBackend")
if __name__ == '__main__':
- from environment import Session, cache_manager
+ from environment import Session, regions
from caching_query import FromCache
-
- # create a Beaker container type called "ext:local_session".
- # it will reference the ScopedSession in meta.
- ScopedSessionNamespace.create_session_container("ext:local_session", Session)
-
- # set up a region based on this new container type.
- cache_manager.regions['local_session'] ={'type':'ext:local_session'}
+ from dogpile.cache import make_region
+
+ # set up a region based on the ScopedSessionBackend,
+ # pointing to the scoped_session declared in the example
+ # environment.
+ regions['local_session'] = make_region().configure(
+ 'sqlalchemy.session',
+ arguments={
+ "scoped_session": Session
+ }
+ )
from model import Person
# query to load Person by name, with criterion
# of "person 10"
q = Session.query(Person).\
- options(FromCache("local_session", "by_name")).\
- filter(Person.name=="person 10")
+ options(FromCache("local_session")).\
+ filter(Person.name == "person 10")
# load from DB
person10 = q.one()
@@ -77,7 +81,7 @@ if __name__ == '__main__':
# next call, the query is cached.
person10 = q.one()
- # clear out the Session. The "_beaker_cache" dictionary
+ # clear out the Session. The "_cache_dictionary" dictionary
# disappears with it.
Session.remove()
@@ -91,6 +95,5 @@ if __name__ == '__main__':
# that would change the results of a cached query, such as
# inserts, deletes, or modification to attributes that are
# part of query criterion, still require careful invalidation.
- from caching_query import _get_cache_parameters
- cache, key = _get_cache_parameters(q)
+ cache, key = q._get_cache_plus_key()
assert person10 is cache.get(key)[0]
diff --git a/examples/beaker_caching/model.py b/examples/dogpile_caching/model.py
index a6733962d..6f1cffedf 100644
--- a/examples/beaker_caching/model.py
+++ b/examples/dogpile_caching/model.py
@@ -96,11 +96,11 @@ class Person(Base):
# Caching options. A set of three RelationshipCache options
# which can be applied to Query(), causing the "lazy load"
# of these attributes to be loaded from cache.
-cache_address_bits = RelationshipCache("default", "byid", PostalCode.city).\
+cache_address_bits = RelationshipCache(PostalCode.city, "default").\
and_(
- RelationshipCache("default", "byid", City.country)
+ RelationshipCache(City.country, "default")
).and_(
- RelationshipCache("default", "byid", Address.postal_code)
+ RelationshipCache(Address.postal_code, "default")
)
bootstrap() \ No newline at end of file
diff --git a/examples/beaker_caching/relation_caching.py b/examples/dogpile_caching/relation_caching.py
index f1e5c7886..7a5779620 100644
--- a/examples/beaker_caching/relation_caching.py
+++ b/examples/dogpile_caching/relation_caching.py
@@ -6,7 +6,7 @@ term cache.
"""
from environment import Session, root
-from model import Person, Address, cache_address_bits
+from model import Person, cache_address_bits
from sqlalchemy.orm import joinedload
import os
@@ -16,9 +16,10 @@ for p in Session.query(Person).options(joinedload(Person.addresses), cache_addre
print "\n\nIf this was the first run of relationship_caching.py, SQL was likely emitted to "\
"load postal codes, cities, countries.\n"\
- "If run a second time, only a single SQL statement will run - all "\
+ "If run a second time, assuming the cache is still valid, "\
+ "only a single SQL statement will run - all "\
"related data is pulled from cache.\n"\
- "To clear the cache, delete the directory %r. \n"\
+ "To clear the cache, delete the file %r. \n"\
"This will cause a re-load of cities, postal codes and countries on "\
"the next run.\n"\
- % os.path.join(root, 'container_file')
+ % os.path.join(root, 'cache.dbm')