summaryrefslogtreecommitdiff
path: root/doc
diff options
context:
space:
mode:
authorMike Bayer <mike_mp@zzzcomputing.com>2014-01-21 20:10:23 -0500
committerMike Bayer <mike_mp@zzzcomputing.com>2014-01-21 20:10:23 -0500
commit07fb90c6cc14de6d02cf4be592c57d56831f59f7 (patch)
tree050ef65db988559c60f7aa40f2d0bfe24947e548 /doc
parent560fd1d5ed643a1b0f95296f3b840c1963bbe67f (diff)
parentee1f4d21037690ad996c5eacf7e1200e92f2fbaa (diff)
downloadsqlalchemy-ticket_2501.tar.gz
Merge branch 'master' into ticket_2501ticket_2501
Conflicts: lib/sqlalchemy/orm/mapper.py
Diffstat (limited to 'doc')
-rw-r--r--doc/build/builder/autodoc_mods.py50
-rw-r--r--doc/build/builder/changelog.py296
-rw-r--r--doc/build/builder/mako.py52
-rw-r--r--doc/build/builder/viewsource.py209
-rw-r--r--doc/build/changelog/changelog_01.rst27
-rw-r--r--doc/build/changelog/changelog_02.rst548
-rw-r--r--doc/build/changelog/changelog_03.rst35
-rw-r--r--doc/build/changelog/changelog_04.rst50
-rw-r--r--doc/build/changelog/changelog_05.rst10
-rw-r--r--doc/build/changelog/changelog_06.rst56
-rw-r--r--doc/build/changelog/changelog_07.rst137
-rw-r--r--doc/build/changelog/changelog_08.rst643
-rw-r--r--doc/build/changelog/changelog_09.rst1294
-rw-r--r--doc/build/changelog/migration_04.rst6
-rw-r--r--doc/build/changelog/migration_07.rst10
-rw-r--r--doc/build/changelog/migration_08.rst4
-rw-r--r--doc/build/changelog/migration_09.rst1149
-rw-r--r--doc/build/conf.py22
-rw-r--r--doc/build/copyright.rst4
-rw-r--r--doc/build/core/connections.rst6
-rw-r--r--doc/build/core/constraints.rst409
-rw-r--r--doc/build/core/ddl.rst287
-rw-r--r--doc/build/core/defaults.rst345
-rw-r--r--doc/build/core/dml.rst37
-rw-r--r--doc/build/core/engines.rst4
-rw-r--r--doc/build/core/event.rst5
-rw-r--r--doc/build/core/events.rst3
-rw-r--r--doc/build/core/exceptions.rst1
-rw-r--r--doc/build/core/expression_api.rst250
-rw-r--r--doc/build/core/functions.rst27
-rw-r--r--doc/build/core/index.rst3
-rw-r--r--doc/build/core/internals.rst10
-rw-r--r--doc/build/core/metadata.rst330
-rw-r--r--doc/build/core/pooling.rst63
-rw-r--r--doc/build/core/reflection.rst168
-rw-r--r--doc/build/core/schema.rst1462
-rw-r--r--doc/build/core/selectable.rst85
-rw-r--r--doc/build/core/sqlelement.rst140
-rw-r--r--doc/build/core/tutorial.rst26
-rw-r--r--doc/build/core/types.rst102
-rw-r--r--doc/build/dialects/drizzle.rst24
-rw-r--r--doc/build/dialects/index.rst35
-rw-r--r--doc/build/dialects/informix.rst11
-rw-r--r--doc/build/dialects/mssql.rst36
-rw-r--r--doc/build/dialects/mysql.rst66
-rw-r--r--doc/build/dialects/oracle.rst14
-rw-r--r--doc/build/dialects/postgresql.rst50
-rw-r--r--doc/build/faq.rst942
-rw-r--r--doc/build/glossary.rst641
-rw-r--r--doc/build/index.rst14
-rw-r--r--doc/build/intro.rst49
-rw-r--r--doc/build/orm/deprecated.rst2
-rw-r--r--doc/build/orm/events.rst2
-rw-r--r--doc/build/orm/examples.rst144
-rw-r--r--doc/build/orm/exceptions.rst1
-rw-r--r--doc/build/orm/extensions/associationproxy.rst1
-rw-r--r--doc/build/orm/extensions/automap.rst22
-rw-r--r--doc/build/orm/extensions/declarative.rst3
-rw-r--r--doc/build/orm/extensions/hybrid.rst2
-rw-r--r--doc/build/orm/extensions/instrumentation.rst3
-rw-r--r--doc/build/orm/extensions/mutable.rst5
-rw-r--r--doc/build/orm/inheritance.rst7
-rw-r--r--doc/build/orm/internals.rst39
-rw-r--r--doc/build/orm/loading.rst181
-rw-r--r--doc/build/orm/mapper_config.rst558
-rw-r--r--doc/build/orm/query.rst8
-rw-r--r--doc/build/orm/relationships.rst2
-rw-r--r--doc/build/orm/session.rst546
-rw-r--r--doc/build/orm/tutorial.rst627
-rw-r--r--doc/build/requirements.txt2
-rw-r--r--doc/build/static/docs.css36
-rw-r--r--doc/build/templates/genindex.mako2
-rw-r--r--doc/build/templates/layout.mako27
-rw-r--r--doc/build/templates/page.mako2
-rw-r--r--doc/build/templates/rtd_layout.mako164
-rw-r--r--doc/build/templates/search.mako2
-rw-r--r--doc/build/templates/static_base.mako9
-rw-r--r--doc/build/testdocs.py3
78 files changed, 8676 insertions, 3971 deletions
diff --git a/doc/build/builder/autodoc_mods.py b/doc/build/builder/autodoc_mods.py
index 576b4c339..93e2596be 100644
--- a/doc/build/builder/autodoc_mods.py
+++ b/doc/build/builder/autodoc_mods.py
@@ -9,6 +9,30 @@ def autodoc_skip_member(app, what, name, obj, skip, options):
else:
return skip
+
+_convert_modname = {
+ "sqlalchemy.sql.sqltypes": "sqlalchemy.types",
+ "sqlalchemy.sql.type_api": "sqlalchemy.types",
+ "sqlalchemy.sql.schema": "sqlalchemy.schema",
+ "sqlalchemy.sql.elements": "sqlalchemy.sql.expression",
+ "sqlalchemy.sql.selectable": "sqlalchemy.sql.expression",
+ "sqlalchemy.sql.dml": "sqlalchemy.sql.expression",
+ "sqlalchemy.sql.ddl": "sqlalchemy.schema",
+ "sqlalchemy.sql.base": "sqlalchemy.sql.expression"
+}
+
+_convert_modname_w_class = {
+ ("sqlalchemy.engine.interfaces", "Connectable"): "sqlalchemy.engine"
+}
+
+def _adjust_rendered_mod_name(modname, objname):
+ if modname in _convert_modname:
+ return _convert_modname[modname]
+ elif (modname, objname) in _convert_modname_w_class:
+ return _convert_modname_w_class[(modname, objname)]
+ else:
+ return modname
+
# im sure this is in the app somewhere, but I don't really
# know where, so we're doing it here.
_track_autodoced = {}
@@ -16,6 +40,24 @@ _inherited_names = set()
def autodoc_process_docstring(app, what, name, obj, options, lines):
if what == "class":
_track_autodoced[name] = obj
+
+ # need to translate module names for bases, others
+ # as we document lots of symbols in namespace modules
+ # outside of their source
+ bases = []
+ for base in obj.__bases__:
+ if base is not object:
+ bases.append(":class:`%s.%s`" % (
+ _adjust_rendered_mod_name(base.__module__, base.__name__),
+ base.__name__))
+
+ if bases:
+ lines[:0] = [
+ "Bases: %s" % (", ".join(bases)),
+ ""
+ ]
+
+
elif what in ("attribute", "method") and \
options.get("inherited-members"):
m = re.match(r'(.*?)\.([\w_]+)$', name)
@@ -35,15 +77,16 @@ def autodoc_process_docstring(app, what, name, obj, options, lines):
" *inherited from the* :%s:`~%s.%s.%s` *%s of* :class:`~%s.%s`" % (
"attr" if what == "attribute"
else "meth",
- supercls.__module__, supercls.__name__,
+ _adjust_rendered_mod_name(supercls.__module__, supercls.__name__),
+ supercls.__name__,
attrname,
what,
- supercls.__module__, supercls.__name__
+ _adjust_rendered_mod_name(supercls.__module__, supercls.__name__),
+ supercls.__name__
),
""
]
-from docutils import nodes
def missing_reference(app, env, node, contnode):
if node.attributes['reftarget'] in _inherited_names:
return node.children[0]
@@ -51,7 +94,6 @@ def missing_reference(app, env, node, contnode):
return None
-
def setup(app):
app.connect('autodoc-skip-member', autodoc_skip_member)
app.connect('autodoc-process-docstring', autodoc_process_docstring)
diff --git a/doc/build/builder/changelog.py b/doc/build/builder/changelog.py
deleted file mode 100644
index 41a403ad0..000000000
--- a/doc/build/builder/changelog.py
+++ /dev/null
@@ -1,296 +0,0 @@
-import re
-from sphinx.util.compat import Directive
-from docutils.statemachine import StringList
-from docutils import nodes, utils
-import textwrap
-import itertools
-import collections
-import md5
-
-def _comma_list(text):
- return re.split(r"\s*,\s*", text.strip())
-
-def _parse_content(content):
- d = {}
- d['text'] = []
- idx = 0
- for line in content:
- idx += 1
- m = re.match(r' *\:(.+?)\:(?: +(.+))?', line)
- if m:
- attrname, value = m.group(1, 2)
- d[attrname] = value or ''
- else:
- break
- d["text"] = content[idx:]
- return d
-
-
-class EnvDirective(object):
- @property
- def env(self):
- return self.state.document.settings.env
-
-class ChangeLogDirective(EnvDirective, Directive):
- has_content = True
-
- type_ = "change"
-
- default_section = 'misc'
-
- def _organize_by_section(self, changes):
- compound_sections = [(s, s.split(" ")) for s in
- self.sections if " " in s]
-
- bysection = collections.defaultdict(list)
- all_sections = set()
- for rec in changes:
- inner_tag = rec['tags'].intersection(self.inner_tag_sort)
- if inner_tag:
- inner_tag = inner_tag.pop()
- else:
- inner_tag = ""
-
- for compound, comp_words in compound_sections:
- if rec['tags'].issuperset(comp_words):
- bysection[(compound, inner_tag)].append(rec)
- all_sections.add(compound)
- break
- else:
- intersect = rec['tags'].intersection(self.sections)
- if intersect:
- for sec in rec['sorted_tags']:
- if sec in intersect:
- bysection[(sec, inner_tag)].append(rec)
- all_sections.add(sec)
- break
- else:
- bysection[(self.default_section, inner_tag)].append(rec)
- return bysection, all_sections
-
- @classmethod
- def changes(cls, env):
- return env.temp_data['ChangeLogDirective_%s_changes' % cls.type_]
-
- def _setup_run(self):
- self.sections = self.env.config.changelog_sections
- self.inner_tag_sort = self.env.config.changelog_inner_tag_sort + [""]
- self.env.temp_data['ChangeLogDirective_%s_changes' % self.type_] = []
- self._parsed_content = _parse_content(self.content)
-
- p = nodes.paragraph('', '',)
- self.state.nested_parse(self.content[1:], 0, p)
-
- def run(self):
- self._setup_run()
- changes = self.changes(self.env)
- output = []
-
- self.version = version = self._parsed_content.get('version', '')
- id_prefix = "%s-%s" % (self.type_, version)
- topsection = self._run_top(id_prefix)
- output.append(topsection)
-
- bysection, all_sections = self._organize_by_section(changes)
-
- counter = itertools.count()
-
- sections_to_render = [s for s in self.sections if s in all_sections]
- if not sections_to_render:
- for cat in self.inner_tag_sort:
- append_sec = self._append_node()
-
- for rec in bysection[(self.default_section, cat)]:
- rec["id"] = "%s-%s" % (id_prefix, next(counter))
-
- self._render_rec(rec, None, cat, append_sec)
-
- if append_sec.children:
- topsection.append(append_sec)
- else:
- for section in sections_to_render + [self.default_section]:
- sec = nodes.section('',
- nodes.title(section, section),
- ids=["%s-%s" % (id_prefix, section.replace(" ", "-"))]
- )
-
- append_sec = self._append_node()
- sec.append(append_sec)
-
- for cat in self.inner_tag_sort:
- for rec in bysection[(section, cat)]:
- rec["id"] = "%s-%s" % (id_prefix, next(counter))
- self._render_rec(rec, section, cat, append_sec)
-
- if append_sec.children:
- topsection.append(sec)
-
- return output
-
- def _append_node(self):
- return nodes.bullet_list()
-
- def _run_top(self, id_prefix):
- version = self._parsed_content.get('version', '')
- topsection = nodes.section('',
- nodes.title(version, version),
- ids=[id_prefix]
- )
-
- if self._parsed_content.get("released"):
- topsection.append(nodes.Text("Released: %s" %
- self._parsed_content['released']))
- else:
- topsection.append(nodes.Text("no release date"))
-
- intro_para = nodes.paragraph('', '')
- for len_, text in enumerate(self._parsed_content['text']):
- if ".. change::" in text:
- break
- if len_:
- self.state.nested_parse(self._parsed_content['text'][0:len_], 0,
- intro_para)
- topsection.append(intro_para)
-
- return topsection
-
-
- def _render_rec(self, rec, section, cat, append_sec):
- para = rec['node'].deepcopy()
-
- text = _text_rawsource_from_node(para)
-
- to_hash = "%s %s" % (self.version, text[0:100])
- targetid = "%s-%s" % (self.type_,
- md5.md5(to_hash.encode('ascii', 'ignore')
- ).hexdigest())
- targetnode = nodes.target('', '', ids=[targetid])
- para.insert(0, targetnode)
- permalink = nodes.reference('', '',
- nodes.Text("(link)", "(link)"),
- refid=targetid,
- classes=['changeset-link']
- )
- para.append(permalink)
-
- insert_ticket = nodes.paragraph('')
- para.append(insert_ticket)
-
- i = 0
- for collection, render, prefix in (
- (rec['tickets'], self.env.config.changelog_render_ticket, "#%s"),
- (rec['pullreq'], self.env.config.changelog_render_pullreq,
- "pull request %s"),
- (rec['changeset'], self.env.config.changelog_render_changeset, "r%s"),
- ):
- for refname in collection:
- if i > 0:
- insert_ticket.append(nodes.Text(", ", ", "))
- else:
- insert_ticket.append(nodes.Text(" ", " "))
- i += 1
- if render is not None:
- refuri = render % refname
- node = nodes.reference('', '',
- nodes.Text(prefix % refname, prefix % refname),
- refuri=refuri
- )
- else:
- node = nodes.Text(prefix % refname, prefix % refname)
- insert_ticket.append(node)
-
- if rec['tags']:
- tag_node = nodes.strong('',
- " ".join("[%s]" % t for t
- in
- [t1 for t1 in [section, cat]
- if t1 in rec['tags']] +
-
- list(rec['tags'].difference([section, cat]))
- ) + " "
- )
- para.children[0].insert(0, tag_node)
-
- append_sec.append(
- nodes.list_item('',
- nodes.target('', '', ids=[rec['id']]),
- para
- )
- )
-
-
-class ChangeDirective(EnvDirective, Directive):
- has_content = True
-
- type_ = "change"
- parent_cls = ChangeLogDirective
-
- def run(self):
- content = _parse_content(self.content)
- p = nodes.paragraph('', '',)
- sorted_tags = _comma_list(content.get('tags', ''))
- rec = {
- 'tags': set(sorted_tags).difference(['']),
- 'tickets': set(_comma_list(content.get('tickets', ''))).difference(['']),
- 'pullreq': set(_comma_list(content.get('pullreq', ''))).difference(['']),
- 'changeset': set(_comma_list(content.get('changeset', ''))).difference(['']),
- 'node': p,
- 'type': self.type_,
- "title": content.get("title", None),
- 'sorted_tags': sorted_tags
- }
-
- if "declarative" in rec['tags']:
- rec['tags'].add("orm")
-
- self.state.nested_parse(content['text'], 0, p)
- self.parent_cls.changes(self.env).append(rec)
-
- return []
-
-def _text_rawsource_from_node(node):
- src = []
- stack = [node]
- while stack:
- n = stack.pop(0)
- if isinstance(n, nodes.Text):
- src.append(n.rawsource)
- stack.extend(n.children)
- return "".join(src)
-
-def _rst2sphinx(text):
- return StringList(
- [line.strip() for line in textwrap.dedent(text).split("\n")]
- )
-
-
-def make_ticket_link(name, rawtext, text, lineno, inliner,
- options={}, content=[]):
- env = inliner.document.settings.env
- render_ticket = env.config.changelog_render_ticket or "%s"
- prefix = "#%s"
- if render_ticket:
- ref = render_ticket % text
- node = nodes.reference(rawtext, prefix % text, refuri=ref, **options)
- else:
- node = nodes.Text(prefix % text, prefix % text)
- return [node], []
-
-def setup(app):
- app.add_directive('changelog', ChangeLogDirective)
- app.add_directive('change', ChangeDirective)
- app.add_config_value("changelog_sections", [], 'env')
- app.add_config_value("changelog_inner_tag_sort", [], 'env')
- app.add_config_value("changelog_render_ticket",
- None,
- 'env'
- )
- app.add_config_value("changelog_render_pullreq",
- None,
- 'env'
- )
- app.add_config_value("changelog_render_changeset",
- None,
- 'env'
- )
- app.add_role('ticket', make_ticket_link)
diff --git a/doc/build/builder/mako.py b/doc/build/builder/mako.py
index 8003ed417..0367bf018 100644
--- a/doc/build/builder/mako.py
+++ b/doc/build/builder/mako.py
@@ -23,49 +23,31 @@ class MakoBridge(TemplateBridge):
)
if rtd:
+ # RTD layout, imported from sqlalchemy.org
import urllib2
- template_url = builder.config['site_base'] + "/docs_base.mako"
- template = urllib2.urlopen(template_url).read()
- self.lookup.put_string("/rtd_base.mako", template)
+ template = urllib2.urlopen(builder.config['site_base'] + "/docs_adapter.mako").read()
+ self.lookup.put_string("docs_adapter.mako", template)
+
+ setup_ctx = urllib2.urlopen(builder.config['site_base'] + "/docs_adapter.py").read()
+ lcls = {}
+ exec(setup_ctx, lcls)
+ self.setup_ctx = lcls['setup_context']
+
+ def setup_ctx(self, context):
+ pass
def render(self, template, context):
template = template.replace(".html", ".mako")
context['prevtopic'] = context.pop('prev', None)
context['nexttopic'] = context.pop('next', None)
- # RTD layout
- if rtd:
- # add variables if not present, such
- # as if local test of READTHEDOCS variable
- if 'MEDIA_URL' not in context:
- context['MEDIA_URL'] = "http://media.readthedocs.org/"
- if 'slug' not in context:
- context['slug'] = context['project'].lower()
- if 'url' not in context:
- context['url'] = "/some/test/url"
- if 'current_version' not in context:
- context['current_version'] = "latest"
-
- if 'name' not in context:
- context['name'] = context['project'].lower()
-
- context['rtd'] = True
- context['toolbar'] = True
- context['layout'] = "rtd_layout.mako"
- context['base'] = "rtd_base.mako"
-
- context['pdf_url'] = "%spdf/%s/%s/%s.pdf" % (
- context['MEDIA_URL'],
- context['slug'],
- context['current_version'],
- context['slug']
- )
# local docs layout
- else:
- context['rtd'] = False
- context['toolbar'] = False
- context['layout'] = "layout.mako"
- context['base'] = "static_base.mako"
+ context['rtd'] = False
+ context['toolbar'] = False
+ context['base'] = "static_base.mako"
+
+ # override context attributes
+ self.setup_ctx(context)
context.setdefault('_', lambda x: x)
return self.lookup.get_template(template).render_unicode(**context)
diff --git a/doc/build/builder/viewsource.py b/doc/build/builder/viewsource.py
new file mode 100644
index 000000000..3f6b8263a
--- /dev/null
+++ b/doc/build/builder/viewsource.py
@@ -0,0 +1,209 @@
+from docutils import nodes
+from sphinx.ext.viewcode import collect_pages
+from sphinx.pycode import ModuleAnalyzer
+import imp
+from sphinx import addnodes
+import re
+from sphinx.util.compat import Directive
+import os
+from docutils.statemachine import StringList
+from sphinx.environment import NoUri
+
+import sys
+
+py2k = sys.version_info < (3, 0)
+if py2k:
+ text_type = unicode
+else:
+ text_type = str
+
+def view_source(name, rawtext, text, lineno, inliner,
+ options={}, content=[]):
+
+ env = inliner.document.settings.env
+
+ node = _view_source_node(env, text, None)
+ return [node], []
+
+def _view_source_node(env, text, state):
+ # pretend we're using viewcode fully,
+ # install the context it looks for
+ if not hasattr(env, '_viewcode_modules'):
+ env._viewcode_modules = {}
+
+ modname = text
+ text = modname.split(".")[-1] + ".py"
+
+ # imitate sphinx .<modname> syntax
+ if modname.startswith("."):
+ # see if the modname needs to be corrected in terms
+ # of current module context
+ base_module = env.temp_data.get('autodoc:module')
+ if base_module is None:
+ base_module = env.temp_data.get('py:module')
+
+ if base_module:
+ modname = base_module + modname
+
+ urito = env.app.builder.get_relative_uri
+
+ # we're showing code examples which may have dependencies
+ # which we really don't want to have required so load the
+ # module by file, not import (though we are importing)
+ # the top level module here...
+ pathname = None
+ for token in modname.split("."):
+ file_, pathname, desc = imp.find_module(token, [pathname] if pathname else None)
+ if file_:
+ file_.close()
+
+ # unlike viewcode which silently traps exceptions,
+ # I want this to totally barf if the file can't be loaded.
+ # a failed build better than a complete build missing
+ # key content
+ analyzer = ModuleAnalyzer.for_file(pathname, modname)
+ # copied from viewcode
+ analyzer.find_tags()
+ if not isinstance(analyzer.code, text_type):
+ code = analyzer.code.decode(analyzer.encoding)
+ else:
+ code = analyzer.code
+
+ if state is not None:
+ docstring = _find_mod_docstring(analyzer)
+ if docstring:
+ # get rid of "foo.py" at the top
+ docstring = re.sub(r"^[a-zA-Z_0-9]+\.py", "", docstring)
+
+ # strip
+ docstring = docstring.strip()
+
+ # yank only first paragraph
+ docstring = docstring.split("\n\n")[0].strip()
+ else:
+ docstring = None
+
+ entry = code, analyzer.tags, {}
+ env._viewcode_modules[modname] = entry
+ pagename = '_modules/' + modname.replace('.', '/')
+
+ try:
+ refuri = urito(env.docname, pagename)
+ except NoUri:
+ # if we're in the latex builder etc., this seems
+ # to be what we get
+ refuri = None
+
+
+ if docstring:
+ # embed the ref with the doc text so that it isn't
+ # a separate paragraph
+ if refuri:
+ docstring = "`%s <%s>`_ - %s" % (text, refuri, docstring)
+ else:
+ docstring = "``%s`` - %s" % (text, docstring)
+ para = nodes.paragraph('', '')
+ state.nested_parse(StringList([docstring]), 0, para)
+ return_node = para
+ else:
+ if refuri:
+ refnode = nodes.reference('', '',
+ nodes.Text(text, text),
+ refuri=urito(env.docname, pagename)
+ )
+ else:
+ refnode = nodes.Text(text, text)
+
+ if state:
+ return_node = nodes.paragraph('', '', refnode)
+ else:
+ return_node = refnode
+
+ return return_node
+
+from sphinx.pycode.pgen2 import token
+
+def _find_mod_docstring(analyzer):
+ """attempt to locate the module-level docstring.
+
+ Note that sphinx autodoc just uses ``__doc__``. But we don't want
+ to import the module, so we need to parse for it.
+
+ """
+ analyzer.tokenize()
+ for type_, parsed_line, start_pos, end_pos, raw_line in analyzer.tokens:
+ if type_ == token.COMMENT:
+ continue
+ elif type_ == token.STRING:
+ return eval(parsed_line)
+ else:
+ return None
+
+def _parse_content(content):
+ d = {}
+ d['text'] = []
+ idx = 0
+ for line in content:
+ idx += 1
+ m = re.match(r' *\:(.+?)\:(?: +(.+))?', line)
+ if m:
+ attrname, value = m.group(1, 2)
+ d[attrname] = value or ''
+ else:
+ break
+ d["text"] = content[idx:]
+ return d
+
+def _comma_list(text):
+ return re.split(r"\s*,\s*", text.strip())
+
+class AutoSourceDirective(Directive):
+ has_content = True
+
+ def run(self):
+ content = _parse_content(self.content)
+
+
+ env = self.state.document.settings.env
+ self.docname = env.docname
+
+ sourcefile = self.state.document.current_source.split(":")[0]
+ dir_ = os.path.dirname(sourcefile)
+ files = [
+ f for f in os.listdir(dir_) if f.endswith(".py")
+ and f != "__init__.py"
+ ]
+
+ if "files" in content:
+ # ordered listing of files to include
+ files = [fname for fname in _comma_list(content["files"])
+ if fname in set(files)]
+
+ node = nodes.paragraph('', '',
+ nodes.Text("Listing of files:", "Listing of files:")
+ )
+
+ bullets = nodes.bullet_list()
+ for fname in files:
+ modname, ext = os.path.splitext(fname)
+ # relative lookup
+ modname = "." + modname
+
+ link = _view_source_node(env, modname, self.state)
+
+ list_node = nodes.list_item('',
+ link
+ )
+ bullets += list_node
+
+ node += bullets
+
+ return [node]
+
+def setup(app):
+ app.add_role('viewsource', view_source)
+
+ app.add_directive('autosource', AutoSourceDirective)
+
+ # from sphinx.ext.viewcode
+ app.connect('html-collect-pages', collect_pages)
diff --git a/doc/build/changelog/changelog_01.rst b/doc/build/changelog/changelog_01.rst
index 156599a13..0f66f99e4 100644
--- a/doc/build/changelog/changelog_01.rst
+++ b/doc/build/changelog/changelog_01.rst
@@ -182,7 +182,7 @@
:tickets:
added a "mods" system which allows pluggable modules that modify/augment
- core functionality, using the function "install_mods(*modnames)".
+ core functionality, using the function "install_mods(\*modnames)".
.. change::
:tags:
@@ -421,7 +421,7 @@
:tags:
:tickets:
- added *args, **kwargs pass-thru to engine.transaction(func) allowing easier
+ added \*args, \**kwargs pass-thru to engine.transaction(func) allowing easier
creation of transactionalizing decorator functions
.. change::
@@ -520,7 +520,7 @@
create_engine() now uses genericized parameters; host/hostname,
db/dbname/database, password/passwd, etc. for all engine connections. makes
- engine URIs much more "universal"
+ engine URIs much more "universal"
.. change::
:tags:
@@ -672,7 +672,7 @@
:tags:
:tickets:
- added 'get_session().invalidate(*obj)' method to objectstore, instances will
+ added 'get_session().invalidate(\*obj)' method to objectstore, instances will
refresh() themselves upon the next attribute access.
.. change::
@@ -805,7 +805,7 @@
:tags:
:tickets:
- added "refresh(*obj)" method to objectstore / Session to reload the attributes of
+ added "refresh(\*obj)" method to objectstore / Session to reload the attributes of
any set of objects from the database unconditionally
.. changelog::
@@ -856,14 +856,15 @@
two issues related to postgres, which doesnt want to give you the "lastrowid"
since oids are deprecated:
- * postgres database-side defaults that are on primary key cols *do* execute
- explicitly beforehand, even though thats not the idea of a PassiveDefault. this is
- because sequences on columns get reflected as PassiveDefaults, but need to be explicitly
- executed on a primary key col so we know what we just inserted.
- * if you did add a row that has a bunch of database-side defaults on it,
- and the PassiveDefault thing was working the old way, i.e. they just execute on
- the DB side, the "cant get the row back without an OID" exception that occurred
- also will not happen unless someone (usually the ORM) explicitly asks for it.
+
+ * postgres database-side defaults that are on primary key cols *do* execute
+ explicitly beforehand, even though thats not the idea of a PassiveDefault. this is
+ because sequences on columns get reflected as PassiveDefaults, but need to be explicitly
+ executed on a primary key col so we know what we just inserted.
+ * if you did add a row that has a bunch of database-side defaults on it,
+ and the PassiveDefault thing was working the old way, i.e. they just execute on
+ the DB side, the "cant get the row back without an OID" exception that occurred
+ also will not happen unless someone (usually the ORM) explicitly asks for it.
.. change::
:tags:
diff --git a/doc/build/changelog/changelog_02.rst b/doc/build/changelog/changelog_02.rst
index 600dcc6eb..c3b91f1bd 100644
--- a/doc/build/changelog/changelog_02.rst
+++ b/doc/build/changelog/changelog_02.rst
@@ -3,14 +3,14 @@
0.2 Changelog
==============
-
+
.. changelog::
:version: 0.2.8
:released: Tue Sep 05 2006
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
cleanup on connection methods + documentation. custom DBAPI
arguments specified in query string, 'connect_args' argument
@@ -18,7 +18,7 @@
function to 'create_engine'.
.. change::
- :tags:
+ :tags:
:tickets: 274
added "recycle" argument to Pool, is "pool_recycle" on create_engine,
@@ -27,7 +27,7 @@
stale connections
.. change::
- :tags:
+ :tags:
:tickets: 121
changed "invalidate" semantics with pooled connection; will
@@ -39,28 +39,28 @@
the connecting application
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
eesh ! the tutorial doctest was broken for quite some time.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
add_property() method on mapper does a "compile all mappers"
step in case the given property references a non-compiled mapper
(as it did in the case of the tutorial !)
.. change::
- :tags:
+ :tags:
:tickets: 277
check for pg sequence already existing before create
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
if a contextual session is established via MapperExtension.get_session
(as it is using the sessioncontext plugin, etc), a lazy load operation
@@ -68,24 +68,24 @@
persistent with a session already.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
lazy loads will not fire off for an object that does not have a
database identity (why?
see http://www.sqlalchemy.org/trac/wiki/WhyDontForeignKeysLoadData)
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
unit-of-work does a better check for "orphaned" objects that are
part of a "delete-orphan" cascade, for certain conditions where the
parent isnt available to cascade from.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
mappers can tell if one of their objects is an "orphan" based
on interactions with the attribute package. this check is based
@@ -93,22 +93,22 @@
when objects are attached and detached from each other.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
it is now invalid to declare a self-referential relationship with
"delete-orphan" (as the abovementioned check would make them impossible
to save)
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
improved the check for objects being part of a session when the
unit of work seeks to flush() them as part of a relationship..
.. change::
- :tags:
+ :tags:
:tickets: 280
statement execution supports using the same BindParam
@@ -116,14 +116,14 @@
parameters. nice job by Bill Noon figuring out the basic idea.
.. change::
- :tags:
+ :tags:
:tickets: 60, 71
postgres reflection moved to use pg_schema tables, can be overridden
with use_information_schema=True argument to create_engine.
.. change::
- :tags:
+ :tags:
:tickets: 155
added case_sensitive argument to MetaData, Table, Column, determines
@@ -138,27 +138,27 @@
work with
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
unit tests updated to run without any pysqlite installed; pool
test uses a mock DBAPI
.. change::
- :tags:
+ :tags:
:tickets: 281
urls support escaped characters in passwords
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
added limit/offset to UNION queries (though not yet in oracle)
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
added "timezone=True" flag to DateTime and Time types. postgres
so far will convert this to "TIME[STAMP] (WITH|WITHOUT) TIME ZONE",
@@ -167,26 +167,26 @@
against datetimes that dont).
.. change::
- :tags:
+ :tags:
:tickets: 287
- fix to using query.count() with distinct, **kwargs with SelectResults
+ fix to using query.count() with distinct, \**kwargs with SelectResults
count()
.. change::
- :tags:
+ :tags:
:tickets: 289
deregister Table from MetaData when autoload fails;
.. change::
- :tags:
+ :tags:
:tickets: 293
import of py2.5s sqlite3
.. change::
- :tags:
+ :tags:
:tickets: 296
unicode fix for startswith()/endswith()
@@ -196,32 +196,32 @@
:released: Sat Aug 12 2006
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
quoting facilities set up so that database-specific quoting can be
turned on for individual table, schema, and column identifiers when
used in all queries/creates/drops. Enabled via "quote=True" in
Table or Column, as well as "quote_schema=True" in Table. Thanks to
- Aaron Spike for his excellent efforts.
+ Aaron Spike for the excellent efforts.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
assignmapper was setting is_primary=True, causing all sorts of mayhem
by not raising an error when redundant mappers were set up, fixed
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
added allow_null_pks option to Mapper, allows rows where some
primary key columns are null (i.e. when mapping to outer joins etc)
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
modifcation to unitofwork to not maintain ordering within the
"new" list or within the UOWTask "objects" list; instead, new objects
@@ -233,48 +233,48 @@
sort) dont have to worry about maintaining order (which they werent anyway)
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixed reflection of foreign keys to autoload the referenced table
if it was not loaded already
.. change::
- :tags:
+ :tags:
:tickets: 256
- pass URL query string arguments to connect() function
.. change::
- :tags:
+ :tags:
:tickets: 257
- oracle boolean type
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
custom primary/secondary join conditions in a relation *will* be propagated
to backrefs by default. specifying a backref() will override this behavior.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
better check for ambiguous join conditions in sql.Join; propagates to a
better error message in PropertyLoader (i.e. relation()/backref()) for when
the join condition can't be reasonably determined.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
sqlite creates ForeignKeyConstraint objects properly upon table
reflection.
.. change::
- :tags:
+ :tags:
:tickets: 224
adjustments to pool stemming from changes made for.
@@ -282,65 +282,65 @@
succeeded. added a test script to attempt testing this.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixed mysql reflection of default values to be PassiveDefault
.. change::
- :tags:
+ :tags:
:tickets: 263, 264
added reflected 'tinyint', 'mediumint' type to MS-SQL.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
SingletonThreadPool has a size and does a cleanup pass, so that
only a given number of thread-local connections stay around (needed
for sqlite applications that dispose of threads en masse)
.. change::
- :tags:
+ :tags:
:tickets: 267, 265
fixed small pickle bug(s) with lazy loaders
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixed possible error in mysql reflection where certain versions
return an array instead of string for SHOW CREATE TABLE call
.. change::
- :tags:
+ :tags:
:tickets: 1770
fix to lazy loads when mapping to joins
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
all create()/drop() calls have a keyword argument of "connectable".
"engine" is deprecated.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixed ms-sql connect() to work with adodbapi
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
added "nowait" flag to Select()
.. change::
- :tags:
+ :tags:
:tickets: 271
inheritance check uses issubclass() instead of direct __mro__ check
@@ -348,27 +348,27 @@
flexibly correspond to class inheritance
.. change::
- :tags:
+ :tags:
:tickets: 252
SelectResults will use a subselect, when calling an aggregate (i.e.
max, min, etc.) on a SelectResults that has an ORDER BY clause
.. change::
- :tags:
+ :tags:
:tickets: 269
fixes to types so that database-specific types more easily used;
fixes to mysql text types to work with this methodology
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
some fixes to sqlite date type organization
.. change::
- :tags:
+ :tags:
:tickets: 263
added MSTinyInteger to MS-SQL
@@ -378,7 +378,7 @@
:released: Thu Jul 20 2006
.. change::
- :tags:
+ :tags:
:tickets: 76
big overhaul to schema to allow truly composite primary and foreign
@@ -389,73 +389,73 @@
and reflection is now more table oriented rather than column oriented.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
overhaul to MapperExtension calling scheme, wasnt working very well
previously
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
tweaks to ActiveMapper, supports self-referential relationships
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
slight rearrangement to objectstore (in activemapper/threadlocal)
so that the SessionContext is referenced by '.context' instead
of subclassed directly.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
activemapper will use threadlocal's objectstore if the mod is
activated when activemapper is imported
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
small fix to URL regexp to allow filenames with '@' in them
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixes to Session expunge/update/etc...needs more cleanup.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
select_table mappers *still* werent always compiling
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixed up Boolean datatype
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
added count()/count_by() to list of methods proxied by assignmapper;
this also adds them to activemapper
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
connection exceptions wrapped in DBAPIError
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
ActiveMapper now supports autoloading column definitions from the
database if you supply a __autoload__ = True attribute in your
@@ -463,43 +463,43 @@
any relationships.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
deferred column load could screw up the connection status in
a flush() under some circumstances, this was fixed
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
expunge() was not working with cascade, fixed.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
potential endless loop in cascading operations fixed.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
added "synonym()" function, applied to properties to have a
propname the same as another, for the purposes of overriding props
and allowing the original propname to be accessible in select_by().
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fix to typing in clause construction which specifically helps
type issues with polymorphic_union (CAST/ColumnClause propagates
its type to proxy columns)
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
mapper compilation work ongoing, someday it'll work....moved
around the initialization of MapperProperty objects to be after
@@ -508,34 +508,34 @@
aware of their "inherited" status if so.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
eager loads explicitly disallowed on self-referential relationships, or
relationships to an inheriting mapper (which is also self-referential)
.. change::
- :tags:
+ :tags:
:tickets: 244
reduced bind param size in query._get to appease the picky oracle
.. change::
- :tags:
+ :tags:
:tickets: 234
added 'checkfirst' argument to table.create()/table.drop(), as
well as table.exists()
.. change::
- :tags:
+ :tags:
:tickets: 245
some other ongoing fixes to inheritance
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
attribute/backref/orphan/history-tracking tweaks as usual...
@@ -544,54 +544,54 @@
:released: Sat Jul 08 2006
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixed endless loop bug in select_by(), if the traversal hit
two mappers that referenced each other
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
upgraded all unittests to insert './lib/' into sys.path,
working around new setuptools PYTHONPATH-killing behavior
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
further fixes with attributes/dependencies/etc....
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
improved error handling for when DynamicMetaData is not connected
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
MS-SQL support largely working (tested with pymssql)
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
ordering of UPDATE and DELETE statements within groups is now
in order of primary key values, for more deterministic ordering
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
after_insert/delete/update mapper extensions now called per object,
not per-object-per-table
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
further fixes/refactorings to mapper compilation
@@ -600,30 +600,30 @@
:released: Tue Jun 27 2006
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
try/except when the mapper sets init.__name__ on a mapped class,
supports python 2.3
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixed bug where threadlocal engine would still autocommit
despite a transaction in progress
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
lazy load and deferred load operations require the parent object
to be in a Session to do the operation; whereas before the operation
would just return a blank list or None, it now raises an exception.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
Session.update() is slightly more lenient if the session to which
the given object was formerly attached to was garbage collected;
@@ -631,19 +631,19 @@
the previous Session.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixes to mapper compilation, checking for more error conditions
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
small fix to eager loading combined with ordering/limit/offset
.. change::
- :tags:
+ :tags:
:tickets: 206
utterly remarkable: added a single space between 'CREATE TABLE'
@@ -651,28 +651,28 @@
reserved word tablename.....*
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
more fixes to inheritance, related to many-to-many relations
properly saving
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixed bug when specifying explicit module to mysql dialect
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
when QueuePool times out it raises a TimeoutError instead of
erroneously making another connection
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
Queue.Queue usage in pool has been replaced with a locally
modified version (works in py2.3/2.4!) that uses a threading.RLock
@@ -682,35 +682,35 @@
causing a reentrant hang unless threading.RLock is used.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
postgres will not place SERIAL keyword on a primary key column
if it has a foreign key constraint
.. change::
- :tags:
+ :tags:
:tickets: 221
cursor() method on ConnectionFairy allows db-specific extension
arguments to be propagated
.. change::
- :tags:
+ :tags:
:tickets: 225
lazy load bind params properly propagate column type
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
new MySQL types: MSEnum, MSTinyText, MSMediumText, MSLongText, etc.
more support for MS-specific length/precision params in numeric types
patch courtesy Mike Bernson
.. change::
- :tags:
+ :tags:
:tickets: 224
some fixes to connection pool invalidate()
@@ -720,23 +720,23 @@
:released: Sat Jun 17 2006
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
overhaul to mapper compilation to be deferred. this allows mappers
to be constructed in any order, and their relationships to each
other are compiled when the mappers are first used.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixed a pretty big speed bottleneck in cascading behavior particularly
when backrefs were in use
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
the attribute instrumentation module has been completely rewritten; its
now a large degree simpler and clearer, slightly faster. the "history"
@@ -746,117 +746,117 @@
list attributes is now more open ended (i.e. theyre not sets anymore).
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
py2.4 "set" construct used internally, falls back to sets.Set when
"set" not available/ordering is needed.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fix to transaction control, so that repeated rollback() calls
dont fail (was failing pretty badly when flush() would raise
an exception in a larger try/except transaction block)
.. change::
- :tags:
+ :tags:
:tickets: 151
"foreignkey" argument to relation() can also be a list. fixed
auto-foreignkey detection
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixed bug where tables with schema names werent getting indexed in
the MetaData object properly
.. change::
- :tags:
+ :tags:
:tickets: 207
fixed bug where Column with redefined "key" property wasnt getting
type conversion happening in the ResultProxy
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixed 'port' attribute of URL to be an integer if present
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixed old bug where if a many-to-many table mapped as "secondary"
had extra columns, delete operations didnt work
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
bugfixes for mapping against UNION queries
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixed incorrect exception class thrown when no DB driver present
.. change::
- :tags:
+ :tags:
:tickets: 138
added NonExistentTable exception thrown when reflecting a table
that doesnt exist
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
small fix to ActiveMapper regarding one-to-one backrefs, other
refactorings
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
overridden constructor in mapped classes gets __name__ and
__doc__ from the original class
.. change::
- :tags:
+ :tags:
:tickets: 200
fixed small bug in selectresult.py regarding mapper extension
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
small tweak to cascade_mappers, not very strongly supported
function at the moment
.. change::
- :tags:
+ :tags:
:tickets: 202
some fixes to between(), column.between() to propagate typing
information better
.. change::
- :tags:
+ :tags:
:tickets: 203
if an object fails to be constructed, is not added to the
session
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
CAST function has been made into its own clause object with
its own compilation function in ansicompiler; allows MySQL
@@ -869,97 +869,97 @@
:released: Mon Jun 05 2006
.. change::
- :tags:
+ :tags:
:tickets: 190
big improvements to polymorphic inheritance behavior, enabling it
to work with adjacency list table structures
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
major fixes and refactorings to inheritance relationships overall,
more unit tests
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixed "echo_pool" flag on create_engine()
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fix to docs, removed incorrect info that close() is unsafe to use
with threadlocal strategy (its totally safe !)
.. change::
- :tags:
+ :tags:
:tickets: 188
create_engine() can take URLs as string or unicode
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
firebird support partially completed;
thanks to James Ralston and Brad Clements for their efforts.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
Oracle url translation was broken, fixed, will feed host/port/sid
into cx_oracle makedsn() if 'database' field is present, else uses
straight TNS name from the 'host' field
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fix to using unicode criterion for query.get()/query.load()
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
count() function on selectables now uses table primary key or
first column instead of "1" for criterion, also uses label "rowcount"
instead of "count".
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
got rudimental "mapping to multiple tables" functionality cleaned up,
more correctly documented
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
restored global_connect() function, attaches to a DynamicMetaData
instance called "default_metadata". leaving MetaData arg to Table
out will use the default metadata.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixes to session cascade behavior, entity_name propigation
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
reorganized unittests into subdirectories
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
more fixes to threadlocal connection nesting patterns
@@ -968,29 +968,29 @@
:released: Mon May 29 2006
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
"pool" argument to create_engine() properly propagates
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixes to URL, raises exception if not parsed, does not pass blank
fields along to the DB connect string (a string such as
user:host@/db was breaking on postgres)
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
small fixes to Mapper when it inserts and tries to get
new primary key values back
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
rewrote half of TLEngine, the ComposedSQLEngine used with
'strategy="threadlocal"'. it now properly implements engine.begin()/
@@ -998,8 +998,8 @@
added about six unittests.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
major "duh" in pool.Pool, forgot to put back the WeakValueDictionary.
unittest which was supposed to check for this was also silently missing
@@ -1007,35 +1007,35 @@
of scope.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
placeholder dispose() method added to SingletonThreadPool, doesnt
do anything yet
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
rollback() is automatically called when an exception is raised,
but only if theres no transaction in process (i.e. works more like
autocommit).
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
fixed exception raise in sqlite if no sqlite module present
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
added extra example detail for association object doc
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
Connection adds checks for already being closed
@@ -1044,8 +1044,8 @@
:released: Sat May 27 2006
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
overhaul to Engine system so that what was formerly the SQLEngine
is now a ComposedSQLEngine which consists of a variety of components,
@@ -1053,14 +1053,14 @@
db modules as well as Session and Mapper.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
create_engine now takes only RFC-1738-style strings:
driver://user:password@host:port/database
.. change::
- :tags:
+ :tags:
:tickets: 152
total rewrite of connection-scoping methodology, Connection objects
@@ -1070,8 +1070,8 @@
to the pool.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
overhaul to Session interface and scoping. uses hibernate-style
methods, including query(class), save(), save_or_update(), etc.
@@ -1082,8 +1082,8 @@
across multiple engines.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
overhaul to mapper's dependency and "cascade" behavior; dependency logic
factored out of properties.py into a separate module "dependency.py".
@@ -1093,8 +1093,8 @@
decisions on how that child should be updated in the DB with regards to deletes.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
overhaul to Schema to build upon MetaData object instead of an Engine.
Entire SQL/Schema system can be used with no Engines whatsoever, executed
@@ -1103,13 +1103,13 @@
anymore and is replaced by DynamicMetaData.
.. change::
- :tags:
+ :tags:
:tickets: 167
true polymorphic behavior implemented, fixes
.. change::
- :tags:
+ :tags:
:tickets: 147
"oid" system has been totally moved into compile-time behavior;
@@ -1117,71 +1117,71 @@
doesnt get compiled, fixes
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
overhaul to packaging; "mapping" is now "orm", "objectstore" is now
"session", the old "objectstore" namespace gets loaded in via the
"threadlocal" mod if used
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
mods now called in via "import <modname>". extensions favored over
mods as mods are globally-monkeypatching
.. change::
- :tags:
+ :tags:
:tickets: 154
fix to add_property so that it propagates properties to inheriting
mappers
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
backrefs create themselves against primary mapper of its originating
property, priamry/secondary join arguments can be specified to override.
helps their usage with polymorphic mappers
.. change::
- :tags:
+ :tags:
:tickets: 31
"table exists" function has been implemented
.. change::
- :tags:
+ :tags:
:tickets: 98
"create_all/drop_all" added to MetaData object
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
improvements and fixes to topological sort algorithm, as well as more
unit tests
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
tutorial page added to docs which also can be run with a custom doctest
runner to ensure its properly working. docs generally overhauled to
deal with new code patterns
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
many more fixes, refactorings.
.. change::
- :tags:
- :tickets:
+ :tags:
+ :tickets:
migration guide is available on the Wiki at
http://www.sqlalchemy.org/trac/wiki/02Migration
diff --git a/doc/build/changelog/changelog_03.rst b/doc/build/changelog/changelog_03.rst
index e47da340a..05e10e664 100644
--- a/doc/build/changelog/changelog_03.rst
+++ b/doc/build/changelog/changelog_03.rst
@@ -695,7 +695,7 @@
:tags: orm
:tickets:
- session.get() and session.load() propagate **kwargs through to
+ session.get() and session.load() propagate \**kwargs through to
query
.. change::
@@ -979,7 +979,7 @@
:tickets:
query strings in unicode URLs get keys encoded to ascii
- for **kwargs compat
+ for \**kwargs compat
.. change::
:tags: sql
@@ -1113,7 +1113,7 @@
:tickets:
support for SSL arguments given as inline within URL query string,
- prefixed with "ssl_", courtesy terjeros@gmail.com.
+ prefixed with "ssl\_", courtesy terjeros@gmail.com.
.. change::
:tags: <schemaname>, mysql
@@ -1266,7 +1266,7 @@
:tags: sql
:tickets:
- the "else_" parameter to the case statement now properly works when
+ the "else\_" parameter to the case statement now properly works when
set to zero.
.. change::
@@ -1279,16 +1279,15 @@
and a new one returned with additional criterion added.
The new methods include:
- filter() - applies select criterion to the query
- filter_by() - applies "by"-style criterion to the query
- avg() - return the avg() function on the given column
- join() - join to a property (or across a list of properties)
- outerjoin() - like join() but uses LEFT OUTER JOIN
- limit()/offset() - apply LIMIT/OFFSET
- range-based access which applies limit/offset:
- session.query(Foo)[3:5]
- distinct() - apply DISTINCT
- list() - evaluate the criterion and return results
+ * filter() - applies select criterion to the query
+ * filter_by() - applies "by"-style criterion to the query
+ * avg() - return the avg() function on the given column
+ * join() - join to a property (or across a list of properties)
+ * outerjoin() - like join() but uses LEFT OUTER JOIN
+ * limit()/offset() - apply LIMIT/OFFSET range-based access
+ which applies limit/offset: session.query(Foo)[3:5]
+ * distinct() - apply DISTINCT
+ * list() - evaluate the criterion and return results
no incompatible changes have been made to Query's API and no methods
have been deprecated. Existing methods like select(), select_by(),
@@ -1321,7 +1320,7 @@
:tags: orm
:tickets:
- strings and columns can also be sent to the *args of instances()
+ strings and columns can also be sent to the \*args of instances()
where those exact result columns will be part of the result tuples.
.. change::
@@ -1488,7 +1487,7 @@
:tags: mysql
:tickets:
- added a catchall **kwargs to MSString, to help reflection of
+ added a catchall \**kwargs to MSString, to help reflection of
obscure types (like "varchar() binary" in MS 4.0)
.. change::
@@ -1526,7 +1525,7 @@
:tickets:
fixed argument passing to straight textual execute() on engine,
- connection. can handle *args or a list instance for positional, **kwargs
+ connection. can handle \*args or a list instance for positional, \**kwargs
or a dict instance for named args, or a list of list or dicts to invoke
executemany()
@@ -2364,7 +2363,7 @@
:tags: engine/pool
:tickets:
- create_engine() reworked to be strict about incoming **kwargs. all keyword
+ create_engine() reworked to be strict about incoming \**kwargs. all keyword
arguments must be consumed by one of the dialect, connection pool, and engine
constructors, else a TypeError is thrown which describes the full set of
invalid kwargs in relation to the selected dialect/pool/engine configuration.
diff --git a/doc/build/changelog/changelog_04.rst b/doc/build/changelog/changelog_04.rst
index e1acfe4c0..61ea28c11 100644
--- a/doc/build/changelog/changelog_04.rst
+++ b/doc/build/changelog/changelog_04.rst
@@ -83,14 +83,11 @@
:tickets:
Added "add()" and "add_all()" to scoped_session
- methods. Workaround for 0.4.7:
+ methods. Workaround for 0.4.7::
- from sqlalchemy.orm.scoping import ScopedSession,\
- instrument
- setattr(
- ScopedSession, "add", instrument("add"))
- setattr(
- ScopedSession, "add_all", instrument("add_all"))
+ from sqlalchemy.orm.scoping import ScopedSession, instrument
+ setattr(ScopedSession, "add", instrument("add"))
+ setattr(ScopedSession, "add_all", instrument("add_all"))
.. change::
:tags: orm
@@ -344,7 +341,7 @@
:tags: orm
:tickets:
- set-based collections |=, -=, ^= and &= are stricter about
+ set-based collections \|=, -=, ^= and &= are stricter about
their operands and only operate on sets, frozensets or
subclasses of the collection type. Previously, they would
accept any duck-typed set.
@@ -424,7 +421,7 @@
:tags: ext
:tickets:
- set-based association proxies |=, -=, ^= and &= are
+ set-based association proxies \|=, -=, ^= and &= are
stricter about their operands and only operate on sets,
frozensets or other association proxies. Previously, they
would accept any duck-typed set.
@@ -541,11 +538,12 @@
The new approach also automatically allows eager loads
to work for subclasses, if they are present, for
- example
+ example::
+
sess.query(Company).options(
eagerload_all(
-
))
+
to load Company objects, their employees, and the
'machines' collection of employees who happen to be
Engineers. A "with_polymorphic" Query option should be
@@ -561,7 +559,7 @@
is not carved in stone just yet: _values() and
_from_self(). We'd like feedback on these.
- - _values(*columns) is given a list of column
+ - _values(\*columns) is given a list of column
expressions, and returns a new Query that only
returns those columns. When evaluated, the return
value is a list of tuples just like when using
@@ -594,7 +592,7 @@
:tickets:
query.order_by() and query.group_by() will accept
- multiple arguments using *args (like select()
+ multiple arguments using \*args (like select()
already does).
.. change::
@@ -1780,7 +1778,7 @@
:tags: ext
:tickets:
- '+', '*', '+=' and '*=' support for association
+ '+', '*', '+=' and '\*=' support for association
proxied lists.
.. change::
@@ -1866,7 +1864,7 @@
:tickets:
added new flag to String and create_engine(),
- assert_unicode=(True|False|'warn'|None). Defaults to `False` or `None` on
+ assert_unicode=(True|False|'warn'\|None). Defaults to `False` or `None` on
create_engine() and String, `'warn'` on the Unicode type. When `True`,
results in all unicode conversion operations raising an exception when a
non-unicode bytestring is passed as a bind parameter. 'warn' results
@@ -2010,8 +2008,8 @@
:tickets: 908
mapped classes which extend "object" and do not provide an
- __init__() method will now raise TypeError if non-empty *args
- or **kwargs are present at instance construction time (and are
+ __init__() method will now raise TypeError if non-empty \*args
+ or \**kwargs are present at instance construction time (and are
not consumed by any extensions such as the scoped_session mapper),
consistent with the behavior of normal Python classes
@@ -2818,10 +2816,10 @@
:tickets:
Improvements and fixes on Firebird reflection:
- . FBDialect now mimics OracleDialect, regarding case-sensitivity of TABLE and
- COLUMN names (see 'case_sensitive remotion' topic on this current file).
- . FBDialect.table_names() doesn't bring system tables (ticket:796).
- . FB now reflects Column's nullable property correctly.
+ * FBDialect now mimics OracleDialect, regarding case-sensitivity of TABLE and
+ COLUMN names (see 'case_sensitive remotion' topic on this current file).
+ * FBDialect.table_names() doesn't bring system tables (ticket:796).
+ * FB now reflects Column's nullable property correctly.
.. change::
:tags:
@@ -2963,7 +2961,7 @@
:tags:
:tickets:
- Changed the API for the in_ operator. in_() now accepts a single argument
+ Changed the API for the in\_ operator. in_() now accepts a single argument
that is a sequence of values or a selectable. The old API of passing in
values as varargs still works but is deprecated.
@@ -3246,7 +3244,7 @@
:tags:
:tickets:
- Tidied up what ends up in your namespace when you 'from sqlalchemy import *':
+ Tidied up what ends up in your namespace when you 'from sqlalchemy import \*':
.. change::
:tags:
@@ -3816,10 +3814,10 @@
is represented by more than one column, when using the ORM. Objects of
the new type are fully functional in query expressions, comparisons,
query.get() clauses, etc. and act as though they are regular single-column
- scalars... except they're not! Use the function composite(cls, *columns)
+ scalars... except they're not! Use the function composite(cls, \*columns)
inside of the mapper's "properties" dict, and instances of cls will be
created/mapped to a single attribute, comprised of the values correponding
- to *columns.
+ to \*columns.
.. change::
:tags: orm
@@ -3912,7 +3910,7 @@
:tickets:
All "type" keyword arguments, such as those to bindparam(), column(),
- Column(), and func.<something>(), renamed to "type_". Those objects still
+ Column(), and func.<something>(), renamed to "type\_". Those objects still
name their "type" attribute as "type".
.. change::
diff --git a/doc/build/changelog/changelog_05.rst b/doc/build/changelog/changelog_05.rst
index 0bcc1aa3f..debcc29fd 100644
--- a/doc/build/changelog/changelog_05.rst
+++ b/doc/build/changelog/changelog_05.rst
@@ -1093,7 +1093,7 @@
Session.scalar() now converts raw SQL strings to text()
the same way Session.execute() does and accepts same
- alternative **kw args.
+ alternative \**kw args.
.. change::
:tags: orm
@@ -1506,7 +1506,7 @@
:tickets:
ColumnProperty (and front-end helpers such as ``deferred``) no
- longer ignores unknown **keyword arguments.
+ longer ignores unknown \**keyword arguments.
.. change::
:tags: orm
@@ -2903,7 +2903,7 @@
:tags: orm
:tickets:
- The RowTuple object returned by Query(*cols) now features
+ The RowTuple object returned by Query(\*cols) now features
keynames which prefer mapped attribute names over column keys,
column keys over column names, i.e. Query(Class.foo,
Class.bar) will have names "foo" and "bar" even if those are
@@ -2984,7 +2984,7 @@
:tickets: 1140
class.someprop.in_() raises NotImplementedError pending the
- implementation of "in_" for relation
+ implementation of "in\_" for relation
.. change::
:tags: orm
@@ -3499,7 +3499,7 @@
Unicode, UnicodeText types now set "assert_unicode" and
"convert_unicode" by default, but accept overriding
- **kwargs for these values.
+ \**kwargs for these values.
.. change::
:tags: sql
diff --git a/doc/build/changelog/changelog_06.rst b/doc/build/changelog/changelog_06.rst
index c7f4dcdea..18d61019a 100644
--- a/doc/build/changelog/changelog_06.rst
+++ b/doc/build/changelog/changelog_06.rst
@@ -1013,7 +1013,7 @@
New Query methods: query.label(name), query.as_scalar(),
return the query's statement as a scalar subquery
with /without label;
- query.with_entities(*ent), replaces the SELECT list of
+ query.with_entities(\*ent), replaces the SELECT list of
the query with new entities.
Roughly equivalent to a generative form of query.values()
which accepts mapped entities as well as column
@@ -1246,7 +1246,7 @@
:tags: sql
:tickets:
- Added type_coerce(expr, type_) expression element.
+ Added type_coerce(expr, type\_) expression element.
Treats the given expression as the given type when evaluating
expressions and processing result rows, but does not
affect the generation of SQL, other than an anonymous
@@ -3005,7 +3005,7 @@
:tags: orm
:tickets:
- Query gains an add_columns(*columns) method which is a multi-
+ Query gains an add_columns(\*columns) method which is a multi-
version of add_column(col). add_column(col) is future
deprecated.
@@ -3641,9 +3641,9 @@
:tags: declarative
:tickets:
- DeclarativeMeta exclusively uses cls.__dict__ (not dict_)
+ DeclarativeMeta exclusively uses cls.__dict__ (not dict\_)
as the source of class information; _as_declarative exclusively
- uses the dict_ passed to it as the source of class information
+ uses the dict\_ passed to it as the source of class information
(which when using DeclarativeMeta is cls.__dict__). This should
in theory make it easier for custom metaclasses to modify
the state passed into _as_declarative.
@@ -4190,10 +4190,10 @@
* Passing a single list of elements to eagerload(),
eagerload_all(), contains_eager(), lazyload(),
defer(), and undefer() instead of multiple positional
- *args is deprecated.
+ \*args is deprecated.
* Passing a single list of elements to query.order_by(),
query.group_by(), query.join(), or query.outerjoin()
- instead of multiple positional *args is deprecated.
+ instead of multiple positional \*args is deprecated.
* query.iterate_instances() is removed. Use query.instances().
* Query.query_from_parent() is removed. Use the
sqlalchemy.orm.with_parent() function to produce a
@@ -4363,7 +4363,7 @@
"expr != expr" can be very expensive, and it's preferred
that the user not issue in_() if the list is empty,
instead simply not querying, or modifying the criterion
- as appropriate for more complex situations.
+ as appropriate for more complex situations.
.. change::
:tags: sql
@@ -4523,7 +4523,7 @@
* the "connection" argument from engine.transaction() and
engine.run_callable() is removed - Connection itself
now has those methods. All four methods accept
- *args and **kwargs which are passed to the given callable,
+ \*args and \**kwargs which are passed to the given callable,
as well as the operating connection.
.. change::
@@ -4570,11 +4570,13 @@
Removed public mutability from Index and Constraint
objects:
- - ForeignKeyConstraint.append_element()
- - Index.append_column()
- - UniqueConstraint.append_column()
- - PrimaryKeyConstraint.add()
- - PrimaryKeyConstraint.remove()
+
+ * ForeignKeyConstraint.append_element()
+ * Index.append_column()
+ * UniqueConstraint.append_column()
+ * PrimaryKeyConstraint.add()
+ * PrimaryKeyConstraint.remove()
+
These should be constructed declaratively (i.e. in one
construction).
@@ -4682,18 +4684,22 @@
The signature of the "on" callable passed to DDL() and
DDLElement() is revised as follows:
- "ddl" - the DDLElement object itself.
- "event" - the string event name.
- "target" - previously "schema_item", the Table or
- MetaData object triggering the event.
- "connection" - the Connection object in use for the operation.
- **kw - keyword arguments. In the case of MetaData before/after
- create/drop, the list of Table objects for which
- CREATE/DROP DDL is to be issued is passed as the kw
- argument "tables". This is necessary for metadata-level
- DDL that is dependent on the presence of specific tables.
+ ddl
+ the DDLElement object itself
+ event
+ the string event name.
+ target
+ previously "schema_item", the Table or MetaData object triggering the event.
+ connection
+ the Connection object in use for the operation.
+ \**kw
+ keyword arguments. In the case of MetaData before/after
+ create/drop, the list of Table objects for which
+ CREATE/DROP DDL is to be issued is passed as the kw
+ argument "tables". This is necessary for metadata-level
+ DDL that is dependent on the presence of specific tables.
- - the "schema_item" attribute of DDL has been renamed to
+ The "schema_item" attribute of DDL has been renamed to
"target".
.. change::
diff --git a/doc/build/changelog/changelog_07.rst b/doc/build/changelog/changelog_07.rst
index 99702a2f0..da89bbde3 100644
--- a/doc/build/changelog/changelog_07.rst
+++ b/doc/build/changelog/changelog_07.rst
@@ -7,15 +7,61 @@
:version: 0.7.11
.. change::
+ :tags: bug, engine
+ :tickets: 2851
+ :versions: 0.8.3, 0.9.0b1
+
+ The regexp used by the :func:`~sqlalchemy.engine.url.make_url` function now parses
+ ipv6 addresses, e.g. surrounded by brackets.
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 2807
+ :versions: 0.8.3, 0.9.0b1
+
+ Fixed bug where list instrumentation would fail to represent a
+ setslice of ``[0:0]`` correctly, which in particular could occur
+ when using ``insert(0, item)`` with the association proxy. Due
+ to some quirk in Python collections, the issue was much more likely
+ with Python 3 rather than 2.
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 2801
+ :versions: 0.8.3, 0.9.0b1
+
+ Fixed regression dating back to 0.7.9 whereby the name of a CTE might
+ not be properly quoted if it was referred to in multiple FROM clauses.
+
+ .. change::
+ :tags: mysql, bug
+ :tickets: 2791
+ :versions: 0.8.3, 0.9.0b1
+
+ Updates to MySQL reserved words for versions 5.5, 5.6, courtesy
+ Hanno Schlichting.
+
+ .. change::
+ :tags: sql, bug, cte
+ :tickets: 2783
+ :versions: 0.8.3, 0.9.0b1
+
+ Fixed bug in common table expression system where if the CTE were
+ used only as an ``alias()`` construct, it would not render using the
+ WITH keyword.
+
+ .. change::
:tags: bug, sql
:tickets: 2784
+ :versions: 0.8.3, 0.9.0b1
Fixed bug in :class:`.CheckConstraint` DDL where the "quote" flag from a
:class:`.Column` object would not be propagated.
.. change::
- :tags: bug, orm
- :tickets: 2699
+ :tags: bug, orm
+ :tickets: 2699
+ :versions: 0.8.1
Fixed bug when a query of the form:
``query(SubClass).options(subqueryload(Baseclass.attrname))``,
@@ -30,6 +76,7 @@
.. change::
:tags: bug, orm
:tickets: 2689
+ :versions: 0.8.1
Fixed bug in unit of work whereby a joined-inheritance
subclass could insert the row for the "sub" table
@@ -39,6 +86,7 @@
.. change::
:tags: feature, postgresql
:tickets: 2676
+ :versions: 0.8.0
Added support for Postgresql's traditional SUBSTRING
function syntax, renders as "SUBSTRING(x FROM y FOR z)"
@@ -81,6 +129,27 @@
:released: Thu Feb 7 2013
.. change::
+ :tags: engine, bug
+ :tickets: 2604
+ :versions: 0.8.0b2
+
+ Fixed :meth:`.MetaData.reflect` to correctly use
+ the given :class:`.Connection`, if given, without
+ opening a second connection from that connection's
+ :class:`.Engine`.
+
+ .. change::
+ :tags: mssql, bug
+      :tickets: 2607
+ :versions: 0.8.0b2
+
+ Fixed bug whereby using "key" with Column
+ in conjunction with "schema" for the owning
+ Table would fail to locate result rows due
+ to the MSSQL dialect's "schema rendering"
+ logic's failure to take .key into account.
+
+ .. change::
:tags: sql, mysql, gae
:tickets: 2649
@@ -138,6 +207,7 @@
.. change::
:tags: sqlite, bug
:tickets: 2568
+ :versions: 0.8.0b2
More adjustment to this SQLite related issue which was released in
0.7.9, to intercept legacy SQLite quoting characters when reflecting
@@ -148,6 +218,7 @@
.. change::
:tags: sql, bug
:tickets: 2631
+ :versions: 0.8.0b2
Fixed bug where using server_onupdate=<FetchedValue|DefaultClause>
without passing the "for_update=True" flag would apply the default
@@ -1138,12 +1209,12 @@
:tickets:
Added some decent context managers
- to Engine, Connection:
+ to Engine, Connection::
with engine.begin() as conn:
<work with conn in a transaction>
- and:
+ and::
with engine.connect() as conn:
<work with conn>
@@ -1394,10 +1465,10 @@
:tickets: 2361
Dialect-specific compilers now raise
- CompileException for all type/statement compilation
+ CompileError for all type/statement compilation
issues, instead of InvalidRequestError or ArgumentError.
The DDL for CREATE TABLE will re-raise
- CompileExceptions to include table/column information
+ CompileError to include table/column information
for the problematic column.
.. change::
@@ -1728,10 +1799,10 @@
polymorphic_on now accepts many
new kinds of values:
- - standalone expressions that aren't
+ * standalone expressions that aren't
otherwise mapped
- - column_property() objects
- - string names of any column_property()
+ * column_property() objects
+ * string names of any column_property()
or attribute name of a mapped Column
The docs include an example using
@@ -1897,7 +1968,7 @@
:tickets: 1679
a "has_schema" method has been implemented
- on dialect, but only works on Postgresql so far.
+ on dialect, but only works on Postgresql so far.
Courtesy Manlio Perillo.
.. change::
@@ -2124,7 +2195,7 @@
Enhanced the instrumentation in the ORM to support
Py3K's new argument style of "required kw arguments",
- i.e. fn(a, b, *, c, d), fn(a, b, *args, c, d).
+ i.e. fn(a, b, \*, c, d), fn(a, b, \*args, c, d).
Argument signatures of mapped object's __init__
method will be preserved, including required kw rules.
@@ -2146,8 +2217,9 @@
Fixed a variety of synonym()-related regressions
from 0.6:
- - making a synonym against a synonym now works.
- - synonyms made against a relationship() can
+
+ * making a synonym against a synonym now works.
+ * synonyms made against a relationship() can
be passed to query.join(), options sent
to query.options(), passed by name
to query.with_parent().
@@ -2256,19 +2328,20 @@
:tickets: 2239
New declarative features:
- - __declare_last__() method, establishes an event
- listener for the class method that will be called
- when mappers are completed with the final "configure"
- step.
- - __abstract__ flag. The class will not be mapped
- at all when this flag is present on the class.
- - New helper classes ConcreteBase, AbstractConcreteBase.
- Allow concrete mappings using declarative which automatically
- set up the "polymorphic_union" when the "configure"
- mapper step is invoked.
- - The mapper itself has semi-private methods that allow
- the "with_polymorphic" selectable to be assigned
- to the mapper after it has already been configured.
+
+ * __declare_last__() method, establishes an event
+ listener for the class method that will be called
+ when mappers are completed with the final "configure"
+ step.
+ * __abstract__ flag. The class will not be mapped
+ at all when this flag is present on the class.
+ * New helper classes ConcreteBase, AbstractConcreteBase.
+ Allow concrete mappings using declarative which automatically
+ set up the "polymorphic_union" when the "configure"
+ mapper step is invoked.
+ * The mapper itself has semi-private methods that allow
+ the "with_polymorphic" selectable to be assigned
+ to the mapper after it has already been configured.
.. change::
:tags: orm
@@ -2806,7 +2879,7 @@
:tickets: 2206
Fixed bug whereby adaptation of old append_ddl_listener()
- function was passing unexpected **kw through
+ function was passing unexpected \**kw through
to the Table event. Table gets no kws, the MetaData
event in 0.6 would get "tables=somecollection",
this behavior is preserved.
@@ -4108,6 +4181,10 @@
Mutation Event Extension, supercedes "mutable=True"
+ .. seealso::
+
+ :ref:`07_migration_mutation_extension`
+
.. change::
:tags: orm
:tickets: 1980
@@ -4271,7 +4348,7 @@
:tickets: 1069
Query.distinct() now accepts column expressions
- as *args, interpreted by the Postgresql dialect
+ as \*args, interpreted by the Postgresql dialect
as DISTINCT ON (<expr>).
.. change::
@@ -4371,7 +4448,7 @@
:tickets: 1069
select.distinct() now accepts column expressions
- as *args, interpreted by the Postgresql dialect
+ as \*args, interpreted by the Postgresql dialect
as DISTINCT ON (<expr>). Note this was already
available via passing a list to the `distinct`
keyword argument to select().
@@ -4381,7 +4458,7 @@
:tickets:
select.prefix_with() accepts multiple expressions
- (i.e. *expr), 'prefix' keyword argument to select()
+ (i.e. \*expr), 'prefix' keyword argument to select()
accepts a list or tuple.
.. change::
diff --git a/doc/build/changelog/changelog_08.rst b/doc/build/changelog/changelog_08.rst
index 4e5454180..e4e5c11ab 100644
--- a/doc/build/changelog/changelog_08.rst
+++ b/doc/build/changelog/changelog_08.rst
@@ -3,19 +3,516 @@
0.8 Changelog
==============
+.. changelog_imports::
+
+ .. include:: changelog_07.rst
+ :start-line: 5
+
+.. changelog::
+ :version: 0.8.5
+
+ .. change::
+ :tags: bug, orm
+ :versions: 0.9.2
+ :pullreq: github:58
+
+ Fixed error message when an iterator object is passed to
+ :func:`.class_mapper` or similar, where the error would fail to
+ render on string formatting. Pullreq courtesy Kyle Stark.
+
+ .. change::
+ :tags: bug, firebird
+ :versions: 0.9.0
+ :tickets: 2897
+
+ The firebird dialect will quote identifiers which begin with an
+ underscore. Courtesy Treeve Jelbert.
+
+ .. change::
+ :tags: bug, firebird
+ :versions: 0.9.0
+
+ Fixed bug in Firebird index reflection where the columns within the
+ index were not sorted correctly; they are now sorted
+ in order of RDB$FIELD_POSITION.
+
+ .. change::
+ :tags: bug, mssql, firebird
+ :versions: 0.9.0
+
+ The "asdecimal" flag used with the :class:`.Float` type will now
+ work with Firebird as well as the mssql+pyodbc dialects; previously the
+ decimal conversion was not occurring.
+
+ .. change::
+ :tags: bug, mssql, pymssql
+ :versions: 0.9.0
+ :pullreq: github:51
+
+ Added "Net-Lib error during Connection reset by peer" message
+ to the list of messages checked for "disconnect" within the
+ pymssql dialect. Courtesy John Anderson.
+
+ .. change::
+ :tags: bug, sql
+ :versions: 0.9.0
+ :tickets: 2896
+
+ Fixed issue where a primary key column that has a Sequence on it,
+ yet the column is not the "auto increment" column, either because
+ it has a foreign key constraint or ``autoincrement=False`` set,
+ would attempt to fire the Sequence on INSERT for backends that don't
+ support sequences, when presented with an INSERT missing the primary
+ key value. This would take place on non-sequence backends like
+ SQLite, MySQL.
+
+ .. change::
+ :tags: bug, sql
+ :versions: 0.9.0
+ :tickets: 2895
+
+ Fixed bug with :meth:`.Insert.from_select` method where the order
+ of the given names would not be taken into account when generating
+ the INSERT statement, thus producing a mismatch versus the column
+ names in the given SELECT statement. Also noted that
+ :meth:`.Insert.from_select` implies that Python-side insert defaults
+ cannot be used, since the statement has no VALUES clause.
+
+ .. change::
+ :tags: enhancement, sql
+ :versions: 0.9.0
+
+ The exception raised when a :class:`.BindParameter` is present
+ in a compiled statement without a value now includes the key name
+ of the bound parameter in the error message.
+
+ .. change::
+ :tags: bug, orm
+ :versions: 0.9.0
+ :tickets: 2887
+
+ An adjustment to the :func:`.subqueryload` strategy which ensures that
+ the query runs after the loading process has begun; this is so that
+ the subqueryload takes precedence over other loaders that may be
+ hitting the same attribute due to other eager/noload situations
+ at the wrong time.
+
+ .. change::
+ :tags: bug, orm
+ :versions: 0.9.0
+ :tickets: 2885
+
+ Fixed bug when using joined table inheritance from a table to a
+ select/alias on the base, where the PK columns were also not same
+ named; the persistence system would fail to copy primary key values
+ from the base table to the inherited table upon INSERT.
+
+ .. change::
+ :tags: bug, orm
+ :versions: 0.9.0
+ :tickets: 2889
+
+ :func:`.composite` will raise an informative error message when the
+ columns/attribute (names) passed don't resolve to a Column or mapped
+ attribute (such as an erroneous tuple); previously raised an unbound
+ local.
+
+ .. change::
+ :tags: bug, declarative
+ :versions: 0.9.0
+ :tickets: 2888
+
+ Error message when a string arg sent to :func:`.relationship` which
+ doesn't resolve to a class or mapper has been corrected to work
+ the same way as when a non-string arg is received, which indicates
+ the name of the relationship which had the configurational error.
+
+.. changelog::
+ :version: 0.8.4
+
+ .. change::
+ :tags: bug, engine
+ :versions: 0.9.0
+ :tickets: 2881
+
+ A DBAPI that raises an error on ``connect()`` which is not a subclass
+ of dbapi.Error (such as ``TypeError``, ``NotImplementedError``, etc.)
+ will propagate the exception unchanged. Previously,
+ the error handling specific to the ``connect()`` routine would both
+ inappropriately run the exception through the dialect's
+ :meth:`.Dialect.is_disconnect` routine as well as wrap it in
+ a :class:`sqlalchemy.exc.DBAPIError`. It is now propagated unchanged
+ in the same way as occurs within the execute process.
+
+ .. change::
+ :tags: bug, engine, pool
+ :versions: 0.9.0
+ :tickets: 2880
+
+ The :class:`.QueuePool` has been enhanced to not block new connection
+ attempts when an existing connection attempt is blocking. Previously,
+ the production of new connections was serialized within the block
+ that monitored overflow; the overflow counter is now altered within
+      its own critical section outside of the connection process itself.
+
+ .. change::
+ :tags: bug, engine, pool
+ :versions: 0.9.0
+ :tickets: 2522
+
+ Made a slight adjustment to the logic which waits for a pooled
+ connection to be available, such that for a connection pool
+ with no timeout specified, it will every half a second break out of
+ the wait to check for the so-called "abort" flag, which allows the
+ waiter to break out in case the whole connection pool was dumped;
+ normally the waiter should break out due to a notify_all() but it's
+ possible this notify_all() is missed in very slim cases.
+ This is an extension of logic first introduced in 0.8.0, and the
+ issue has only been observed occasionally in stress tests.
+
+ .. change::
+ :tags: bug, mssql
+ :versions: 0.9.0
+ :pullreq: bitbucket:7
+
+ Fixed bug introduced in 0.8.0 where the ``DROP INDEX``
+ statement for an index in MSSQL would render incorrectly if the
+ index were in an alternate schema; the schemaname/tablename
+ would be reversed. The format has been also been revised to
+ match current MSSQL documentation. Courtesy Derek Harland.
+
+ .. change::
+ :tags: feature, sql
+ :tickets: 1443
+ :versions: 0.9.0b1
+
+ Added support for "unique constraint" reflection, via the
+ :meth:`.Inspector.get_unique_constraints` method.
+      Thanks to Roman Podolyaka for the patch.
+
+ .. change::
+ :tags: bug, oracle
+ :tickets: 2864
+ :versions: 0.9.0
+
+ Added ORA-02396 "maximum idle time" error code to list of
+ "is disconnect" codes with cx_oracle.
+
+ .. change::
+ :tags: bug, engine
+ :tickets: 2871
+ :versions: 0.9.0
+
+ Fixed bug where SQL statement would be improperly ASCII-encoded
+ when a pre-DBAPI :class:`.StatementError` were raised within
+ :meth:`.Connection.execute`, causing encoding errors for
+ non-ASCII statements. The stringification now remains within
+ Python unicode thus avoiding encoding errors.
+
+ .. change::
+ :tags: bug, oracle
+ :tickets: 2870
+ :versions: 0.9.0
+
+ Fixed bug where Oracle ``VARCHAR`` types given with no length
+ (e.g. for a ``CAST`` or similar) would incorrectly render ``None CHAR``
+ or similar.
+
+ .. change::
+ :tags: bug, ext
+ :tickets: 2869
+ :versions: 0.9.0
+
+ Fixed bug which prevented the ``serializer`` extension from working
+ correctly with table or column names that contain non-ASCII
+ characters.
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 2818
+ :versions: 0.9.0
+
+ Fixed a regression introduced by :ticket:`2818` where the EXISTS
+ query being generated would produce a "columns being replaced"
+ warning for a statement with two same-named columns,
+ as the internal SELECT wouldn't have use_labels set.
+
+ .. change::
+ :tags: bug, postgresql
+ :tickets: 2855
+ :versions: 0.9.0
+
+ Fixed bug where index reflection would mis-interpret indkey values
+ when using the pypostgresql adapter, which returns these values
+ as lists vs. psycopg2's return type of string.
+
.. changelog::
:version: 0.8.3
+ :released: October 26, 2013
+
+ .. change::
+ :tags: bug, oracle
+ :tickets: 2853
+ :versions: 0.9.0b1
+
+ Fixed bug where Oracle table reflection using synonyms would fail
+ if the synonym and the table were in different remote schemas.
+ Patch to fix courtesy Kyle Derr.
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 2849
+ :versions: 0.9.0b1
+
+ Fixed bug where :func:`.type_coerce` would not interpret ORM
+ elements with a ``__clause_element__()`` method properly.
.. change::
:tags: bug, sql
- :tickets: 2784
+ :tickets: 2842
+ :versions: 0.9.0b1
- Fixed bug in :class:`.CheckConstraint` DDL where the "quote" flag from a
- :class:`.Column` object would not be propagated. Also in 0.7.11.
+ The :class:`.Enum` and :class:`.Boolean` types now bypass
+ any custom (e.g. TypeDecorator) type in use when producing the
+ CHECK constraint for the "non native" type. This so that the custom type
+ isn't involved in the expression within the CHECK, since this
+ expression is against the "impl" value and not the "decorated" value.
+
+ .. change::
+ :tags: bug, postgresql
+ :tickets: 2844
+ :versions: 0.9.0b1
+
+ Removed a 128-character truncation from the reflection of the
+      server default for a column; this code was originally from
+ PG system views which truncated the string for readability.
+
+ .. change::
+ :tags: bug, mysql
+ :tickets: 2721, 2839
+ :versions: 0.9.0b1
+
+ The change in :ticket:`2721`, which is that the ``deferrable`` keyword
+ of :class:`.ForeignKeyConstraint` is silently ignored on the MySQL
+ backend, will be reverted as of 0.9; this keyword will now render again, raising
+ errors on MySQL as it is not understood - the same behavior will also
+ apply to the ``initially`` keyword. In 0.8, the keywords will remain
+ ignored but a warning is emitted. Additionally, the ``match`` keyword
+ now raises a :exc:`.CompileError` on 0.9 and emits a warning on 0.8;
+ this keyword is not only silently ignored by MySQL but also breaks
+ the ON UPDATE/ON DELETE options.
+
+ To use a :class:`.ForeignKeyConstraint`
+ that does not render or renders differently on MySQL, use a custom
+ compilation option. An example of this usage has been added to the
+ documentation, see :ref:`mysql_foreign_keys`.
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 2825
+ :versions: 0.9.0b1
+
+ The ``.unique`` flag on :class:`.Index` could be produced as ``None``
+ if it was generated from a :class:`.Column` that didn't specify ``unique``
+ (where it defaults to ``None``). The flag will now always be ``True`` or
+ ``False``.
+
+ .. change::
+ :tags: feature, orm
+ :tickets: 2836
+ :versions: 0.9.0b1
+
+ Added new option to :func:`.relationship` ``distinct_target_key``.
+ This enables the subquery eager loader strategy to apply a DISTINCT
+ to the innermost SELECT subquery, to assist in the case where
+ duplicate rows are generated by the innermost query which corresponds
+ to this relationship (there's not yet a general solution to the issue
+ of dupe rows within subquery eager loading, however, when joins outside
+ of the innermost subquery produce dupes). When the flag
+ is set to ``True``, the DISTINCT is rendered unconditionally, and when
+ it is set to ``None``, DISTINCT is rendered if the innermost relationship
+ targets columns that do not comprise a full primary key.
+ The option defaults to False in 0.8 (e.g. off by default in all cases),
+ None in 0.9 (e.g. automatic by default). Thanks to Alexander Koval
+ for help with this.
+
+ .. seealso::
+
+ :ref:`change_2836`
+
+ .. change::
+ :tags: bug, mysql
+ :tickets: 2515
+ :versions: 0.9.0b1
+
+ MySQL-connector dialect now allows options in the create_engine
+ query string to override those defaults set up in the connect,
+ including "buffered" and "raise_on_warnings".
+
+ .. change::
+ :tags: bug, postgresql
+ :tickets: 2742
+ :versions: 0.9.0b1
+
+ Parenthesis will be applied to a compound SQL expression as
+ rendered in the column list of a CREATE INDEX statement.
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 2742
+ :versions: 0.9.0b1
+
+ Fixed bug in default compiler plus those of postgresql, mysql, and
+ mssql to ensure that any literal SQL expression values are
+ rendered directly as literals, instead of as bound parameters,
+ within a CREATE INDEX statement. This also changes the rendering
+ scheme for other DDL such as constraints.
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 2815
+ :versions: 0.9.0b1
+
+ A :func:`.select` that is made to refer to itself in its FROM clause,
+ typically via in-place mutation, will raise an informative error
+ message rather than causing a recursion overflow.
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 2813
+ :versions: 0.9.0b1
+
+ Fixed bug where using an annotation such as :func:`.remote` or
+ :func:`.foreign` on a :class:`.Column` before association with a parent
+ :class:`.Table` could produce issues related to the parent table not
+ rendering within joins, due to the inherent copy operation performed
+ by an annotation.
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 2831
+
+ Non-working "schema" argument on :class:`.ForeignKey` is deprecated;
+ raises a warning. Removed in 0.9.
+
+ .. change::
+ :tags: bug, postgresql
+ :tickets: 2819
+ :versions: 0.9.0b1
+
+ Fixed bug where Postgresql version strings that had a prefix preceding
+ the words "Postgresql" or "EnterpriseDB" would not parse.
+ Courtesy Scott Schaefer.
+
+ .. change::
+ :tags: feature, engine
+ :tickets: 2821
+ :versions: 0.9.0b1
+
+ ``repr()`` for the :class:`.URL` of an :class:`.Engine`
+ will now conceal the password using asterisks.
+ Courtesy Gunnlaugur Þór Briem.
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 2818
+ :versions: 0.9.0b1
+
+ Fixed bug where :meth:`.Query.exists` failed to work correctly
+ without any WHERE criterion. Courtesy Vladimir Magamedov.
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 2811
+ :versions: 0.9.0b1
+
+ Fixed bug where using the ``column_reflect`` event to change the ``.key``
+ of the incoming :class:`.Column` would prevent primary key constraints,
+ indexes, and foreign key constraints from being correctly reflected.
+
+ .. change::
+ :tags: feature
+ :versions: 0.9.0b1
+
+ Added a new flag ``system=True`` to :class:`.Column`, which marks
+ the column as a "system" column which is automatically made present
+ by the database (such as Postgresql ``oid`` or ``xmin``). The
+ column will be omitted from the ``CREATE TABLE`` statement but will
+ otherwise be available for querying. In addition, the
+      :class:`.CreateColumn` construct can be applied to a custom
+ compilation rule which allows skipping of columns, by producing
+ a rule that returns ``None``.
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 2779
+
+ Backported a change from 0.9 whereby the iteration of a hierarchy
+ of mappers used in polymorphic inheritance loads is sorted,
+ which allows the SELECT statements generated for polymorphic queries
+ to have deterministic rendering, which in turn helps with caching
+ schemes that cache on the SQL string itself.
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 2794
+ :versions: 0.9.0b1
+
+ Fixed a potential issue in an ordered sequence implementation used
+ by the ORM to iterate mapper hierarchies; under the Jython interpreter
+ this implementation wasn't ordered, even though cPython and Pypy
+ maintained ordering.
+
+ .. change::
+ :tags: bug, examples
+ :versions: 0.9.0b1
+
+ Added "autoincrement=False" to the history table created in the
+ versioning example, as this table shouldn't have autoinc on it
+ in any case, courtesy Patrick Schmid.
+
+ .. change::
+ :tags: bug, sql
+ :versions: 0.9.0b1
+
+ The :meth:`.ColumnOperators.notin_` operator added in 0.8 now properly
+ produces the negation of the expression "IN" returns
+ when used against an empty collection.
+
+ .. change::
+ :tags: feature, examples
+ :versions: 0.9.0b1
+
+ Improved the examples in ``examples/generic_associations``, including
+ that ``discriminator_on_association.py`` makes use of single table
+      inheritance to do the work with the "discriminator". Also
+ added a true "generic foreign key" example, which works similarly
+ to other popular frameworks in that it uses an open-ended integer
+ to point to any other table, foregoing traditional referential
+ integrity. While we don't recommend this pattern, information wants
+ to be free.
+
+ .. change::
+ :tags: feature, orm, declarative
+ :versions: 0.9.0b1
+
+ Added a convenience class decorator :func:`.as_declarative`, is
+ a wrapper for :func:`.declarative_base` which allows an existing base
+ class to be applied using a nifty class-decorated approach.
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 2786
+ :versions: 0.9.0b1
+
+ Fixed bug in ORM-level event registration where the "raw" or
+ "propagate" flags could potentially be mis-configured in some
+ "unmapped base class" configurations.
.. change::
:tags: bug, orm
:tickets: 2778
+ :versions: 0.9.0b1
A performance fix related to the usage of the :func:`.defer` option
when loading mapped entities. The function overhead of applying
@@ -31,6 +528,7 @@
.. change::
:tags: bug, sqlite
:tickets: 2781
+ :versions: 0.9.0b1
The newly added SQLite DATETIME arguments storage_format and
regexp apparently were not fully implemented correctly; while the
@@ -40,6 +538,7 @@
.. change::
:tags: bug, sql, postgresql
:tickets: 2780
+ :versions: 0.9.0b1
Fixed bug where the expression system relied upon the ``str()``
form of a some expressions when referring to the ``.c`` collection
@@ -47,13 +546,14 @@
since the element relies on dialect-specific compilation constructs,
notably the ``__getitem__()`` operator as used with a Postgresql
``ARRAY`` element. The fix also adds a new exception class
- :class:`.UnsupportedCompilationError` which is raised in those cases
+ :exc:`.UnsupportedCompilationError` which is raised in those cases
where a compiler is asked to compile something it doesn't know
how to.
.. change::
:tags: bug, engine, oracle
:tickets: 2776
+ :versions: 0.9.0b1
Dialect.initialize() is not called a second time if an :class:`.Engine`
is recreated, due to a disconnect error. This fixes a particular
@@ -70,6 +570,7 @@
.. change::
:tags: feature, sql
+ :versions: 0.9.0b1
The :func:`.update`, :func:`.insert`, and :func:`.delete` constructs
will now interpret ORM entities as target tables to be operated upon,
@@ -86,6 +587,7 @@
.. change::
:tags: bug, orm
:tickets: 2773
+ :versions: 0.9.0b1
Fixed bug whereby attribute history functions would fail
when an object we moved from "persistent" to "pending"
@@ -95,6 +597,7 @@
.. change::
:tags: bug, engine, pool
:tickets: 2772
+ :versions: 0.9.0b1
Fixed bug where :class:`.QueuePool` would lose the correct
checked out count if an existing pooled connection failed to reconnect
@@ -107,6 +610,7 @@
.. change::
:tags: bug, mysql
:tickets: 2768
+ :versions: 0.9.0b1
Fixed bug when using multi-table UPDATE where a supplemental
table is a SELECT with its own bound parameters, where the positioning
@@ -116,13 +620,15 @@
.. change::
:tags: bug, sqlite
:tickets: 2764
+ :versions: 0.9.0b1
- Added :class:`.BIGINT` to the list of type names that can be
+ Added :class:`sqlalchemy.types.BIGINT` to the list of type names that can be
reflected by the SQLite dialect; courtesy Russell Stuart.
.. change::
:tags: feature, orm, declarative
:tickets: 2761
+ :versions: 0.9.0b1
ORM descriptors such as hybrid properties can now be referenced
by name in a string argument used with ``order_by``,
@@ -132,15 +638,17 @@
.. change::
:tags: feature, firebird
:tickets: 2763
+ :versions: 0.9.0b1
Added new flag ``retaining=True`` to the kinterbasdb and fdb dialects.
This controls the value of the ``retaining`` flag sent to the
``commit()`` and ``rollback()`` methods of the DBAPI connection.
- Due to historical concerns, this flag defaults to ``True``, however
- in 0.9 this flag will be defaulted to ``False``.
+ Due to historical concerns, this flag defaults to ``True`` in 0.8.2,
+ however in 0.9.0b1 this flag defaults to ``False``.
.. change::
:tags: requirements
+ :versions: 0.9.0b1
The Python `mock <https://pypi.python.org/pypi/mock>`_ library
is now required in order to run the unit test suite. While part
@@ -151,6 +659,7 @@
.. change::
:tags: bug, orm
:tickets: 2750
+ :versions: 0.9.0b1
A warning is emitted when trying to flush an object of an inherited
class where the polymorphic discriminator has been assigned
@@ -159,6 +668,7 @@
.. change::
:tags: bug, postgresql
:tickets: 2740
+ :versions: 0.9.0b1
The behavior of :func:`.extract` has been simplified on the
Postgresql dialect to no longer inject a hardcoded ``::timestamp``
@@ -171,6 +681,7 @@
.. change::
:tags: bug, firebird
:tickets: 2757
+ :versions: 0.9.0b1
Type lookup when reflecting the Firebird types LONG and
INT64 has been fixed so that LONG is treated as INTEGER,
@@ -181,6 +692,7 @@
.. change::
:tags: bug, postgresql
:tickets: 2766
+ :versions: 0.9.0b1
Fixed bug in HSTORE type where keys/values that contained
backslashed quotes would not be escaped correctly when
@@ -190,6 +702,7 @@
.. change::
:tags: bug, postgresql
:tickets: 2767
+ :versions: 0.9.0b1
Fixed bug where the order of columns in a multi-column
Postgresql index would be reflected in the wrong order.
@@ -198,6 +711,7 @@
.. change::
:tags: bug, sql
:tickets: 2746, 2668
+ :versions: 0.9.0b1
Multiple fixes to the correlation behavior of
:class:`.Select` constructs, first introduced in 0.8.0:
@@ -232,6 +746,7 @@
.. change::
:tags: bug, ext
+ :versions: 0.9.0b1
Fixed bug whereby if a composite type were set up
with a function instead of a class, the mutable extension
@@ -252,6 +767,7 @@
.. change::
:tags: feature, postgresql
+ :versions: 0.9.0b1
Support for Postgresql 9.2 range types has been added.
Currently, no type translation is provided, so works
@@ -260,6 +776,7 @@
.. change::
:tags: bug, examples
+ :versions: 0.9.0b1
Fixed an issue with the "versioning" recipe whereby a many-to-one
reference could produce a meaningless version for the target,
@@ -269,6 +786,7 @@
.. change::
:tags: feature, postgresql
:tickets: 2072
+ :versions: 0.9.0b1
Added support for "AUTOCOMMIT" isolation when using the psycopg2
DBAPI. The keyword is available via the ``isolation_level``
@@ -277,6 +795,7 @@
.. change::
:tags: bug, orm
:tickets: 2759
+ :versions: 0.9.0b1
Fixed bug in polymorphic SQL generation where multiple joined-inheritance
entities against the same base class joined to each other as well
@@ -285,7 +804,8 @@
.. change::
:tags: bug, engine
- :pullreq: 6
+ :pullreq: github:6
+ :versions: 0.9.0b1
Fixed bug where the ``reset_on_return`` argument to various :class:`.Pool`
implementations would not be propagated when the pool was regenerated.
@@ -294,6 +814,7 @@
.. change::
:tags: bug, orm
:tickets: 2754
+ :versions: 0.9.0b1
Fixed bug where sending a composite attribute into :meth:`.Query.order_by`
would produce a parenthesized expression not accepted by some databases.
@@ -301,6 +822,7 @@
.. change::
:tags: bug, orm
:tickets: 2755
+ :versions: 0.9.0b1
Fixed the interaction between composite attributes and
the :func:`.aliased` function. Previously, composite attributes
@@ -310,6 +832,7 @@
.. change::
:tags: bug, mysql
:tickets: 2715
+ :versions: 0.9.0b1
Added another conditional to the ``mysql+gaerdbms`` dialect to
detect so-called "development" mode, where we should use the
@@ -318,6 +841,7 @@
.. change::
:tags: feature, mysql
:tickets: 2704
+ :versions: 0.9.0b1
The ``mysql_length`` parameter used with :class:`.Index` can now
be passed as a dictionary of column names/lengths, for use
@@ -327,6 +851,7 @@
.. change::
:tags: bug, mssql
:tickets: 2747
+ :versions: 0.9.0b1
When querying the information schema on SQL Server 2000, removed
a CAST call that was added in 0.8.1 to help with driver issues,
@@ -336,6 +861,7 @@
.. change::
:tags: bug, mysql
:tickets: 2721
+ :versions: 0.9.0b1
The ``deferrable`` keyword argument on :class:`.ForeignKey` and
:class:`.ForeignKeyConstraint` will not render the ``DEFERRABLE`` keyword
@@ -347,6 +873,7 @@
.. change::
:tags: bug, ext, orm
:tickets: 2730
+ :versions: 0.9.0b1
Fixed bug where :class:`.MutableDict` didn't report a change event
when ``clear()`` was called.
@@ -354,6 +881,7 @@
.. change::
:tags: bug, sql
:tickets: 2738
+ :versions: 0.9.0b1
Fixed bug whereby joining a select() of a table "A" with multiple
foreign key paths to a table "B", to that table "B", would fail
@@ -364,6 +892,7 @@
.. change::
:tags: bug, sql, reflection
:tickets: 2728
+ :versions: 0.9.0b1
Fixed bug whereby using :meth:`.MetaData.reflect` across a remote
schema as well as a local schema could produce wrong results
@@ -372,6 +901,7 @@
.. change::
:tags: bug, sql
:tickets: 2726
+ :versions: 0.9.0b1
Removed the "not implemented" ``__iter__()`` call from the base
:class:`.ColumnOperators` class, while this was introduced
@@ -406,6 +936,7 @@
.. change::
:tags: bug, postgresql
+ :pullreq: github:2
:tickets: 2735
Fixed the HSTORE type to correctly encode/decode for unicode.
@@ -541,7 +1072,6 @@
.. change::
:tags: bug, mysql
- :pullreq: 55
Fixes to support the latest cymysql DBAPI, courtesy
Hajime Nakagami.
@@ -603,7 +1133,6 @@
.. change::
:tags: bug, mysql
- :pullreq: 54
Updated a regexp to correctly extract error code on
google app engine v1.7.5 and newer. Courtesy
@@ -712,32 +1241,7 @@
(obviously assuming the state of the superclass).
.. change::
- :tags: bug, orm
- :tickets: 2699
-
- Fixed bug when a query of the form:
- ``query(SubClass).options(subqueryload(Baseclass.attrname))``,
- where ``SubClass`` is a joined inh of ``BaseClass``,
- would fail to apply the ``JOIN`` inside the subquery
- on the attribute load, producing a cartesian product.
- The populated results still tended to be correct as additional
- rows are just ignored, so this issue may be present as a
- performance degradation in applications that are
- otherwise working correctly. Also in 0.7.11.
-
- .. change::
- :tags: bug, orm
- :tickets: 2689
-
- Fixed bug in unit of work whereby a joined-inheritance
- subclass could insert the row for the "sub" table
- before the parent table, if the two tables had no
- ForeignKey constraints set up between them.
- Also in 0.7.11.
-
- .. change::
:tags: bug, mssql
- :pullreq: 47
Added support for additional "disconnect" messages
to the pymssql dialect. Courtesy John Anderson.
@@ -761,7 +1265,6 @@
.. change::
:tags: bug, mssql
:tickets: 2683
- :pullreq: 46
Fixed Py3K bug regarding "binary" types and
pymssql. Courtesy Marc Abramowitz.
@@ -790,15 +1293,6 @@
* :ref:`correlation_context_specific`
.. change::
- :tags: feature, postgresql
- :tickets: 2676
-
- Added support for Postgresql's traditional SUBSTRING
- function syntax, renders as "SUBSTRING(x FROM y FOR z)"
- when regular ``func.substring()`` is used.
- Also in 0.7.11. Courtesy Gunnlaugur Þór Briem.
-
- .. change::
:tags: feature, orm
:tickets: 2675
@@ -824,7 +1318,6 @@
.. change::
:tags: feature, mysql
- :pullreq: 42
New dialect for CyMySQL added, courtesy Hajime Nakagami.
@@ -880,7 +1373,6 @@
.. change::
:tags: bug, tests
:tickets: 2669
- :pullreq: 41
Fixed an import of "logging" in test_execute which was not
working on some linux platforms. Also in 0.7.11.
@@ -987,7 +1479,6 @@
.. change::
:tags: feature, postgresql
- :pullreq: 40
Added :meth:`.postgresql.ARRAY.Comparator.any` and
:meth:`.postgresql.ARRAY.Comparator.all`
@@ -1061,7 +1552,6 @@
.. change::
:tags: mssql, feature
- :pullreq: 35
Added ``mssql_include`` and ``mssql_clustered`` options to
:class:`.Index`, renders the ``INCLUDE`` and ``CLUSTERED`` keywords,
@@ -1110,7 +1600,6 @@
.. change::
:tags: mssql, feature
:tickets: 2644
- :pullreq: 32
DDL for IDENTITY columns is now supported on
non-primary key columns, by establishing a
@@ -1143,7 +1632,7 @@
.. change::
:tags: postgresql, bug
- Fixed bug in :func:`.postgresql.array` construct whereby using it
+ Fixed bug in :class:`~sqlalchemy.dialects.postgresql.array()` construct whereby using it
inside of an :func:`.expression.insert` construct would produce an
error regarding a parameter issue in the ``self_group()`` method.
@@ -1159,14 +1648,12 @@
.. change::
:tags: mysql, feature
- :pullreq: 33
GAE dialect now accepts username/password arguments in the URL,
courtesy Owen Nelson.
.. change::
:tags: mysql, bug
- :pullreq: 33
GAE dialect won't fail on None match if the error code can't be extracted
from the exception throw; courtesy Owen Nelson.
@@ -1185,16 +1672,6 @@
:released: December 14, 2012
.. change::
- :tags: sqlite, bug
- :tickets: 2568
-
- More adjustment to this SQLite related issue which was released in
- 0.7.9, to intercept legacy SQLite quoting characters when reflecting
- foreign keys. In addition to intercepting double quotes, other
- quoting characters such as brackets, backticks, and single quotes
- are now also intercepted. Also in 0.7.10.
-
- .. change::
:tags: orm, bug
:tickets: 2635
@@ -1265,19 +1742,6 @@
.. change::
:tags: sql, bug
- :tickets: 2631
-
- Fixed bug where using server_onupdate=<FetchedValue|DefaultClause>
- without passing the "for_update=True" flag would apply the default
- object to the server_default, blowing away whatever was there.
- The explicit for_update=True argument shouldn't be needed with this usage
- (especially since the documentation shows an example without it being
- used) so it is now arranged internally using a copy of the given default
- object, if the flag isn't set to what corresponds to that argument.
- Also in 0.7.10.
-
- .. change::
- :tags: sql, bug
:tickets: 2610
Fixed bug whereby using a label_length on dialect that was smaller
@@ -1320,7 +1784,7 @@
:tags: sql, bug
:tickets: 2618
- The :class:`.DECIMAL` type now honors the "precision" and
+ The :class:`~sqlalchemy.types.DECIMAL` type now honors the "precision" and
"scale" arguments when rendering DDL.
.. change::
@@ -1436,30 +1900,10 @@
.. change::
:tags: engine
- The "reflect=True" argument to :class:`MetaData` is deprecated.
+ The "reflect=True" argument to :class:`~sqlalchemy.schema.MetaData` is deprecated.
Please use the :meth:`.MetaData.reflect` method.
.. change::
- :tags: engine, bug
- :tickets: 2604
-
- Fixed :meth:`.MetaData.reflect` to correctly use
- the given :class:`.Connection`, if given, without
- opening a second connection from that connection's
- :class:`.Engine`. Also in 0.7.10.
-
- .. change::
- :tags: mssql, bug
- :tickets: 2607
-
- Fixed bug whereby using "key" with Column
- in conjunction with "schema" for the owning
- Table would fail to locate result rows due
- to the MSSQL dialect's "schema rendering"
- logic's failure to take .key into account.
- Also in 0.7.10.
-
- .. change::
:tags: sql, bug
:tickets: 2603
@@ -1573,7 +2017,7 @@
:tickets: 2595
The auto-correlation feature of :func:`.select`, and
- by proxy that of :class:`.orm.Query`, will not
+ by proxy that of :class:`.Query`, will not
take effect for a SELECT statement that is being
rendered directly in the FROM list of the enclosing
SELECT. Correlation in SQL only applies to column
@@ -1582,7 +2026,6 @@
.. change::
:tags: sqlite
- :pullreq: 23
:changeset: c3addcc9ffad
Added :class:`.types.NCHAR`, :class:`.types.NVARCHAR`
diff --git a/doc/build/changelog/changelog_09.rst b/doc/build/changelog/changelog_09.rst
index 02a5c1d81..d59f3ec60 100644
--- a/doc/build/changelog/changelog_09.rst
+++ b/doc/build/changelog/changelog_09.rst
@@ -3,387 +3,1115 @@
0.9 Changelog
==============
+.. changelog_imports::
+
+ .. include:: changelog_08.rst
+ :start-line: 5
+
+ .. include:: changelog_07.rst
+ :start-line: 5
+
.. changelog::
- :version: 0.9.0
+ :version: 0.9.2
+
+ .. change::
+ :tags: feature, sql
+ :tickets: 2910
+
+ Options can now be specified on a :class:`.PrimaryKeyConstraint` object
+ independently of the specification of columns in the table with
+ the ``primary_key=True`` flag; use a :class:`.PrimaryKeyConstraint`
+ object with no columns in it to achieve this result.
+
+ Previously, an explicit :class:`.PrimaryKeyConstraint` would have the
+ effect of those columns marked as ``primary_key=True`` being ignored;
+ since this is no longer the case, the :class:`.PrimaryKeyConstraint`
+ will now assert that either one style or the other is used to specify
+ the columns, or if both are present, that the column lists match
+ exactly. If an inconsistent set of columns in the
+ :class:`.PrimaryKeyConstraint`
+ and within the :class:`.Table` marked as ``primary_key=True`` are
+ present, a warning is emitted, and the list of columns is taken
+ only from the :class:`.PrimaryKeyConstraint` alone as was the case
+ in previous releases.
+
+
+
+ .. seealso::
+
+ :class:`.PrimaryKeyConstraint`
+
+ .. change::
+ :tags: feature, sql
+ :tickets: 2866
+
+ The system by which schema constructs and certain SQL constructs
+ accept dialect-specific keyword arguments has been enhanced. This
+ system includes commonly the :class:`.Table` and :class:`.Index` constructs,
+ which accept a wide variety of dialect-specific arguments such as
+ ``mysql_engine`` and ``postgresql_where``, as well as the constructs
+ :class:`.PrimaryKeyConstraint`, :class:`.UniqueConstraint`,
+ :class:`.Update`, :class:`.Insert` and :class:`.Delete`, and also
+ newly added kwarg capability to :class:`.ForeignKeyConstraint`
+ and :class:`.ForeignKey`. The change is that participating dialects
+ can now specify acceptable argument lists for these constructs, allowing
+ an argument error to be raised if an invalid keyword is specified for
+ a particular dialect. If the dialect portion of the keyword is unrecognized,
+ a warning is emitted only; while the system will actually make use
+ of setuptools entrypoints in order to locate non-local dialects,
+ the use case where certain dialect-specific arguments are used
+ in an environment where that third-party dialect is uninstalled remains
+ supported. Dialects also have to explicitly opt-in to this system,
+ so that external dialects which aren't making use of this system
+ will remain unaffected.
.. change::
:tags: bug, sql
- :tickets: 2784
+ :pullreq: bitbucket:11
+
+ A :class:`.UniqueConstraint` created inline with a :class:`.Table`
+ that has no columns within it will be skipped. Pullreq courtesy
+ Derek Harland.
+
+ .. change::
+ :tags: feature, mssql
+ :pullreq: bitbucket:11
+
+ Added an option ``mssql_clustered`` to the :class:`.UniqueConstraint`
+ and :class:`.PrimaryKeyConstraint` constructs; on SQL Server, this adds
+ the ``CLUSTERED`` keyword to the constraint construct within DDL.
+ Pullreq courtesy Derek Harland.
+
+ .. change::
+ :tags: bug, sql, orm
+ :tickets: 2912
+
+ Fixed the multiple-table "UPDATE..FROM" construct, only usable on
+ MySQL, to correctly render the SET clause among multiple columns
+ with the same name across tables. This also changes the name used for
+ the bound parameter in the SET clause to "<tablename>_<colname>" for
+ the non-primary table only; as this parameter is typically specified
+ using the :class:`.Column` object directly this should not have an
+ impact on applications. The fix takes effect for both
+ :meth:`.Table.update` as well as :meth:`.Query.update` in the ORM.
- Fixed bug in :class:`.CheckConstraint` DDL where the "quote" flag from a
- :class:`.Column` object would not be propagated. Also in 0.8.3, 0.7.11.
+ .. change::
+ :tags: bug, oracle
+ :tickets: 2911
+
+ It's been observed that the usage of a cx_Oracle "outputtypehandler"
+ in Python 2.xx in order to coerce string values to Unicode is inordinately
+ expensive; even though cx_Oracle is written in C, when you pass the
+ Python ``unicode`` primitive to cursor.var() and associate with an output
+ handler, the library counts every conversion as a Python function call
+ with all the requisite overhead being recorded; this *despite* the fact
+ when running in Python 3, all strings are also unconditionally coerced
+ to unicode but it does *not* incur this overhead,
+ meaning that cx_Oracle is failing to use performant techniques in Py2K.
+ As SQLAlchemy cannot easily select for this style of type handler on a
+ per-column basis, the handler was assembled unconditionally thereby
+ adding the overhead to all string access.
+
+ So this logic has been replaced with SQLAlchemy's own unicode
+ conversion system, which now
+ only takes effect in Py2K for columns that are requested as unicode.
+ When C extensions are used, SQLAlchemy's system appears to be 2-3x faster than
+ cx_Oracle's. Additionally, SQLAlchemy's unicode conversion has been
+ enhanced such that when the "conditional" converter is required
+ (now needed for the Oracle backend), the check for "already unicode" is now
+ performed in C and no longer introduces significant overhead.
+
+ This change has two impacts on the cx_Oracle backend. One is that
+ string values in Py2K which aren't specifically requested with the
+ Unicode type or convert_unicode=True will now come back as ``str``,
+ not ``unicode`` - this behavior is similar to a backend such as
+ MySQL. Additionally, when unicode values are requested with the cx_Oracle
+ backend, if the C extensions are *not* used, there is now an additional
+ overhead of an isinstance() check per column. This tradeoff has been
+ made as it can be worked around and no longer places a performance burden
+ on the likely majority of Oracle result columns that are non-unicode
+ strings.
.. change::
:tags: bug, orm
- :tickets: 2778
+ :tickets: 2908
+
+ Fixed a bug involving the new flattened JOIN structures which
+ are used with :func:`.joinedload()` (thereby causing a regression
+ in joined eager loading) as well as :func:`.aliased`
+ in conjunction with the ``flat=True`` flag and joined-table inheritance;
+ basically multiple joins across a "parent JOIN sub" entity using different
+ paths to get to a target class wouldn't form the correct ON conditions.
+ An adjustment / simplification made in the mechanics of figuring
+ out the "left side" of the join in the case of an aliased, joined-inh
+ class repairs the issue.
- A performance fix related to the usage of the :func:`.defer` option
- when loading mapped entities. The function overhead of applying
- a per-object deferred callable to an instance at load time was
- significantly higher than that of just loading the data from the row
- (note that ``defer()`` is meant to reduce DB/network overhead, not
- necessarily function call count); the function call overhead is now
- less than that of loading data from the column in all cases. There
- is also a reduction in the number of "lazy callable" objects created
- per load from N (total deferred values in the result) to 1 (total
- number of deferred cols). Also in 0.8.3.
+ .. change::
+ :tags: bug, mysql
+
+ The MySQL CAST compilation now takes into account aspects of a string
+ type such as "charset" and "collation". While MySQL wants all character-
+ based CAST calls to use the CHAR type, we now create a real CHAR
+ object at CAST time and copy over all the parameters it has, so that
+ an expression like ``cast(x, mysql.TEXT(charset='utf8'))`` will
+ render ``CAST(t.col AS CHAR CHARACTER SET utf8)``.
.. change::
- :tags: bug, sqlite
- :tickets: 2781
+ :tags: bug, mysql
+ :tickets: 2906
+
+ Added new "unicode returns" detection to the MySQL dialect and
+ to the default dialect system overall, such that any dialect
+ can add extra "tests" to the on-first-connect "does this DBAPI
+ return unicode directly?" detection. In this case, we are
+ adding a check specifically against the "utf8" encoding with
+ an explicit "utf8_bin" collation type (after checking that
+ this collation is available) to test for some buggy unicode
+ behavior observed with MySQLdb version 1.2.3. While MySQLdb
+ has resolved this issue as of 1.2.4, the check here should
+ guard against regressions. The change also allows the "unicode"
+ checks to log in the engine logs, which was not previously
+ the case.
- The newly added SQLite DATETIME arguments storage_format and
- regexp apparently were not fully implemented correctly; while the
- arguments were accepted, in practice they would have no effect;
- this has been fixed. Also in 0.8.3.
+ .. change::
+ :tags: bug, mysql, pool, engine
+ :tickets: 2907
+
+ :class:`.Connection` now associates a new
+ :class:`.RootTransaction` or :class:`.TwoPhaseTransaction`
+ with its immediate :class:`._ConnectionFairy` as a "reset handler"
+ for the span of that transaction, which takes over the task
+ of calling commit() or rollback() for the "reset on return" behavior
+ of :class:`.Pool` if the transaction was not otherwise completed.
+ This resolves the issue that a picky transaction
+ like that of MySQL two-phase will be
+ properly closed out when the connection is closed without an
+ explicit rollback or commit (e.g. no longer raises "XAER_RMFAIL"
+ in this case - note this only shows up in logging as the exception
+ is not propagated within pool reset).
+ This issue would arise e.g. when using an orm
+ :class:`.Session` with ``twophase`` set, and then
+ :meth:`.Session.close` is called without an explicit rollback or
+ commit. The change also has the effect that you will now see
+ an explicit "ROLLBACK" in the logs when using a :class:`.Session`
+ object in non-autocommit mode regardless of how that session was
+ discarded. Thanks to Jeff Dairiki and Laurence Rowe for isolating
+ the issue here.
.. change::
- :tags: bug, sql, postgresql
- :tickets: 2780
+ :tags: feature, pool, engine
- Fixed bug where the expression system relied upon the ``str()``
- form of a some expressions when referring to the ``.c`` collection
- on a ``select()`` construct, but the ``str()`` form isn't available
- since the element relies on dialect-specific compilation constructs,
- notably the ``__getitem__()`` operator as used with a Postgresql
- ``ARRAY`` element. The fix also adds a new exception class
- :class:`.UnsupportedCompilationError` which is raised in those cases
- where a compiler is asked to compile something it doesn't know
- how to. Also in 0.8.3.
+ Added a new pool event :meth:`.PoolEvents.invalidate`. Called when
+ a DBAPI connection is to be marked as "invalidated" and discarded
+ from the pool.
.. change::
- :tags: bug, engine, oracle
- :tickets: 2776
+ :tags: bug, pool
- Dialect.initialize() is not called a second time if an :class:`.Engine`
- is recreated, due to a disconnect error. This fixes a particular
- issue in the Oracle 8 dialect, but in general the dialect.initialize()
- phase should only be once per dialect. Also in 0.8.3.
+ The argument names for the :meth:`.PoolEvents.reset` event have been
+ renamed to ``dbapi_connection`` and ``connection_record`` in order
+ to maintain consistency with all the other pool events. It is expected
+ that any existing listeners for this relatively new and
+ seldom-used event are using positional style to receive arguments in
+ any case.
.. change::
- :tags: feature, sql
- :tickets: 722
+ :tags: bug, py3k, cextensions
+ :pullreq: github:55
- Added new method to the :func:`.insert` construct
- :meth:`.Insert.from_select`. Given a list of columns and
- a selectable, renders ``INSERT INTO (table) (columns) SELECT ..``.
- While this feature is highlighted as part of 0.9 it is also
- backported to 0.8.3.
+ Fixed an issue where the C extensions in Py3K are using the wrong API
+ to specify the top-level module function, which breaks
+ in Python 3.4b2. Py3.4b2 changes PyMODINIT_FUNC to return
+ "void" instead of "PyObject *", so we now make sure to use
+ "PyMODINIT_FUNC" instead of "PyObject *" directly. Pull request
+ courtesy cgohlke.
+
+ .. change::
+ :tags: bug, schema
+ :pullreq: github:57
+
+ Restored :class:`sqlalchemy.schema.SchemaVisitor` to the ``.schema``
+ module. Pullreq courtesy Sean Dague.
+
+.. changelog::
+ :version: 0.9.1
+ :released: January 5, 2014
+
+ .. change::
+ :tags: bug, orm, events
+ :tickets: 2905
+
+ Fixed regression where using a ``functools.partial()`` with the event
+ system would cause a recursion overflow due to usage of inspect.getargspec()
+ on it in order to detect a legacy calling signature for certain events,
+ and apparently there's no way to do this with a partial object. Instead
+ we skip the legacy check and assume the modern style; the check itself
+ now only occurs for the SessionEvents.after_bulk_update and
+ SessionEvents.after_bulk_delete events. Those two events will require
+ the new signature style if assigned to a "partial" event listener.
+
+ .. change::
+ :tags: feature, orm, extensions
+
+ A new, **experimental** extension :mod:`sqlalchemy.ext.automap` is added.
+ This extension expands upon the functionality of Declarative as well as
+ the :class:`.DeferredReflection` class to produce a base class which
+ automatically generates mapped classes *and relationships* based on
+ table metadata.
.. seealso::
- :ref:`feature_722`
+ :ref:`feature_automap`
+
+ :ref:`automap_toplevel`
.. change::
:tags: feature, sql
- The :func:`.update`, :func:`.insert`, and :func:`.delete` constructs
- will now interpret ORM entities as target tables to be operated upon,
- e.g.::
+ Conjunctions like :func:`.and_` and :func:`.or_` can now accept
+ Python generators as a single argument, e.g.::
+
+ and_(x == y for x, y in tuples)
- from sqlalchemy import insert, update, delete
+ The logic here looks for a single argument ``*args`` where the first
+ element is an instance of ``types.GeneratorType``.
- ins = insert(SomeMappedClass).values(x=5)
+ .. change::
+ :tags: feature, schema
- del_ = delete(SomeMappedClass).where(SomeMappedClass.id == 5)
+ The :paramref:`.Table.extend_existing` and :paramref:`.Table.autoload_replace`
+ parameters are now available on the :meth:`.MetaData.reflect`
+ method.
- upd = update(SomeMappedClass).where(SomeMappedClass.id == 5).values(name='ed')
+ .. change::
+ :tags: bug, orm, declarative
- Also in 0.8.3.
+ Fixed an extremely unlikely memory issue where when using
+ :class:`.DeferredReflection`
+ to define classes pending for reflection, if some subset of those
+ classes were discarded before the :meth:`.DeferredReflection.prepare`
+ method were called to reflect and map the class, a strong reference
+ to the class would remain held within the declarative internals.
+ This internal collection of "classes to map" now uses weak
+ references against the classes themselves.
.. change::
:tags: bug, orm
- :tickets: 2773
+ :pullreq: bitbucket:9
- Fixed bug whereby attribute history functions would fail
- when an object we moved from "persistent" to "pending"
- using the :func:`.make_transient` function, for operations
- involving collection-based backrefs. Also in 0.8.3.
+ Fixed bug where using new :attr:`.Session.info` attribute would fail
+ if the ``.info`` argument were only passed to the :class:`.sessionmaker`
+ creation call but not to the object itself. Courtesy Robin Schoonover.
.. change::
- :tags: bug, engine, pool
- :tickets: 2772
+ :tags: bug, orm
+ :tickets: 2901
- Fixed bug where :class:`.QueuePool` would lose the correct
- checked out count if an existing pooled connection failed to reconnect
- after an invalidate or recycle event. Also in 0.8.3.
+ Fixed regression where we don't check the given name against the
+ correct string class when setting up a backref based on a name,
+ therefore causing the error "too many values to unpack". This was
+ related to the Py3k conversion.
+
+ .. change::
+ :tags: bug, orm, declarative
+ :tickets: 2900
+
+ A quasi-regression where apparently in 0.8 you can set a class-level
+ attribute on declarative to simply refer directly to an :class:`.InstrumentedAttribute`
+ on a superclass or on the class itself, and it
+ acts more or less like a synonym; in 0.9, this fails to set up enough
+ bookkeeping to keep up with the more liberalized backref logic
+ from :ticket:`2789`. Even though this use case was never directly
+ considered, it is now detected by declarative at the "setattr()" level
+ as well as when setting up a subclass, and the mirrored/renamed attribute
+ is now set up as a :func:`.synonym` instead.
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 2903
+
+ Fixed regression where we apparently still create an implicit
+ alias when saying query(B).join(B.cs), where "C" is a joined inh
+ class; however, this implicit alias was created only considering
+ the immediate left side, and not a longer chain of joins along different
+ joined-inh subclasses of the same base. As long as we're still
+ implicitly aliasing in this case, the behavior is dialed back a bit
+ so that it will alias the right side in a wider variety of cases.
+
+.. changelog::
+ :version: 0.9.0
+ :released: December 30, 2013
+
+ .. change::
+ :tags: bug, orm, declarative
+ :tickets: 2828
+
+ Declarative does an extra check to detect if the same
+ :class:`.Column` is mapped multiple times under different properties
+ (which typically should be a :func:`.synonym` instead) or if two
+ or more :class:`.Column` objects are given the same name, raising
+ a warning if this condition is detected.
+
+ .. change::
+ :tags: bug, firebird
+ :tickets: 2898
+
+ Changed the queries used by Firebird to list table and view names
+ to query from the ``rdb$relations`` view instead of the
+ ``rdb$relation_fields`` and ``rdb$view_relations`` views.
+ Variants of both the old and new queries are mentioned on many
+ FAQ and blogs, however the new queries are taken straight from
+ the "Firebird FAQ" which appears to be the most official source
+ of info.
.. change::
:tags: bug, mysql
- :tickets: 2768
+ :tickets: 2893
- Fixed bug when using multi-table UPDATE where a supplemental
- table is a SELECT with its own bound parameters, where the positioning
- of the bound parameters would be reversed versus the statement
- itself when using MySQL's special syntax. Also in 0.8.2.
+ Improvements to the system by which SQL types generate within
+ ``__repr__()``, particularly with regards to the MySQL integer/numeric/
+ character types which feature a wide variety of keyword arguments.
+ The ``__repr__()`` is important for use with Alembic autogenerate
+ for when Python code is rendered in a migration script.
.. change::
- :tags: bug, sqlite
- :tickets: 2764
+ :tags: feature, postgresql
+ :tickets: 2581
+ :pullreq: github:50
- Added :class:`.BIGINT` to the list of type names that can be
- reflected by the SQLite dialect; courtesy Russell Stuart.
- Also in 0.8.2.
+ Support for Postgresql JSON has been added, using the new
+ :class:`.JSON` type. Huge thanks to Nathan Rice for
+ implementing and testing this.
.. change::
- :tags: feature, orm, declarative
- :tickets: 2761
+ :tags: bug, sql
- ORM descriptors such as hybrid properties can now be referenced
- by name in a string argument used with ``order_by``,
- ``primaryjoin``, or similar in :func:`.relationship`,
- in addition to column-bound attributes. Also in 0.8.2.
+ The :func:`.cast` function, when given a plain literal value,
+ will now apply the given type to the given literal value on the
+ bind parameter side according to the type given to the cast,
+ in the same manner as that of the :func:`.type_coerce` function.
+ However unlike :func:`.type_coerce`, this only takes effect if a
+ non-clauseelement value is passed to :func:`.cast`; an existing typed
+ construct will retain its type.
.. change::
- :tags: feature, engine
- :tickets: 2770
+ :tags: bug, postgresql
- New events added to :class:`.ConnectionEvents`:
+ Now using psycopg2 UNICODEARRAY extension for handling unicode arrays
+ with psycopg2 + normal "native unicode" mode, in the same way the
+ UNICODE extension is used.
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 2883
+
+ The :class:`.ForeignKey` class more aggressively checks the given
+ column argument. If not a string, it checks that the object is
+ at least a :class:`.ColumnClause`, or an object that resolves to one,
+ and that the ``.table`` attribute, if present, refers to a
+ :class:`.TableClause` or subclass, and not something like an
+ :class:`.Alias`. Otherwise, a :class:`.ArgumentError` is raised.
- * :meth:`.ConnectionEvents.engine_connect`
- * :meth:`.ConnectionEvents.set_connection_execution_options`
- * :meth:`.ConnectionEvents.set_engine_execution_options`
.. change::
- :tags: feature, firebird
- :tickets: 2763
+ :tags: feature, orm
- Added new flag ``retaining=False`` to the kinterbasdb and fdb dialects.
- This controls the value of the ``retaining`` flag sent to the
- ``commit()`` and ``rollback()`` methods of the DBAPI connection.
- Defaults to False. Also in 0.8.2, where it defaults to True.
+ The :class:`.exc.StatementError` or DBAPI-related subclass
+ now can accommodate additional information about the "reason" for
+ the exception; the :class:`.Session` now adds some detail to it
+ when the exception occurs within an autoflush. This approach
+ is taken as opposed to combining :class:`.FlushError` with
+ a Python 3 style "chained exception" approach so as to maintain
+ compatibility both with Py2K code as well as code that already
+ catches ``IntegrityError`` or similar.
.. change::
- :tags: requirements
+ :tags: feature, postgresql
+ :pullreq: bitbucket:8
- The Python `mock <https://pypi.python.org/pypi/mock>`_ library
- is now required in order to run the unit test suite. While part
- of the standard library as of Python 3.3, previous Python installations
- will need to install this in order to run unit tests or to
- use the ``sqlalchemy.testing`` package for external dialects.
- This applies to 0.8.2 as well.
+ Added support for Postgresql TSVECTOR via the
+ :class:`.postgresql.TSVECTOR` type. Pull request courtesy
+ Noufal Ibrahim.
.. change::
- :tags: bug, orm
- :tickets: 2750
+ :tags: feature, engine
+ :tickets: 2875
- A warning is emitted when trying to flush an object of an inherited
- mapped class where the polymorphic discriminator has been assigned
- to a value that is invalid for the class. Also in 0.8.2.
+ The :func:`.engine_from_config` function has been improved so that
+ we will be able to parse dialect-specific arguments from string
+ configuration dictionaries. Dialect classes can now provide their
+ own list of parameter types and string-conversion routines.
+ The feature is not yet used by the built-in dialects, however.
.. change::
- :tags: bug, postgresql
- :tickets: 2740
+ :tags: bug, sql
+ :tickets: 2879
+
+ The precedence rules for the :meth:`.ColumnOperators.collate` operator
+ have been modified, such that the COLLATE operator is now of lower
+ precedence than the comparison operators. This has the effect that
+ a COLLATE applied to a comparison will not render parenthesis
+ around the comparison, which is not parsed by backends such as
+ MSSQL. The change is backwards incompatible for those setups that
+ were working around the issue by applying :meth:`.Operators.collate`
+ to an individual element of the comparison expression,
+ rather than the comparison expression as a whole.
- The behavior of :func:`.extract` has been simplified on the
- Postgresql dialect to no longer inject a hardcoded ``::timestamp``
- or similar cast into the given expression, as this interfered
- with types such as timezone-aware datetimes, but also
- does not appear to be at all necessary with modern versions
- of psycopg2. Also in 0.8.2.
+    .. seealso::
+
+ :ref:`migration_2879`
.. change::
- :tags: bug, firebird
- :tickets: 2757
+ :tags: bug, orm, declarative
+ :tickets: 2865
- Type lookup when reflecting the Firebird types LONG and
- INT64 has been fixed so that LONG is treated as INTEGER,
- INT64 treated as BIGINT, unless the type has a "precision"
- in which case it's treated as NUMERIC. Patch courtesy
- Russell Stuart. Also in 0.8.2.
+ The :class:`.DeferredReflection` class has been enhanced to provide
+ automatic reflection support for the "secondary" table referred
+ to by a :func:`.relationship`. "secondary", when specified
+ either as a string table name, or as a :class:`.Table` object with
+ only a name and :class:`.MetaData` object will also be included
+ in the reflection process when :meth:`.DeferredReflection.prepare`
+ is called.
.. change::
- :tags: bug, postgresql
- :tickets: 2766
+ :tags: feature, orm, backrefs
+ :tickets: 1535
+
+ Added new argument ``include_backrefs=True`` to the
+ :func:`.validates` function; when set to False, a validation event
+      will not be triggered if the event was initiated as a backref to
+ an attribute operation from the other side.
+
+ .. seealso::
+
+ :ref:`feature_1535`
+
+ .. change::
+ :tags: bug, orm, collections, py3k
+ :pullreq: github:40
- Fixed bug in HSTORE type where keys/values that contained
- backslashed quotes would not be escaped correctly when
- using the "non native" (i.e. non-psycopg2) means
- of translating HSTORE data. Patch courtesy Ryan Kelly.
- Also in 0.8.2.
+ Added support for the Python 3 method ``list.clear()`` within
+ the ORM collection instrumentation system; pull request
+ courtesy Eduardo Schettino.
.. change::
:tags: bug, postgresql
- :tickets: 2767
+ :tickets: 2878
- Fixed bug where the order of columns in a multi-column
- Postgresql index would be reflected in the wrong order.
- Courtesy Roman Podolyaka. Also in 0.8.2.
+ Fixed bug where values within an ENUM weren't escaped for single
+ quote signs. Note that this is backwards-incompatible for existing
+ workarounds that manually escape the single quotes.
+
+ .. seealso::
+
+ :ref:`migration_2878`
.. change::
- :tags: bug, sql
- :tickets: 2746, 2668
+ :tags: bug, orm, declarative
- Multiple fixes to the correlation behavior of
- :class:`.Select` constructs, first introduced in 0.8.0:
+ Fixed bug where in Py2K a unicode literal would not be accepted
+ as the string name of a class or other argument within
+ declarative using :func:`.relationship`.
- * To satisfy the use case where FROM entries should be
- correlated outwards to a SELECT that encloses another,
- which then encloses this one, correlation now works
- across multiple levels when explicit correlation is
- established via :meth:`.Select.correlate`, provided
- that the target select is somewhere along the chain
- contained by a WHERE/ORDER BY/columns clause, not
- just nested FROM clauses. This makes
- :meth:`.Select.correlate` act more compatibly to
- that of 0.7 again while still maintaining the new
- "smart" correlation.
+ .. change::
+ :tags: feature, sql
+ :tickets: 2877, 2882
- * When explicit correlation is not used, the usual
- "implicit" correlation limits its behavior to just
- the immediate enclosing SELECT, to maximize compatibility
- with 0.7 applications, and also prevents correlation
- across nested FROMs in this case, maintaining compatibility
- with 0.8.0/0.8.1.
+ New improvements to the :func:`.text` construct, including
+ more flexible ways to set up bound parameters and return types;
+ in particular, a :func:`.text` can now be turned into a full
+ FROM-object, embeddable in other statements as an alias or CTE
+ using the new method :meth:`.TextClause.columns`. The :func:`.text`
+ construct can also render "inline" bound parameters when the construct
+ is compiled in a "literal bound" context.
- * The :meth:`.Select.correlate_except` method was not
- preventing the given FROM clauses from correlation in
- all cases, and also would cause FROM clauses to be incorrectly
- omitted entirely (more like what 0.7 would do),
- this has been fixed.
+ .. seealso::
- * Calling `select.correlate_except(None)` will enter
- all FROM clauses into correlation as would be expected.
+ :ref:`feature_2877`
+
+ .. change::
+ :tags: feature, sql
+ :pullreq: github:42
+
+ A new API for specifying the ``FOR UPDATE`` clause of a ``SELECT``
+ is added with the new :meth:`.GenerativeSelect.with_for_update` method.
+ This method supports a more straightforward system of setting
+ dialect-specific options compared to the ``for_update`` keyword
+ argument of :func:`.select`, and also includes support for the
+ SQL standard ``FOR UPDATE OF`` clause. The ORM also includes
+ a new corresponding method :meth:`.Query.with_for_update`.
+ Pull request courtesy Mario Lassnig.
+
+ .. seealso::
- Also in 0.8.2.
+ :ref:`feature_github_42`
.. change::
- :tags: bug, ext
+ :tags: feature, orm
+ :pullreq: github:42
+
+ A new API for specifying the ``FOR UPDATE`` clause of a ``SELECT``
+ is added with the new :meth:`.Query.with_for_update` method,
+ to complement the new :meth:`.GenerativeSelect.with_for_update` method.
+ Pull request courtesy Mario Lassnig.
+
+ .. seealso::
- Fixed bug whereby if a composite type were set up
- with a function instead of a class, the mutable extension
- would trip up when it tried to check that column
- for being a :class:`.MutableComposite` (which it isn't).
- Courtesy asldevi. Also in 0.8.2.
+ :ref:`feature_github_42`
.. change::
- :tags: bug, sql
- :tickets: 1765
+ :tags: bug, engine
+ :tickets: 2873
- The resolution of :class:`.ForeignKey` objects to their
- target :class:`.Column` has been reworked to be as
- immediate as possible, based on the moment that the
- target :class:`.Column` is associated with the same
- :class:`.MetaData` as this :class:`.ForeignKey`, rather
- than waiting for the first time a join is constructed,
- or similar. This along with other improvements allows
- earlier detection of some foreign key configuration
- issues. Also included here is a rework of the
- type-propagation system, so that
- it should be reliable now to set the type as ``None``
- on any :class:`.Column` that refers to another via
- :class:`.ForeignKey` - the type will be copied from the
- target column as soon as that other column is associated,
- and now works for composite foreign keys as well.
+ The :func:`.create_engine` routine and the related
+ :func:`.make_url` function no longer considers the ``+`` sign
+ to be a space within the password field. The parsing has been
+      adjusted to match RFC 1738 exactly, in that both ``username``
+ and ``password`` expect only ``:``, ``@``, and ``/`` to be
+ encoded.
.. seealso::
- :ref:`migration_1765`
+ :ref:`migration_2873`
+
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 2872
+
+ Some refinements to the :class:`.AliasedClass` construct with regards
+ to descriptors, like hybrids, synonyms, composites, user-defined
+ descriptors, etc. The attribute
+ adaptation which goes on has been made more robust, such that if a descriptor
+ returns another instrumented attribute, rather than a compound SQL
+ expression element, the operation will still proceed.
+      Additionally, the "adapted" operator will retain its class; previously,
+ a change in class from ``InstrumentedAttribute`` to ``QueryableAttribute``
+ (a superclass) would interact with Python's operator system such that
+ an expression like ``aliased(MyClass.x) > MyClass.x`` would reverse itself
+ to read ``myclass.x < myclass_1.x``. The adapted attribute will also
+ refer to the new :class:`.AliasedClass` as its parent which was not
+ always the case before.
.. change::
:tags: feature, sql
- :tickets: 2744, 2734
+ :tickets: 2867
- Provided a new attribute for :class:`.TypeDecorator`
- called :attr:`.TypeDecorator.coerce_to_is_types`,
- to make it easier to control how comparisons using
- ``==`` or ``!=`` to ``None`` and boolean types goes
- about producing an ``IS`` expression, or a plain
- equality expression with a bound parameter.
+ The precision used when coercing a returned floating point value to
+ Python ``Decimal`` via string is now configurable. The
+ flag ``decimal_return_scale`` is now supported by all :class:`.Numeric`
+ and :class:`.Float` types, which will ensure this many digits are taken
+ from the native floating point value when it is converted to string.
+ If not present, the type will make use of the value of ``.scale``, if
+ the type supports this setting and it is non-None. Otherwise the original
+ default length of 10 is used.
+
+ .. seealso::
+ :ref:`feature_2867`
.. change::
- :tags: feature, postgresql
+ :tags: bug, schema
+ :tickets: 2868
- Support for Postgresql 9.2 range types has been added.
- Currently, no type translation is provided, so works
- directly with strings or psycopg2 2.5 range extension types
- at the moment. Patch courtesy Chris Withers.
+ Fixed a regression caused by :ticket:`2812` where the repr() for
+ table and column names would fail if the name contained non-ascii
+ characters.
.. change::
- :tags: bug, examples
+ :tags: bug, engine
+ :tickets: 2848
- Fixed an issue with the "versioning" recipe whereby a many-to-one
- reference could produce a meaningless version for the target,
- even though it was not changed, when backrefs were present.
- Patch courtesy Matt Chisholm. Also in 0.8.2.
+ The :class:`.RowProxy` object is now sortable in Python as a regular
+ tuple is; this is accomplished via ensuring tuple() conversion on
+ both sides within the ``__eq__()`` method as well as
+ the addition of a ``__lt__()`` method.
+
+ .. seealso::
+
+ :ref:`migration_2848`
.. change::
- :tags: feature, postgresql
- :tickets: 2072
+ :tags: bug, orm
+ :tickets: 2833
+
+ The ``viewonly`` flag on :func:`.relationship` will now prevent
+ attribute history from being written on behalf of the target attribute.
+ This has the effect of the object not being written to the
+ Session.dirty list if it is mutated. Previously, the object would
+ be present in Session.dirty, but no change would take place on behalf
+ of the modified attribute during flush. The attribute still emits
+ events such as backref events and user-defined events and will still
+ receive mutations from backrefs.
+
+ .. seealso::
- Added support for "AUTOCOMMIT" isolation when using the psycopg2
- DBAPI. The keyword is available via the ``isolation_level``
- execution option. Patch courtesy Roman Podolyaka.
- Also in 0.8.2.
+ :ref:`migration_2833`
.. change::
:tags: bug, orm
- :tickets: 2759
- Fixed bug in polymorphic SQL generation where multiple joined-inheritance
- entities against the same base class joined to each other as well
- would not track columns on the base table independently of each other if
- the string of joins were more than two entities long. Also in 0.8.2.
+ Added support for new :attr:`.Session.info` attribute to
+ :class:`.scoped_session`.
.. change::
- :tags: bug, engine
- :pullreq: 6
+ :tags: removed
- Fixed bug where the ``reset_on_return`` argument to various :class:`.Pool`
- implementations would not be propagated when the pool was regenerated.
- Courtesy Eevee. Also in 0.8.2.
+ The "informix" and "informixdb" dialects have been removed; the code
+ is now available as a separate repository on Bitbucket. The IBM-DB
+ project has provided production-level Informix support since the
+ informixdb dialect was first added.
.. change::
:tags: bug, orm
- :tickets: 2754
- Fixed bug where sending a composite attribute into :meth:`.Query.order_by`
- would produce a parenthesized expression not accepted by some databases.
- Also in 0.8.2.
+ Fixed bug where usage of new :class:`.Bundle` object would cause
+ the :attr:`.Query.column_descriptions` attribute to fail.
+
+ .. change::
+ :tags: bug, examples
+
+ Fixed bug which prevented history_meta recipe from working with
+ joined inheritance schemes more than one level deep.
+
+ .. change::
+ :tags: bug, orm, sql, sqlite
+ :tickets: 2858
+
+ Fixed a regression introduced by the join rewriting feature of
+ :ticket:`2369` and :ticket:`2587` where a nested join with one side
+ already an aliased select would fail to translate the ON clause on the
+ outside correctly; in the ORM this could be seen when using a
+ SELECT statement as a "secondary" table.
+
+.. changelog::
+ :version: 0.9.0b1
+ :released: October 26, 2013
+
+ .. change::
+ :tags: feature, orm
+ :tickets: 2810
+
+ The association proxy now returns ``None`` when fetching a scalar
+ attribute off of a scalar relationship, where the scalar relationship
+ itself points to ``None``, instead of raising an ``AttributeError``.
+
+ .. seealso::
+
+ :ref:`migration_2810`
+
+ .. change::
+ :tags: feature, sql, postgresql, mysql
+ :tickets: 2183
+
+ The Postgresql and MySQL dialects now support reflection/inspection
+ of foreign key options, including ON UPDATE, ON DELETE. Postgresql
+      also reflects MATCH, DEFERRABLE, and INITIALLY.  Courtesy ijl.
+
+ .. change::
+ :tags: bug, mysql
+ :tickets: 2839
+
+ Fix and test parsing of MySQL foreign key options within reflection;
+ this complements the work in :ticket:`2183` where we begin to support
+ reflection of foreign key options such as ON UPDATE/ON DELETE
+ cascade.
.. change::
:tags: bug, orm
- :tickets: 2755
+ :tickets: 2787
+
+ :func:`.attributes.get_history()` when used with a scalar column-mapped
+ attribute will now honor the "passive" flag
+ passed to it; as this defaults to ``PASSIVE_OFF``, the function will
+ by default query the database if the value is not present.
+ This is a behavioral change vs. 0.8.
+
+ .. seealso::
+
+ :ref:`change_2787`
+
+ .. change::
+ :tags: feature, orm
+ :tickets: 2787
+
+ Added new method :meth:`.AttributeState.load_history`, works like
+ :attr:`.AttributeState.history` but also fires loader callables.
+
+ .. seealso::
+
+ :ref:`change_2787`
+
+
+ .. change::
+ :tags: feature, sql
+ :tickets: 2850
+
+ A :func:`.bindparam` construct with a "null" type (e.g. no type
+ specified) is now copied when used in a typed expression, and the
+ new copy is assigned the actual type of the compared column. Previously,
+ this logic would occur on the given :func:`.bindparam` in place.
+ Additionally, a similar process now occurs for :func:`.bindparam` constructs
+ passed to :meth:`.ValuesBase.values` for an :class:`.Insert` or
+ :class:`.Update` construct, within the compilation phase of the
+ construct.
- Fixed the interaction between composite attributes and
- the :func:`.aliased` function. Previously, composite attributes
- wouldn't work correctly in comparison operations when aliasing
- was applied. Also in 0.8.2.
+ These are both subtle behavioral changes which may impact some
+ usages.
+
+ .. seealso::
+
+ :ref:`migration_2850`
.. change::
:tags: feature, sql
- :tickets: 1443
+ :tickets: 2804, 2823, 2734
+
+ An overhaul of expression handling for special symbols particularly
+ with conjunctions, e.g.
+ ``None`` :func:`.expression.null` :func:`.expression.true`
+ :func:`.expression.false`, including consistency in rendering NULL
+ in conjunctions, "short-circuiting" of :func:`.and_` and :func:`.or_`
+ expressions which contain boolean constants, and rendering of
+ boolean constants and expressions as compared to "1" or "0" for backends
+ that don't feature ``true``/``false`` constants.
- Added support for "unique constraint" reflection, via the
- :meth:`.Inspector.get_unique_constraints` method.
- Thanks for Roman Podolyaka for the patch.
+ .. seealso::
+
+ :ref:`migration_2804`
.. change::
- :tags: feature, pool
- :tickets: 2752
+ :tags: feature, sql
+ :tickets: 2838
- Added pool logging for "rollback-on-return" and the less used
- "commit-on-return". This is enabled with the rest of pool
- "debug" logging.
+ The typing system now handles the task of rendering "literal bind" values,
+ e.g. values that are normally bound parameters but due to context must
+ be rendered as strings, typically within DDL constructs such as
+ CHECK constraints and indexes (note that "literal bind" values
+ become used by DDL as of :ticket:`2742`). A new method
+ :meth:`.TypeEngine.literal_processor` serves as the base, and
+ :meth:`.TypeDecorator.process_literal_param` is added to allow wrapping
+ of a native literal rendering method.
+
+ .. seealso::
+
+ :ref:`change_2838`
.. change::
- :tags: bug, mysql
- :tickets: 2715
+ :tags: feature, sql
+ :tickets: 2716
- Added another conditional to the ``mysql+gaerdbms`` dialect to
- detect so-called "development" mode, where we should use the
- ``rdbms_mysqldb`` DBAPI. Patch courtesy Brett Slatkin.
- Also in 0.8.2.
+ The :meth:`.Table.tometadata` method now produces copies of
+ all :attr:`.SchemaItem.info` dictionaries from all :class:`.SchemaItem`
+ objects within the structure including columns, constraints,
+ foreign keys, etc. As these dictionaries
+ are copies, they are independent of the original dictionary.
+ Previously, only the ``.info`` dictionary of :class:`.Column` was transferred
+ within this operation, and it was only linked in place, not copied.
+
+ .. change::
+ :tags: feature, postgresql
+ :tickets: 2840
+
+ Added support for rendering ``SMALLSERIAL`` when a :class:`.SmallInteger`
+ type is used on a primary key autoincrement column, based on server
+ version detection of Postgresql version 9.2 or greater.
.. change::
:tags: feature, mysql
- :tickets: 2704
+ :tickets: 2817
+
+ The MySQL :class:`.mysql.SET` type now features the same auto-quoting
+ behavior as that of :class:`.mysql.ENUM`. Quotes are not required when
+ setting up the value, but quotes that are present will be auto-detected
+ along with a warning. This also helps with Alembic where
+ the SET type doesn't render with quotes.
+
+ .. change::
+ :tags: feature, sql
+
+ The ``default`` argument of :class:`.Column` now accepts a class
+ or object method as an argument, in addition to a standalone function;
+ will properly detect if the "context" argument is accepted or not.
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 2835
+
+ The "name" attribute is set on :class:`.Index` before the "attach"
+ events are called, so that attachment events can be used to dynamically
+ generate a name for the index based on the parent table and/or
+ columns.
+
+ .. change::
+ :tags: bug, engine
+ :tickets: 2748
+
+ The method signature of :meth:`.Dialect.reflecttable`, which in
+ all known cases is provided by :class:`.DefaultDialect`, has been
+ tightened to expect ``include_columns`` and ``exclude_columns``
+ arguments without any kw option, reducing ambiguity - previously
+ ``exclude_columns`` was missing.
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 2831
+
+ The erroneous kw arg "schema" has been removed from the :class:`.ForeignKey`
+      object. This was an accidental commit that did nothing; a warning is raised
+ in 0.8.3 when this kw arg is used.
+
+ .. change::
+ :tags: feature, orm
+ :tickets: 1418
+
+ Added a new load option :func:`.orm.load_only`. This allows a series
+ of column names to be specified as loading "only" those attributes,
+ deferring the rest.
+
+ .. change::
+ :tags: feature, orm
+ :tickets: 1418
+
+ The system of loader options has been entirely rearchitected to build
+ upon a much more comprehensive base, the :class:`.Load` object. This
+ base allows any common loader option like :func:`.joinedload`,
+ :func:`.defer`, etc. to be used in a "chained" style for the purpose
+ of specifying options down a path, such as ``joinedload("foo").subqueryload("bar")``.
+ The new system supersedes the usage of dot-separated path names,
+ multiple attributes within options, and the usage of ``_all()`` options.
+
+ .. seealso::
+
+ :ref:`feature_1418`
+
+ .. change::
+ :tags: feature, orm
+ :tickets: 2824
+
+ The :func:`.composite` construct now maintains the return object
+ when used in a column-oriented :class:`.Query`, rather than expanding
+ out into individual columns. This makes use of the new :class:`.Bundle`
+ feature internally. This behavior is backwards incompatible; to
+ select from a composite column which will expand out, use
+ ``MyClass.some_composite.clauses``.
+
+ .. seealso::
+
+ :ref:`migration_2824`
+
+ .. change::
+ :tags: feature, orm
+ :tickets: 2824
+
+ A new construct :class:`.Bundle` is added, which allows for specification
+ of groups of column expressions to a :class:`.Query` construct.
+ The group of columns are returned as a single tuple by default. The
+ behavior of :class:`.Bundle` can be overridden however to provide
+ any sort of result processing to the returned row. The behavior
+ of :class:`.Bundle` is also embedded into composite attributes now
+ when they are used in a column-oriented :class:`.Query`.
+
+ .. seealso::
+
+ :ref:`change_2824`
+
+ :ref:`migration_2824`
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 2812
+
+ A rework to the way that "quoted" identifiers are handled, in that
+ instead of relying upon various ``quote=True`` flags being passed around,
+ these flags are converted into rich string objects with quoting information
+ included at the point at which they are passed to common schema constructs
+ like :class:`.Table`, :class:`.Column`, etc. This solves the issue
+ of various methods that don't correctly honor the "quote" flag such
+ as :meth:`.Engine.has_table` and related methods. The :class:`.quoted_name`
+ object is a string subclass that can also be used explicitly if needed;
+ the object will hold onto the quoting preferences passed and will
+ also bypass the "name normalization" performed by dialects that
+ standardize on uppercase symbols, such as Oracle, Firebird and DB2.
+ The upshot is that the "uppercase" backends can now work with force-quoted
+ names, such as lowercase-quoted names and new reserved words.
+
+ .. seealso::
+
+ :ref:`change_2812`
+
+ .. change::
+ :tags: feature, orm
+ :tickets: 2793
+
+ The ``version_id_generator`` parameter of ``Mapper`` can now be specified
+ to rely upon server generated version identifiers, using triggers
+ or other database-provided versioning features, or via an optional programmatic
+ value, by setting ``version_id_generator=False``.
+      When using a server-generated version identifier, the ORM will use RETURNING when
+ available to immediately
+ load the new version value, else it will emit a second SELECT.
+
+ .. change::
+ :tags: feature, orm
+ :tickets: 2793
+
+ The ``eager_defaults`` flag of :class:`.Mapper` will now allow the
+ newly generated default values to be fetched using an inline
+ RETURNING clause, rather than a second SELECT statement, for backends
+ that support RETURNING.
+
+ .. change::
+ :tags: feature, core
+ :tickets: 2793
+
+ Added a new variant to :meth:`.UpdateBase.returning` called
+ :meth:`.ValuesBase.return_defaults`; this allows arbitrary columns
+ to be added to the RETURNING clause of the statement without interfering
+      with the compiler's usual "implicit returning" feature, which is used to
+ efficiently fetch newly generated primary key values. For supporting
+ backends, a dictionary of all fetched values is present at
+ :attr:`.ResultProxy.returned_defaults`.
+
+ .. change::
+ :tags: bug, mysql
+
+ Improved support for the cymysql driver, supporting version 0.6.5,
+ courtesy Hajime Nakagami.
+
+ .. change::
+ :tags: general
+
+ A large refactoring of packages has reorganized
+ the import structure of many Core modules as well as some aspects
+ of the ORM modules. In particular ``sqlalchemy.sql`` has been broken
+ out into several more modules than before so that the very large size
+ of ``sqlalchemy.sql.expression`` is now pared down. The effort
+ has focused on a large reduction in import cycles. Additionally,
+ the system of API functions in ``sqlalchemy.sql.expression`` and
+ ``sqlalchemy.orm`` has been reorganized to eliminate redundancy
+ in documentation between the functions vs. the objects they produce.
+
+ .. change::
+      :tags: feature, orm
+
+ Added a new attribute :attr:`.Session.info` to :class:`.Session`;
+ this is a dictionary where applications can store arbitrary
+ data local to a :class:`.Session`.
+      The contents of :attr:`.Session.info` can also be initialized
+ using the ``info`` argument of :class:`.Session` or
+ :class:`.sessionmaker`.
+
+
+ .. change::
+ :tags: feature, general, py3k
+ :tickets: 2161
+
+ The C extensions are ported to Python 3 and will build under
+ any supported CPython 2 or 3 environment.
+
+ .. change::
+ :tags: feature, orm
+ :tickets: 2268
+
+ Removal of event listeners is now implemented. The feature is
+ provided via the :func:`.event.remove` function.
+
+ .. seealso::
+
+ :ref:`feature_2268`
+
+ .. change::
+ :tags: feature, orm
+ :tickets: 2789
+
+ The mechanism by which attribute events pass along an
+ :class:`.AttributeImpl` as an "initiator" token has been changed;
+ the object is now an event-specific object called :class:`.attributes.Event`.
+ Additionally, the attribute system no longer halts events based
+ on a matching "initiator" token; this logic has been moved to be
+ specific to ORM backref event handlers, which are the typical source
+ of the re-propagation of an attribute event onto subsequent append/set/remove
+ operations. End user code which emulates the behavior of backrefs
+ must now ensure that recursive event propagation schemes are halted,
+ if the scheme does not use the backref handlers. Using this new system,
+      backref handlers can now perform a
+ "two-hop" operation when an object is appended to a collection,
+ associated with a new many-to-one, de-associated with the previous
+ many-to-one, and then removed from a previous collection. Before this
+ change, the last step of removal from the previous collection would
+ not occur.
+
+ .. seealso::
+
+ :ref:`migration_2789`
+
+ .. change::
+ :tags: feature, sql
+ :tickets: 722
+
+ Added new method to the :func:`.insert` construct
+ :meth:`.Insert.from_select`. Given a list of columns and
+ a selectable, renders ``INSERT INTO (table) (columns) SELECT ..``.
+ While this feature is highlighted as part of 0.9 it is also
+ backported to 0.8.3.
+
+ .. seealso::
+
+ :ref:`feature_722`
+
+ .. change::
+ :tags: feature, engine
+ :tickets: 2770
+
+ New events added to :class:`.ConnectionEvents`:
+
+ * :meth:`.ConnectionEvents.engine_connect`
+ * :meth:`.ConnectionEvents.set_connection_execution_options`
+ * :meth:`.ConnectionEvents.set_engine_execution_options`
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 1765
+
+ The resolution of :class:`.ForeignKey` objects to their
+ target :class:`.Column` has been reworked to be as
+ immediate as possible, based on the moment that the
+ target :class:`.Column` is associated with the same
+ :class:`.MetaData` as this :class:`.ForeignKey`, rather
+ than waiting for the first time a join is constructed,
+ or similar. This along with other improvements allows
+ earlier detection of some foreign key configuration
+ issues. Also included here is a rework of the
+ type-propagation system, so that
+ it should be reliable now to set the type as ``None``
+ on any :class:`.Column` that refers to another via
+ :class:`.ForeignKey` - the type will be copied from the
+ target column as soon as that other column is associated,
+ and now works for composite foreign keys as well.
- The ``mysql_length`` parameter used with :class:`.Index` can now
- be passed as a dictionary of column names/lengths, for use
- with composite indexes. Big thanks to Roman Podolyaka for the
- patch. Also in 0.8.2.
+ .. seealso::
+
+ :ref:`migration_1765`
+
+ .. change::
+ :tags: feature, sql
+ :tickets: 2744, 2734
+
+ Provided a new attribute for :class:`.TypeDecorator`
+ called :attr:`.TypeDecorator.coerce_to_is_types`,
+ to make it easier to control how comparisons using
+ ``==`` or ``!=`` to ``None`` and boolean types goes
+ about producing an ``IS`` expression, or a plain
+ equality expression with a bound parameter.
+
+ .. change::
+ :tags: feature, pool
+ :tickets: 2752
+
+ Added pool logging for "rollback-on-return" and the less used
+ "commit-on-return". This is enabled with the rest of pool
+ "debug" logging.
.. change::
:tags: bug, orm, associationproxy
@@ -472,72 +1200,10 @@
are passed correctly.
.. change::
- :tags: bug, mssql
- :tickets: 2747
-
- When querying the information schema on SQL Server 2000, removed
- a CAST call that was added in 0.8.1 to help with driver issues,
- which apparently is not compatible on 2000.
- The CAST remains in place for SQL Server 2005 and greater.
- Also in 0.8.2.
-
- .. change::
- :tags: bug, mysql
- :tickets: 2721
-
- The ``deferrable`` keyword argument on :class:`.ForeignKey` and
- :class:`.ForeignKeyConstraint` will not render the ``DEFERRABLE`` keyword
- on the MySQL dialect. For a long time we left this in place because
- a non-deferrable foreign key would act very differently than a deferrable
- one, but some environments just disable FKs on MySQL, so we'll be less
- opinionated here. Also in 0.8.2.
-
- .. change::
- :tags: bug, ext, orm
- :tickets: 2730
-
- Fixed bug where :class:`.MutableDict` didn't report a change event
- when ``clear()`` was called. Also in 0.8.2
-
- .. change::
- :tags: bug, sql
- :tickets: 2738
-
- Fixed bug whereby joining a select() of a table "A" with multiple
- foreign key paths to a table "B", to that table "B", would fail
- to produce the "ambiguous join condition" error that would be
- reported if you join table "A" directly to "B"; it would instead
- produce a join condition with multiple criteria. Also in 0.8.2.
-
- .. change::
- :tags: bug, sql, reflection
- :tickets: 2728
-
- Fixed bug whereby using :meth:`.MetaData.reflect` across a remote
- schema as well as a local schema could produce wrong results
- in the case where both schemas had a table of the same name.
- Also in 0.8.2.
-
- .. change::
- :tags: bug, sql
- :tickets: 2726
-
- Removed the "not implemented" ``__iter__()`` call from the base
- :class:`.ColumnOperators` class, while this was introduced
- in 0.8.0 to prevent an endless, memory-growing loop when one also
- implements a ``__getitem__()`` method on a custom
- operator and then calls erroneously ``list()`` on that object,
- it had the effect of causing column elements to report that they
- were in fact iterable types which then throw an error when you try
- to iterate. There's no real way to have both sides here so we
- stick with Python best practices. Careful with implementing
- ``__getitem__()`` on your custom operators! Also in 0.8.2.
-
- .. change::
:tags: feature, sql
:tickets: 1068
- A :class:`.Label` construct will now render as its name alone
+ A :func:`~sqlalchemy.sql.expression.label` construct will now render as its name alone
in an ``ORDER BY`` clause, if that label is also referred to
in the columns clause of the select, instead of rewriting the
full expression. This gives the database a better chance to
@@ -558,7 +1224,7 @@
official Python driver.
.. change::
- :tags: feature, general
+ :tags: feature, general, py3k
:tickets: 2671
The codebase is now "in-place" for Python
@@ -575,16 +1241,16 @@
:tags: bug, orm
:tickets: 2736
- The "auto-aliasing" behavior of the :class:`.Query.select_from`
+ The "auto-aliasing" behavior of the :meth:`.Query.select_from`
method has been turned off. The specific behavior is now
- availble via a new method :class:`.Query.select_entity_from`.
+ available via a new method :meth:`.Query.select_entity_from`.
The auto-aliasing behavior here was never well documented and
- is generally not what's desired, as :class:`.Query.select_from`
+ is generally not what's desired, as :meth:`.Query.select_from`
has become more oriented towards controlling how a JOIN is
- rendered. :class:`.Query.select_entity_from` will also be made
+ rendered. :meth:`.Query.select_entity_from` will also be made
available in 0.8 so that applications which rely on the auto-aliasing
can shift their applications to use this method.
.. seealso::
- :ref:`migration_2736` \ No newline at end of file
+ :ref:`migration_2736`
diff --git a/doc/build/changelog/migration_04.rst b/doc/build/changelog/migration_04.rst
index 236bfc3ce..cb53534af 100644
--- a/doc/build/changelog/migration_04.rst
+++ b/doc/build/changelog/migration_04.rst
@@ -749,10 +749,10 @@ Just like it says:
b = bindparam('foo', type_=String)
-in_ Function Changed to Accept Sequence or Selectable
------------------------------------------------------
+in\_ Function Changed to Accept Sequence or Selectable
+------------------------------------------------------
-The in_ function now takes a sequence of values or a
+The in\_ function now takes a sequence of values or a
selectable as its sole argument. The previous API of passing
in values as positional arguments still works, but is now
deprecated. This means that
diff --git a/doc/build/changelog/migration_07.rst b/doc/build/changelog/migration_07.rst
index 5bc7e74aa..207397f52 100644
--- a/doc/build/changelog/migration_07.rst
+++ b/doc/build/changelog/migration_07.rst
@@ -278,18 +278,18 @@ unchanged:
:ticket:`1923`
+.. _07_migration_mutation_extension:
+
Mutation event extension, supersedes "mutable=True"
---------------------------------------------------
-A new extension, `Mutation Tracking <http://www.sqlalchemy.o
-rg/docs/07/orm/extensions/mutable.html>`_, provides a
+A new extension, :ref:`mutable_toplevel`, provides a
mechanism by which user-defined datatypes can provide change
events back to the owning parent or parents. The extension
includes an approach for scalar database values, such as
-those managed by ``PickleType``, ``postgresql.ARRAY``, or
+those managed by :class:`.PickleType`, ``postgresql.ARRAY``, or
other custom ``MutableType`` classes, as well as an approach
-for ORM "composites", those configured using :ref:`composite()
-<mapper_composite>`_.
+for ORM "composites", those configured using :func:`~.sqlalchemy.orm.composite`.
.. seealso::
diff --git a/doc/build/changelog/migration_08.rst b/doc/build/changelog/migration_08.rst
index 971dd2f51..9f1715e28 100644
--- a/doc/build/changelog/migration_08.rst
+++ b/doc/build/changelog/migration_08.rst
@@ -522,7 +522,7 @@ accepted as a FROM clause within the core::
stmt = select([User]).where(User.id == 5)
Above, the mapped ``User`` class will expand into
-:class:`.Table` to which :class:`.User` is mapped.
+the :class:`.Table` to which ``User`` is mapped.
:ticket:`2245`
@@ -1235,7 +1235,7 @@ No more magic coercion of "=" to IN when comparing to subquery in MS-SQL
------------------------------------------------------------------------
We found a very old behavior in the MSSQL dialect which
-would attempt to rescue the user from his or herself when
+would attempt to rescue users from themselves when
doing something like this:
::
diff --git a/doc/build/changelog/migration_09.rst b/doc/build/changelog/migration_09.rst
index 424802c3d..f318b0346 100644
--- a/doc/build/changelog/migration_09.rst
+++ b/doc/build/changelog/migration_09.rst
@@ -9,7 +9,7 @@ What's New in SQLAlchemy 0.9?
and SQLAlchemy version 0.9, which is expected for release
in late 2013.
- Document date: May 29, 2013
+ Document last updated: January 8, 2014
Introduction
============
@@ -18,9 +18,8 @@ This guide introduces what's new in SQLAlchemy version 0.9,
and also documents changes which affect users migrating
their applications from the 0.8 series of SQLAlchemy to 0.9.
-Version 0.9 is a faster-than-usual push from version 0.8,
-featuring a more versatile codebase with regards to modern
-Python versions. See :ref:`behavioral_changes_09` for
+Please carefully review
+:ref:`behavioral_changes_orm_09` and :ref:`behavioral_changes_core_09` for
potentially backwards-incompatible changes.
Platform Support
@@ -37,15 +36,49 @@ Python 3. All SQLAlchemy modules and unit tests are now interpreted
equally well with any Python interpreter from 2.6 forward, including
the 3.1 and 3.2 interpreters.
-At the moment, the C extensions are still not fully ported to
-Python 3.
+:ticket:`2671`
+C Extensions Supported on Python 3
+-----------------------------------
+The C extensions have been ported to support Python 3 and now build
+in both Python 2 and Python 3 environments.
-.. _behavioral_changes_09:
+:ticket:`2161`
+
+.. _behavioral_changes_orm_09:
+
+Behavioral Changes - ORM
+========================
+
+.. _migration_2824:
+
+Composite attributes are now returned as their object form when queried on a per-attribute basis
+------------------------------------------------------------------------------------------------
+
+Using a :class:`.Query` in conjunction with a composite attribute now returns the object
+type maintained by that composite, rather than being broken out into individual
+columns. Using the mapping setup at :ref:`mapper_composite`::
+
+ >>> session.query(Vertex.start, Vertex.end).\
+ ... filter(Vertex.start == Point(3, 4)).all()
+ [(Point(x=3, y=4), Point(x=5, y=6))]
+
+This change is backwards-incompatible with code that expects the individual attribute
+to be expanded into individual columns. To get that behavior, use the ``.clauses``
+accessor::
+
+
+ >>> session.query(Vertex.start.clauses, Vertex.end.clauses).\
+ ... filter(Vertex.start == Point(3, 4)).all()
+ [(3, 4, 5, 6)]
+
+.. seealso::
+
+ :ref:`change_2824`
+
+:ticket:`2824`
-Behavioral Changes
-==================
.. _migration_2736:
@@ -124,6 +157,65 @@ to 0.9 without issue.
:ticket:`2736`
+
+.. _migration_2833:
+
+``viewonly=True`` on ``relationship()`` prevents history from taking effect
+---------------------------------------------------------------------------
+
+The ``viewonly`` flag on :func:`.relationship` is applied to prevent changes
+to the target attribute from having any effect within the flush process.
+This is achieved by eliminating the attribute from being considered during
+the flush. However, up until now, changes to the attribute would still
+register the parent object as "dirty" and trigger a potential flush. The change
+is that the ``viewonly`` flag now prevents history from being set for the
+target attribute as well. Attribute events like backrefs and user-defined events
+still continue to function normally.
+
+The change is illustrated as follows::
+
+ from sqlalchemy import Column, Integer, ForeignKey, create_engine
+ from sqlalchemy.orm import backref, relationship, Session
+ from sqlalchemy.ext.declarative import declarative_base
+ from sqlalchemy import inspect
+
+ Base = declarative_base()
+
+ class A(Base):
+ __tablename__ = 'a'
+ id = Column(Integer, primary_key=True)
+
+ class B(Base):
+ __tablename__ = 'b'
+
+ id = Column(Integer, primary_key=True)
+ a_id = Column(Integer, ForeignKey('a.id'))
+ a = relationship("A", backref=backref("bs", viewonly=True))
+
+ e = create_engine("sqlite://")
+ Base.metadata.create_all(e)
+
+ a = A()
+ b = B()
+
+ sess = Session(e)
+ sess.add_all([a, b])
+ sess.commit()
+
+ b.a = a
+
+ assert b in sess.dirty
+
+ # before 0.9.0
+ # assert a in sess.dirty
+ # assert inspect(a).attrs.bs.history.has_changes()
+
+ # after 0.9.0
+ assert a not in sess.dirty
+ assert not inspect(a).attrs.bs.history.has_changes()
+
+:ticket:`2833`
+
.. _migration_2751:
Association Proxy SQL Expression Improvements and Fixes
@@ -215,9 +307,473 @@ against ``b_value`` directly.
:ticket:`2751`
+.. _migration_2810:
+
+Association Proxy Missing Scalar returns None
+---------------------------------------------
+
+An association proxy from a scalar attribute to a scalar will now return
+``None`` if the proxied object isn't present. This is consistent with the
+fact that missing many-to-ones return None in SQLAlchemy, so should the
+proxied value. E.g.::
+
+ from sqlalchemy import *
+ from sqlalchemy.orm import *
+ from sqlalchemy.ext.declarative import declarative_base
+ from sqlalchemy.ext.associationproxy import association_proxy
+
+ Base = declarative_base()
+
+ class A(Base):
+ __tablename__ = 'a'
+
+ id = Column(Integer, primary_key=True)
+ b = relationship("B", uselist=False)
+
+ bname = association_proxy("b", "name")
+
+ class B(Base):
+ __tablename__ = 'b'
+
+ id = Column(Integer, primary_key=True)
+ a_id = Column(Integer, ForeignKey('a.id'))
+ name = Column(String)
+
+ a1 = A()
+
+ # this is how m2o's always have worked
+ assert a1.b is None
+
+ # but prior to 0.9, this would raise AttributeError,
+ # now returns None just like the proxied value.
+ assert a1.bname is None
+
+:ticket:`2810`
+
+
+.. _change_2787:
+
+attributes.get_history() will query from the DB by default if value not present
+-------------------------------------------------------------------------------
+
+A bugfix regarding :func:`.attributes.get_history` allows a column-based attribute
+to query out to the database for an unloaded value, assuming the ``passive``
+flag is left at its default of ``PASSIVE_OFF``. Previously, this flag would
+not be honored. Additionally, a new method :meth:`.AttributeState.load_history`
+is added to complement the :attr:`.AttributeState.history` attribute, which
+will emit loader callables for an unloaded attribute.
+
+This is a small change demonstrated as follows::
+
+ from sqlalchemy import Column, Integer, String, create_engine, inspect
+ from sqlalchemy.orm import Session, attributes
+ from sqlalchemy.ext.declarative import declarative_base
+
+ Base = declarative_base()
+
+ class A(Base):
+ __tablename__ = 'a'
+ id = Column(Integer, primary_key=True)
+ data = Column(String)
+
+ e = create_engine("sqlite://", echo=True)
+ Base.metadata.create_all(e)
+
+ sess = Session(e)
+
+ a1 = A(data='a1')
+ sess.add(a1)
+ sess.commit() # a1 is now expired
+
+ # history doesn't emit loader callables
+ assert inspect(a1).attrs.data.history == (None, None, None)
+
+ # in 0.8, this would fail to load the unloaded state.
+ assert attributes.get_history(a1, 'data') == ((), ['a1',], ())
+
+ # load_history() is now equivalent to get_history() with
+ # passive=PASSIVE_OFF ^ INIT_OK
+ assert inspect(a1).attrs.data.load_history() == ((), ['a1',], ())
+
+:ticket:`2787`
+
+.. _behavioral_changes_core_09:
+
+Behavioral Changes - Core
+=========================
+
+``None`` can no longer be used as a "partial AND" constructor
+--------------------------------------------------------------
+
+``None`` can no longer be used as the "backstop" to form an AND condition piecemeal.
+This pattern was not a documented pattern even though some SQLAlchemy internals
+made use of it::
+
+ condition = None
+
+ for cond in conditions:
+ condition = condition & cond
+
+ if condition is not None:
+ stmt = stmt.where(condition)
+
+The above sequence, when ``conditions`` is non-empty, will on 0.9 produce
+``SELECT .. WHERE <condition> AND NULL``. The ``None`` is no longer implicitly
+ignored, and is instead consistent with when ``None`` is interpreted in other
+contexts besides that of a conjunction.
+
+The correct code for both 0.8 and 0.9 should read::
+
+ from sqlalchemy.sql import and_
+
+ if conditions:
+ stmt = stmt.where(and_(*conditions))
+
+Another variant that works on all backends on 0.9, but on 0.8 only works on
+backends that support boolean constants::
+
+ from sqlalchemy.sql import true
+
+ condition = true()
+
+ for cond in conditions:
+ condition = cond & condition
+
+ stmt = stmt.where(condition)
+
+On 0.8, this will produce a SELECT statement that always has ``AND true``
+in the WHERE clause, which is not accepted by backends that don't support
+boolean constants (MySQL, MSSQL). On 0.9, the ``true`` constant will be dropped
+within an ``and_()`` conjunction.
+
+.. seealso::
+
+ :ref:`migration_2804`
+
+.. _migration_2873:
+
+The "password" portion of a ``create_engine()`` no longer considers the ``+`` sign as an encoded space
+------------------------------------------------------------------------------------------------------
+
+For whatever reason, the Python function ``unquote_plus()`` was applied to the
+"password" field of a URL, which is an incorrect application of the
+encoding rules described in `RFC 1738 <http://www.ietf.org/rfc/rfc1738.txt>`_
+in that it escaped spaces as plus signs. The stringification of a URL
+now only encodes ":", "@", or "/" and nothing else, and is now applied to both the
+``username`` and ``password`` fields (previously it only applied to the
+password). On parsing, encoded characters are converted, but plus signs and
+spaces are passed through as is::
+
+ # password: "pass word + other:words"
+ dbtype://user:pass word + other%3Awords@host/dbname
+
+ # password: "apples/oranges"
+ dbtype://username:apples%2Foranges@hostspec/database
+
+ # password: "apples@oranges@@"
+ dbtype://username:apples%40oranges%40%40@hostspec/database
+
+ # password: '', username is "username@"
+ dbtype://username%40:@hostspec/database
+
+
+:ticket:`2873`
+
+.. _migration_2879:
+
+The precedence rules for COLLATE have been changed
+--------------------------------------------------
+
+Previously, an expression like the following::
+
+ print (column('x') == 'somevalue').collate("en_EN")
+
+would produce an expression like this::
+
+ -- 0.8 behavior
+ (x = :x_1) COLLATE en_EN
+
+The above is misunderstood by MSSQL and is generally not the syntax suggested
+for any database. The expression will now produce the syntax illustrated
+by that of most database documentation::
+
+ -- 0.9 behavior
+ x = :x_1 COLLATE en_EN
+
+The potentially backwards incompatible change arises if the :meth:`.collate`
+operator is being applied to the right-hand column, as follows::
+
+ print column('x') == literal('somevalue').collate("en_EN")
+
+In 0.8, this produces::
+
+ x = :param_1 COLLATE en_EN
+
+However in 0.9, will now produce the more accurate, but probably not what you
+want, form of::
+
+ x = (:param_1 COLLATE en_EN)
+
+The :meth:`.ColumnOperators.collate` operator now works more appropriately within an
+``ORDER BY`` expression as well, as a specific precedence has been given to the
+``ASC`` and ``DESC`` operators which will again ensure no parentheses are
+generated::
+
+ >>> # 0.8
+ >>> print column('x').collate('en_EN').desc()
+ (x COLLATE en_EN) DESC
+
+ >>> # 0.9
+ >>> print column('x').collate('en_EN').desc()
+ x COLLATE en_EN DESC
+
+:ticket:`2879`
+
+
+
+.. _migration_2878:
+
+Postgresql CREATE TYPE <x> AS ENUM now applies quoting to values
+----------------------------------------------------------------
+
+The :class:`.postgresql.ENUM` type will now apply escaping to single quote
+signs within the enumerated values::
+
+ >>> from sqlalchemy.dialects import postgresql
+ >>> type = postgresql.ENUM('one', 'two', "three's", name="myenum")
+ >>> from sqlalchemy.dialects.postgresql import base
+ >>> print base.CreateEnumType(type).compile(dialect=postgresql.dialect())
+ CREATE TYPE myenum AS ENUM ('one','two','three''s')
+
+Existing workarounds which already escape single quote signs will need to be
+modified, else they will now double-escape.
+
+:ticket:`2878`
+
New Features
============
+.. _feature_2268:
+
+Event Removal API
+-----------------
+
+Events established using :func:`.event.listen` or :func:`.event.listens_for`
+can now be removed using the new :func:`.event.remove` function. The ``target``,
+``identifier`` and ``fn`` arguments sent to :func:`.event.remove` need to match
+exactly those which were sent for listening, and the event will be removed
+from all locations in which it had been established::
+
+ @event.listens_for(MyClass, "before_insert", propagate=True)
+ def my_before_insert(mapper, connection, target):
+ """listen for before_insert"""
+ # ...
+
+ event.remove(MyClass, "before_insert", my_before_insert)
+
+In the example above, the ``propagate=True`` flag is set. This
+means ``my_before_insert()`` is established as a listener for ``MyClass``
+as well as all subclasses of ``MyClass``.
+The system tracks everywhere that the ``my_before_insert()``
+listener function had been placed as a result of this call and removes it as
+a result of calling :func:`.event.remove`.
+
+The removal system uses a registry to associate arguments passed to
+:func:`.event.listen` with collections of event listeners, which are in many
+cases wrapped versions of the original user-supplied function. This registry
+makes heavy use of weak references in order to allow all the contained contents,
+such as listener targets, to be garbage collected when they go out of scope.
+
+:ticket:`2268`
+
+.. _feature_1418:
+
+New Query Options API; ``load_only()`` option
+---------------------------------------------
+
+The system of loader options such as :func:`.orm.joinedload`,
+:func:`.orm.subqueryload`, :func:`.orm.lazyload`, :func:`.orm.defer`, etc.
+all build upon a new system known as :class:`.Load`. :class:`.Load` provides
+a "method chained" (a.k.a. :term:`generative`) approach to loader options, so that
+instead of joining together long paths using dots or multiple attribute names,
+an explicit loader style is given for each path.
+
+While the new way is slightly more verbose, it is simpler to understand
+in that there is no ambiguity in what options are being applied to which paths;
+it simplifies the method signatures of the options and provides greater flexibility
+particularly for column-based options. The old systems are to remain functional
+indefinitely as well and all styles can be mixed.
+
+**Old Way**
+
+To set a certain style of loading along every link in a multi-element path, the ``_all()``
+option has to be used::
+
+ query(User).options(joinedload_all("orders.items.keywords"))
+
+**New Way**
+
+Loader options are now chainable, so the same ``joinedload(x)`` method is applied
+equally to each link, without the need to keep straight between
+:func:`.joinedload` and :func:`.joinedload_all`::
+
+ query(User).options(joinedload("orders").joinedload("items").joinedload("keywords"))
+
+**Old Way**
+
+Setting an option on path that is based on a subclass requires that all
+links in the path be spelled out as class bound attributes, since the
+:meth:`.PropComparator.of_type` method needs to be called::
+
+ session.query(Company).\
+ options(
+ subqueryload_all(
+ Company.employees.of_type(Engineer),
+ Engineer.machines
+ )
+ )
+
+**New Way**
+
+Only those elements in the path that actually need :meth:`.PropComparator.of_type`
+need to be set as a class-bound attribute, string-based names can be resumed
+afterwards::
+
+ session.query(Company).\
+ options(
+ subqueryload(Company.employees.of_type(Engineer)).
+ subqueryload("machines")
+ )
+
+**Old Way**
+
+Setting the loader option on the last link in a long path uses a syntax
+that looks a lot like it should be setting the option for all links in the
+path, causing confusion::
+
+ query(User).options(subqueryload("orders.items.keywords"))
+
+**New Way**
+
+A path can now be spelled out using :func:`.defaultload` for entries in the
+path where the existing loader style should be unchanged. More verbose
+but the intent is clearer::
+
+ query(User).options(defaultload("orders").defaultload("items").subqueryload("keywords"))
+
+
+The dotted style can still be taken advantage of, particularly in the case
+of skipping over several path elements::
+
+ query(User).options(defaultload("orders.items").subqueryload("keywords"))
+
+**Old Way**
+
+The :func:`.defer` option on a path needed to be spelled out with the full
+path for each column::
+
+ query(User).options(defer("orders.description"), defer("orders.isopen"))
+
+**New Way**
+
+A single :class:`.Load` object that arrives at the target path can have
+:meth:`.Load.defer` called upon it repeatedly::
+
+ query(User).options(defaultload("orders").defer("description").defer("isopen"))
+
+The Load Class
+^^^^^^^^^^^^^^^
+
+The :class:`.Load` class can be used directly to provide a "bound" target,
+especially when multiple parent entities are present::
+
+ from sqlalchemy.orm import Load
+
+ query(User, Address).options(Load(Address).joinedload("entries"))
+
+Load Only
+^^^^^^^^^
+
+A new option :func:`.load_only` achieves a "defer everything but" style of load,
+loading only the given columns and deferring the rest::
+
+ from sqlalchemy.orm import load_only
+
+ query(User).options(load_only("name", "fullname"))
+
+ # specify explicit parent entity
+ query(User, Address).options(Load(User).load_only("name", "fullname"))
+
+ # specify path
+ query(User).options(joinedload(User.addresses).load_only("email_address"))
+
+Class-specific Wildcards
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Using :class:`.Load`, a wildcard may be used to set the loading for all
+relationships (or perhaps columns) on a given entity, without affecting any
+others::
+
+ # lazyload all User relationships
+ query(User).options(Load(User).lazyload("*"))
+
+ # undefer all User columns
+ query(User).options(Load(User).undefer("*"))
+
+ # lazyload all Address relationships
+ query(User).options(defaultload(User.addresses).lazyload("*"))
+
+ # undefer all Address columns
+ query(User).options(defaultload(User.addresses).undefer("*"))
+
+
+:ticket:`1418`
+
+
+.. _feature_2877:
+
+New ``text()`` Capabilities
+---------------------------
+
+The :func:`.text` construct gains new methods:
+
+* :meth:`.TextClause.bindparams` allows bound parameter types and values
+ to be set flexibly::
+
+ # setup values
+ stmt = text("SELECT id, name FROM user "
+ "WHERE name=:name AND timestamp=:timestamp").\
+ bindparams(name="ed", timestamp=datetime(2012, 11, 10, 15, 12, 35))
+
+ # setup types and/or values
+ stmt = text("SELECT id, name FROM user "
+ "WHERE name=:name AND timestamp=:timestamp").\
+ bindparams(
+ bindparam("name", value="ed"),
+ bindparam("timestamp", type_=DateTime())
+ ).bindparams(timestamp=datetime(2012, 11, 10, 15, 12, 35))
+
+* :meth:`.TextClause.columns` supersedes the ``typemap`` option
+ of :func:`.text`, returning a new construct :class:`.TextAsFrom`::
+
+ # turn a text() into an alias(), with a .c. collection:
+ stmt = text("SELECT id, name FROM user").columns(id=Integer, name=String)
+ stmt = stmt.alias()
+
+ stmt = select([addresses]).select_from(
+ addresses.join(stmt), addresses.c.user_id == stmt.c.id)
+
+
+ # or into a cte():
+ stmt = text("SELECT id, name FROM user").columns(id=Integer, name=String)
+ stmt = stmt.cte("x")
+
+ stmt = select([addresses]).select_from(
+ addresses.join(stmt), addresses.c.user_id == stmt.c.id)
+
+:ticket:`2877`
+
.. _feature_722:
INSERT from SELECT
@@ -252,11 +808,247 @@ rendering::
:ticket:`722`
+.. _feature_github_42:
+
+New FOR UPDATE support on ``select()``, ``Query()``
+---------------------------------------------------
+
+An attempt is made to simplify the specification of the ``FOR UPDATE``
+clause on ``SELECT`` statements made within Core and ORM, and support is added
+for the ``FOR UPDATE OF`` SQL supported by Postgresql and Oracle.
+
+Using the core :meth:`.GenerativeSelect.with_for_update`, options like ``FOR SHARE`` and
+``NOWAIT`` can be specified individually, rather than linking to arbitrary
+string codes::
+
+ stmt = select([table]).with_for_update(read=True, nowait=True, of=table)
+
+On Postgresql the above statement might render like::
+
+ SELECT table.a, table.b FROM table FOR SHARE OF table NOWAIT
+
+The :class:`.Query` object gains a similar method :meth:`.Query.with_for_update`
+which behaves in the same way. This method supersedes the existing
+:meth:`.Query.with_lockmode` method, which translated ``FOR UPDATE`` clauses
+using a different system. At the moment, the "lockmode" string argument is still
+accepted by the :meth:`.Session.refresh` method.
+
+
+.. _feature_2867:
+
+Floating Point String-Conversion Precision Configurable for Native Floating Point Types
+---------------------------------------------------------------------------------------
+
+The conversion which SQLAlchemy does whenever a DBAPI returns a Python
+floating point type which is to be converted into a Python ``Decimal()``
+necessarily involves an intermediary step which converts the floating point
+value to a string. The scale used for this string conversion was previously
+hardcoded to 10, and is now configurable. The setting is available on
+both the :class:`.Numeric` as well as the :class:`.Float`
+type, as well as all SQL- and dialect-specific descendant types, using the
+parameter ``decimal_return_scale``. If the type supports a ``.scale`` parameter,
+as is the case with :class:`.Numeric` and some float types such as
+:class:`.mysql.DOUBLE`, the value of ``.scale`` is used as the default
+for ``.decimal_return_scale`` if it is not otherwise specified. If both
+``.scale`` and ``.decimal_return_scale`` are absent, then the default of
+10 takes place. E.g.::
+
+ from sqlalchemy.dialects.mysql import DOUBLE
+ import decimal
+
+ data = Table('data', metadata,
+ Column('double_value',
+ mysql.DOUBLE(decimal_return_scale=12, asdecimal=True))
+ )
+
+ conn.execute(
+ data.insert(),
+ double_value=45.768392065789,
+ )
+ result = conn.scalar(select([data.c.double_value]))
+
+ # previously, this would typically be Decimal("45.7683920658"),
+ # e.g. trimmed to 10 decimal places
+
+ # now we get 12, as requested, as MySQL can support this
+ # much precision for DOUBLE
+ assert result == decimal.Decimal("45.768392065789")
+
+
+:ticket:`2867`
+
+
+.. _change_2824:
+
+Column Bundles for ORM queries
+------------------------------
+
+The :class:`.Bundle` allows for querying of sets of columns, which are then
+grouped into one name under the tuple returned by the query. The initial
+purposes of :class:`.Bundle` are 1. to allow "composite" ORM columns to be
+returned as a single value in a column-based result set, rather than expanding
+them out into individual columns and 2. to allow the creation of custom result-set
+constructs within the ORM, using ad-hoc columns and return types, without involving
+the more heavyweight mechanics of mapped classes.
+
+.. seealso::
+
+ :ref:`migration_2824`
+
+ :ref:`bundles`
+
+:ticket:`2824`
+
+
+Server Side Version Counting
+-----------------------------
+
+The versioning feature of the ORM (now also documented at :ref:`mapper_version_counter`)
+can now make use of server-side version counting schemes, such as those produced
+by triggers or database system columns, as well as conditional programmatic schemes outside
+of the version_id_counter function itself. By providing the value ``False``
+to the ``version_id_generator`` parameter, the ORM will use the already-set version
+identifier, or alternatively fetch the version identifier
+from each row at the same time the INSERT or UPDATE is emitted. When using a
+server-generated version identifier, it is strongly
+recommended that this feature be used only on a backend with strong RETURNING
+support (Postgresql, SQL Server; Oracle also supports RETURNING but the cx_oracle
+driver has only limited support), else the additional SELECT statements will
+add significant performance
+overhead. The example provided at :ref:`server_side_version_counter` illustrates
+the usage of the Postgresql ``xmin`` system column in order to integrate it with
+the ORM's versioning feature.
+
+.. seealso::
+
+ :ref:`server_side_version_counter`
+
+:ticket:`2793`
+
+.. _feature_1535:
+
+``include_backrefs=False`` option for ``@validates``
+----------------------------------------------------
+
+The :func:`.validates` function now accepts an option ``include_backrefs=False``,
+which will bypass firing the validator for the case where the event originated
+from a backref::
+
+ from sqlalchemy import Column, Integer, ForeignKey
+ from sqlalchemy.orm import relationship, validates
+ from sqlalchemy.ext.declarative import declarative_base
+
+ Base = declarative_base()
+
+ class A(Base):
+ __tablename__ = 'a'
+
+ id = Column(Integer, primary_key=True)
+ bs = relationship("B", backref="a")
+
+ @validates("bs")
+ def validate_bs(self, key, item):
+ print("A.bs validator")
+ return item
+
+ class B(Base):
+ __tablename__ = 'b'
+
+ id = Column(Integer, primary_key=True)
+ a_id = Column(Integer, ForeignKey('a.id'))
+
+ @validates("a", include_backrefs=False)
+ def validate_a(self, key, item):
+ print("B.a validator")
+ return item
+
+ a1 = A()
+ a1.bs.append(B()) # prints only "A.bs validator"
+
+
+:ticket:`1535`
+
+
+Postgresql JSON Type
+--------------------
+
+The Postgresql dialect now features a :class:`.postgresql.JSON` type to
+complement the :class:`.postgresql.HSTORE` type.
+
+.. seealso::
+
+ :class:`.postgresql.JSON`
+
+:ticket:`2581`
+
+.. _feature_automap:
+
+Automap Extension
+-----------------
+
+A new extension is added in **0.9.1** known as :mod:`sqlalchemy.ext.automap`. This is an
+**experimental** extension which expands upon the functionality of Declarative
+as well as the :class:`.DeferredReflection` class. Essentially, the extension
+provides a base class :class:`.AutomapBase` which automatically generates
+mapped classes and relationships between them based on given table metadata.
+
+The :class:`.MetaData` in use normally might be produced via reflection, but
+there is no requirement that reflection is used. The most basic usage
+illustrates how :mod:`sqlalchemy.ext.automap` is able to deliver mapped
+classes, including relationships, based on a reflected schema::
+
+ from sqlalchemy.ext.automap import automap_base
+ from sqlalchemy.orm import Session
+ from sqlalchemy import create_engine
+
+ Base = automap_base()
+
+ # engine, suppose it has two tables 'user' and 'address' set up
+ engine = create_engine("sqlite:///mydatabase.db")
+
+ # reflect the tables
+ Base.prepare(engine, reflect=True)
+
+ # mapped classes are now created with names matching that of the table
+ # name.
+ User = Base.classes.user
+ Address = Base.classes.address
+
+ session = Session(engine)
+
+ # rudimentary relationships are produced
+ session.add(Address(email_address="foo@bar.com", user=User(name="foo")))
+ session.commit()
+
+ # collection-based relationships are by default named "<classname>_collection"
+ print (u1.address_collection)
+
+Beyond that, the :class:`.AutomapBase` class is a declarative base, and supports
+all the features that declarative does. The "automapping" feature can be used
+with an existing, explicitly declared schema to generate relationships and
+missing classes only. Naming schemes and relationship-production routines
+can be dropped in using callable functions.
+
+It is hoped that the :class:`.AutomapBase` system provides a quick
+and modernized solution to the problem that the very famous
+`SQLSoup <https://sqlsoup.readthedocs.org/en/latest/>`_
+also tries to solve, that of generating a quick and rudimentary object
+model from an existing database on the fly. By addressing the issue strictly
+at the mapper configuration level, and integrating fully with existing
+Declarative class techniques, :class:`.AutomapBase` seeks to provide
+a well-integrated approach to the issue of expediently auto-generating ad-hoc
+mappings.
+
+.. seealso::
+
+ :ref:`automap_toplevel`
+
Behavioral Improvements
=======================
-Improvements that should produce no compatibility issues, but are good
-to be aware of in case there are unexpected issues.
+Improvements that should produce no compatibility issues except in exceedingly
+rare and unusual hypothetical cases, but are good to be aware of in case there are
+unexpected issues.
.. _feature_joins_09:
@@ -422,6 +1214,254 @@ Generates (everywhere except SQLite)::
:ticket:`2369` :ticket:`2587`
+ORM can efficiently fetch just-generated INSERT/UPDATE defaults using RETURNING
+-------------------------------------------------------------------------------
+
+The :class:`.Mapper` has long supported an undocumented flag known as
+``eager_defaults=True``. The effect of this flag is that when an INSERT or UPDATE
+proceeds, and the row is known to have server-generated default values,
+a SELECT would immediately follow it in order to "eagerly" load those new values.
+Normally, the server-generated columns are marked as "expired" on the object,
+so that no overhead is incurred unless the application actually accesses these
+columns soon after the flush. The ``eager_defaults`` flag was therefore not
+of much use as it could only decrease performance, and was present only to support
+exotic event schemes where users needed default values to be available
+immediately within the flush process.
+
+In 0.9, as a result of the version id enhancements, ``eager_defaults`` can now
+emit a RETURNING clause for these values, so on a backend with strong RETURNING
+support in particular Postgresql, the ORM can fetch newly generated default
+and SQL expression values inline with the INSERT or UPDATE. ``eager_defaults``,
+when enabled, makes use of RETURNING automatically when the target backend
+and :class:`.Table` supports "implicit returning".
+
+.. _change_2836:
+
+Subquery Eager Loading will apply DISTINCT to the innermost SELECT for some queries
+------------------------------------------------------------------------------------
+
+In an effort to reduce the number of duplicate rows that can be generated
+by subquery eager loading when a many-to-one relationship is involved, a
+DISTINCT keyword will be applied to the innermost SELECT when the join is
+targeting columns that do not comprise the primary key, as in when loading
+along a many to one.
+
+That is, when subquery loading on a many-to-one from A->B::
+
+ SELECT b.id AS b_id, b.name AS b_name, anon_1.b_id AS a_b_id
+ FROM (SELECT DISTINCT a_b_id FROM a) AS anon_1
+ JOIN b ON b.id = anon_1.a_b_id
+
+Since ``a.b_id`` is a non-distinct foreign key, DISTINCT is applied so that
+redundant ``a.b_id`` are eliminated. The behavior can be turned on or off
+unconditionally for a particular :func:`.relationship` using the flag
+``distinct_target_key``, setting the value to ``True`` for unconditionally
+on, ``False`` for unconditionally off, and ``None`` for the feature to take
+effect when the target SELECT is against columns that do not comprise a full
+primary key. In 0.9, ``None`` is the default.
+
+The option is also backported to 0.8 where the ``distinct_target_key``
+option defaults to ``False``.
+
+While the feature here is designed to help performance by eliminating
+duplicate rows, the ``DISTINCT`` keyword in SQL itself can have a negative
+performance impact. If columns in the SELECT are not indexed, ``DISTINCT``
+will likely perform an ``ORDER BY`` on the rowset which can be expensive.
+By keeping the feature limited just to foreign keys which are hopefully
+indexed in any case, it's expected that the new defaults are reasonable.
+
+The feature also does not eliminate every possible dupe-row scenario; if
+a many-to-one is present elsewhere in the chain of joins, dupe rows may still
+be present.
+
+:ticket:`2836`
+
+.. _migration_2789:
+
+Backref handlers can now propagate more than one level deep
+-----------------------------------------------------------
+
+The mechanism by which attribute events pass along their "initiator", that is
+the object associated with the start of the event, has been changed; instead
+of a :class:`.AttributeImpl` being passed, a new object :class:`.attributes.Event`
+is passed instead; this object refers to the :class:`.AttributeImpl` as well as
+to an "operation token", representing if the operation is an append, remove,
+or replace operation.
+
+The attribute event system no longer looks at this "initiator" object in order to halt a
+recursive series of attribute events. Instead, the system of preventing endless
+recursion due to mutually-dependent backref handlers has been moved
+to the ORM backref event handlers specifically, which now take over the role
+of ensuring that a chain of mutually-dependent events (such as append to collection
+A.bs, set many-to-one attribute B.a in response) doesn't go into an endless recursion
+stream. The rationale here is that the backref system, given more detail and control
+over event propagation, can finally allow operations more than one level deep
+to occur; the typical scenario is when a collection append results in a many-to-one
+replacement operation, which in turn should cause the item to be removed from a
+previous collection::
+
+ class Parent(Base):
+ __tablename__ = 'parent'
+
+ id = Column(Integer, primary_key=True)
+ children = relationship("Child", backref="parent")
+
+ class Child(Base):
+ __tablename__ = 'child'
+
+ id = Column(Integer, primary_key=True)
+ parent_id = Column(ForeignKey('parent.id'))
+
+ p1 = Parent()
+ p2 = Parent()
+ c1 = Child()
+
+ p1.children.append(c1)
+
+ assert c1.parent is p1 # backref event establishes c1.parent as p1
+
+ p2.children.append(c1)
+
+ assert c1.parent is p2 # backref event establishes c1.parent as p2
+ assert c1 not in p1.children # second backref event removes c1 from p1.children
+
+Above, prior to this change, the ``c1`` object would still have been present
+in ``p1.children``, even though it is also present in ``p2.children`` at the
+same time; the backref handlers would have stopped at replacing ``c1.parent`` with
+``p2`` instead of ``p1``. In 0.9, using the more detailed :class:`.Event`
+object as well as letting the backref handlers make more detailed decisions about
+these objects, the propagation can continue onto removing ``c1`` from ``p1.children``
+while maintaining a check against the propagation from going into an endless
+recursive loop.
+
+End-user code which a. makes use of the :meth:`.AttributeEvents.set`,
+:meth:`.AttributeEvents.append`, or :meth:`.AttributeEvents.remove` events,
+and b. initiates further attribute modification operations as a result of these
+events may need to be modified to prevent recursive loops, as the attribute system
+no longer stops a chain of events from propagating endlessly in the absence of the backref
+event handlers. Additionally, code which depends upon the value of the ``initiator``
+will need to be adjusted to the new API, and furthermore must be ready for the
+value of ``initiator`` to change from its original value within a string of
+backref-initiated events, as the backref handlers may now swap in a
+new ``initiator`` value for some operations.
+
+:ticket:`2789`
+
+.. _change_2838:
+
+The typing system now handles the task of rendering "literal bind" values
+-------------------------------------------------------------------------
+
+A new method is added to :class:`.TypeEngine` :meth:`.TypeEngine.literal_processor`
+as well as :meth:`.TypeDecorator.process_literal_param` for :class:`.TypeDecorator`
+which take on the task of rendering so-called "inline literal parameters" - parameters
+that normally render as "bound" values, but are instead being rendered inline
+into the SQL statement due to the compiler configuration. This feature is used
+when generating DDL for constructs such as :class:`.CheckConstraint`, as well
+as by Alembic when using constructs such as ``op.inline_literal()``. Previously,
+a simple "isinstance" check checked for a few basic types, and the "bind processor"
+was used unconditionally, leading to such issues as strings being encoded into utf-8
+prematurely.
+
+Custom types written with :class:`.TypeDecorator` should continue to work in
+"inline literal" scenarios, as the :meth:`.TypeDecorator.process_literal_param`
+falls back to :meth:`.TypeDecorator.process_bind_param` by default, as these methods
+usually handle a data manipulation, not as much how the data is presented to the
+database. :meth:`.TypeDecorator.process_literal_param` can be specified to
+specifically produce a string representing how a value should be rendered
+into an inline DDL statement.
+
+:ticket:`2838`
+
+
+.. _change_2812:
+
+Schema identifiers now carry along their own quoting information
+---------------------------------------------------------------------
+
+This change simplifies the Core's usage of so-called "quote" flags, such
+as the ``quote`` flag passed to :class:`.Table` and :class:`.Column`. The flag
+is now internalized within the string name itself, which is now represented
+as an instance of :class:`.quoted_name`, a string subclass. The
+:class:`.IdentifierPreparer` now relies solely on the quoting preferences
+reported by the :class:`.quoted_name` object rather than checking for any
+explicit ``quote`` flags in most cases. The issue resolved here includes
+that various case-sensitive methods such as :meth:`.Engine.has_table` as well
+as similar methods within dialects now function with explicitly quoted names,
+without the need to complicate or introduce backwards-incompatible changes
+to those APIs (many of which are 3rd party) with the details of quoting flags -
+in particular, a wider range of identifiers now function correctly with the
+so-called "uppercase" backends like Oracle, Firebird, and DB2 (backends that
+store and report upon table and column names using all uppercase for case
+insensitive names).
+
+The :class:`.quoted_name` object is used internally as needed; however if
+other keywords require fixed quoting preferences, the class is available
+publicly.
+
+:ticket:`2812`
+
+.. _migration_2804:
+
+Improved rendering of Boolean constants, NULL constants, conjunctions
+----------------------------------------------------------------------
+
+New capabilities have been added to the :func:`.true` and :func:`.false`
+constants, in particular in conjunction with :func:`.and_` and :func:`.or_`
+functions as well as the behavior of the WHERE/HAVING clauses in conjunction
+with these types, boolean types overall, and the :func:`.null` constant.
+
+Starting with a table such as this::
+
+ from sqlalchemy import Table, Boolean, Integer, Column, MetaData
+
+ t1 = Table('t', MetaData(), Column('x', Boolean()), Column('y', Integer))
+
+A select construct will now render the boolean column as a binary expression
+on backends that don't feature ``true``/``false`` constant behavior::
+
+ >>> from sqlalchemy import select, and_, false, true
+ >>> from sqlalchemy.dialects import mysql, postgresql
+
+ >>> print select([t1]).where(t1.c.x).compile(dialect=mysql.dialect())
+ SELECT t.x, t.y FROM t WHERE t.x = 1
+
+The :func:`.and_` and :func:`.or_` constructs will now exhibit quasi
+"short circuit" behavior, that is truncating a rendered expression, when a
+:func:`.true` or :func:`.false` constant is present::
+
+ >>> print select([t1]).where(and_(t1.c.y > 5, false())).compile(
+ ... dialect=postgresql.dialect())
+ SELECT t.x, t.y FROM t WHERE false
+
+:func:`.true` can be used as the base to build up an expression::
+
+ >>> expr = true()
+ >>> expr = expr & (t1.c.y > 5)
+ >>> print select([t1]).where(expr)
+ SELECT t.x, t.y FROM t WHERE t.y > :y_1
+
+The boolean constants :func:`.true` and :func:`.false` themselves render as
+``1 = 1`` and ``0 = 1`` for a backend with no boolean constants::
+
+ >>> print select([t1]).where(and_(t1.c.y > 5, false())).compile(
+ ... dialect=mysql.dialect())
+ SELECT t.x, t.y FROM t WHERE 0 = 1
+
+Interpretation of ``None``, while not particularly valid SQL, is at least
+now consistent::
+
+ >>> print select([t1.c.x]).where(None)
+ SELECT t.x FROM t WHERE NULL
+
+ >>> print select([t1.c.x]).where(None).where(None)
+ SELECT t.x FROM t WHERE NULL AND NULL
+
+ >>> print select([t1.c.x]).where(and_(None, None))
+ SELECT t.x FROM t WHERE NULL AND NULL
+
+:ticket:`2804`
+
.. _migration_1068:
Label constructs can now render as their name alone in an ORDER BY
@@ -453,13 +1493,93 @@ And now renders as::
SELECT foo(t.c1) + t.c2 AS expr
FROM t ORDER BY expr
-The ORDER BY only renders the label if the label isn't further embedded into an expression within the ORDER BY, other than a simple ``ASC`` or ``DESC``.
+The ORDER BY only renders the label if the label isn't further
+embedded into an expression within the ORDER BY, other than a simple
+``ASC`` or ``DESC``.
-The above format works on all databases tested, but might have compatibility issues with older database versions (MySQL 4? Oracle 8? etc.). Based on user reports we can add rules
-that will disable the feature based on database version detection.
+The above format works on all databases tested, but might have
+compatibility issues with older database versions (MySQL 4? Oracle 8?
+etc.). Based on user reports we can add rules that will disable the
+feature based on database version detection.
:ticket:`1068`
+.. _migration_2848:
+
+``RowProxy`` now has tuple-sorting behavior
+-------------------------------------------
+
+The :class:`.RowProxy` object acts much like a tuple, but up until now
+would not sort as a tuple if a list of them were sorted using ``sorted()``.
+The ``__eq__()`` method now compares both sides as a tuple and also
+an ``__lt__()`` method has been added::
+
+ users.insert().execute(
+ dict(user_id=1, user_name='foo'),
+ dict(user_id=2, user_name='bar'),
+ dict(user_id=3, user_name='def'),
+ )
+
+ rows = users.select().order_by(users.c.user_name).execute().fetchall()
+
+ eq_(rows, [(2, 'bar'), (3, 'def'), (1, 'foo')])
+
+ eq_(sorted(rows), [(1, 'foo'), (2, 'bar'), (3, 'def')])
+
+:ticket:`2848`
+
+.. _migration_2850:
+
+A bindparam() construct with no type gets upgraded via copy when a type is available
+------------------------------------------------------------------------------------
+
+The logic which "upgrades" a :func:`.bindparam` construct to take on the
+type of the enclosing expression has been improved in two ways. First, the
+:func:`.bindparam` object is **copied** before the new type is assigned, so that
+the given :func:`.bindparam` is not mutated in place. Secondly, this same
+operation occurs when an :class:`.Insert` or :class:`.Update` construct is compiled,
+regarding the "values" that were set in the statement via the :meth:`.ValuesBase.values`
+method.
+
+If given an untyped :func:`.bindparam`::
+
+ bp = bindparam("some_col")
+
+If we use this parameter as follows::
+
+ expr = mytable.c.col == bp
+
+The type for ``bp`` remains as ``NullType``, however if ``mytable.c.col``
+is of type ``String``, then ``expr.right``, that is the right side of the
+binary expression, will take on the ``String`` type. Previously, ``bp`` itself
+would have been changed in place to have ``String`` as its type.
+
+Similarly, this operation occurs in an :class:`.Insert` or :class:`.Update`::
+
+ stmt = mytable.update().values(col=bp)
+
+Above, ``bp`` remains unchanged, but the ``String`` type will be used when
+the statement is executed, which we can see by examining the ``binds`` dictionary::
+
+ >>> compiled = stmt.compile()
+ >>> compiled.binds['some_col'].type
+ String
+
+The feature allows custom types to take their expected effect within INSERT/UPDATE
+statements without needing to explicitly specify those types within every
+:func:`.bindparam` expression.
+
+The potentially backwards-incompatible changes involve two unlikely
+scenarios. Since the bound parameter is
+**cloned**, users should not be relying upon making in-place changes to a
+:func:`.bindparam` construct once created. Additionally, code which uses
+:func:`.bindparam` within an :class:`.Insert` or :class:`.Update` statement
+which is relying on the fact that the :func:`.bindparam` is not typed according
+to the column being assigned towards will no longer function in that way.
+
+:ticket:`2850`
+
+
.. _migration_1765:
Columns can reliably get their type from a column referred to via ForeignKey
@@ -547,6 +1667,7 @@ Scenarios which now work correctly include:
:ticket:`1765`
+
Dialect Changes
===============
diff --git a/doc/build/conf.py b/doc/build/conf.py
index e7c116c18..1546177a6 100644
--- a/doc/build/conf.py
+++ b/doc/build/conf.py
@@ -18,7 +18,7 @@ import os
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../../lib'))
-sys.path.insert(0, os.path.abspath('../../examples'))
+sys.path.insert(0, os.path.abspath('../..')) # examples
sys.path.insert(0, os.path.abspath('.'))
import sqlalchemy
@@ -34,10 +34,12 @@ import sqlalchemy
extensions = [
'sphinx.ext.autodoc',
'builder.autodoc_mods',
- 'builder.changelog',
+ 'changelog',
+ 'sphinx_paramlinks',
'builder.dialect_info',
'builder.mako',
'builder.sqlformatter',
+ 'builder.viewsource',
]
# Add any paths that contain templates here, relative to this directory.
@@ -62,7 +64,13 @@ changelog_inner_tag_sort = ["feature", "bug", "moved", "changed", "removed"]
# how to render changelog links
changelog_render_ticket = "http://www.sqlalchemy.org/trac/ticket/%s"
-changelog_render_pullreq = "https://bitbucket.org/sqlalchemy/sqlalchemy/pull-request/%s"
+
+changelog_render_pullreq = {
+ "bitbucket": "https://bitbucket.org/zzzeek/sqlalchemy/pull-request/%s",
+ "default": "https://bitbucket.org/zzzeek/sqlalchemy/pull-request/%s",
+ "github": "https://github.com/zzzeek/sqlalchemy/pull/%s",
+}
+
changelog_render_changeset = "http://www.sqlalchemy.org/trac/changeset/%s"
@@ -74,7 +82,7 @@ master_doc = 'contents'
# General information about the project.
project = u'SQLAlchemy'
-copyright = u'2007-2013, the SQLAlchemy authors and contributors'
+copyright = u'2007-2014, the SQLAlchemy authors and contributors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@@ -83,9 +91,9 @@ copyright = u'2007-2013, the SQLAlchemy authors and contributors'
# The short X.Y version.
version = "0.9"
# The full version, including alpha/beta/rc tags.
-release = "0.9.0"
+release = "0.9.1"
-release_date = "(not released)"
+release_date = "January 5, 2014"
site_base = "http://www.sqlalchemy.org"
@@ -280,7 +288,7 @@ man_pages = [
epub_title = u'SQLAlchemy'
epub_author = u'SQLAlchemy authors'
epub_publisher = u'SQLAlchemy authors'
-epub_copyright = u'2013, SQLAlchemy authors'
+epub_copyright = u'2007-2014, SQLAlchemy authors'
# The language of the text. It defaults to the language option
# or en if the language is not set.
diff --git a/doc/build/copyright.rst b/doc/build/copyright.rst
index 082beb3a3..659ce6c74 100644
--- a/doc/build/copyright.rst
+++ b/doc/build/copyright.rst
@@ -1,10 +1,12 @@
+:orphan:
+
====================
Appendix: Copyright
====================
This is the MIT license: `<http://www.opensource.org/licenses/mit-license.php>`_
-Copyright (c) 2005-2013 Michael Bayer and contributors.
+Copyright (c) 2005-2014 Michael Bayer and contributors.
SQLAlchemy is a trademark of Michael Bayer.
Permission is hereby granted, free of charge, to any person obtaining a copy of this
diff --git a/doc/build/core/connections.rst b/doc/build/core/connections.rst
index b55ca2a82..c05bf18d0 100644
--- a/doc/build/core/connections.rst
+++ b/doc/build/core/connections.rst
@@ -495,19 +495,15 @@ Connection / Engine API
=======================
.. autoclass:: Connection
- :show-inheritance:
:members:
.. autoclass:: Connectable
- :show-inheritance:
:members:
.. autoclass:: Engine
- :show-inheritance:
:members:
.. autoclass:: NestedTransaction
- :show-inheritance:
:members:
.. autoclass:: sqlalchemy.engine.ResultProxy
@@ -517,10 +513,8 @@ Connection / Engine API
:members:
.. autoclass:: Transaction
- :show-inheritance:
:members:
.. autoclass:: TwoPhaseTransaction
- :show-inheritance:
:members:
diff --git a/doc/build/core/constraints.rst b/doc/build/core/constraints.rst
new file mode 100644
index 000000000..13ead6fbf
--- /dev/null
+++ b/doc/build/core/constraints.rst
@@ -0,0 +1,409 @@
+.. _metadata_constraints_toplevel:
+.. _metadata_constraints:
+
+.. module:: sqlalchemy.schema
+
+=================================
+Defining Constraints and Indexes
+=================================
+
+.. _metadata_foreignkeys:
+
+This section will discuss SQL :term:`constraints` and indexes. In SQLAlchemy
+the key classes include :class:`.ForeignKeyConstraint` and :class:`.Index`.
+
+Defining Foreign Keys
+---------------------
+
+A *foreign key* in SQL is a table-level construct that constrains one or more
+columns in that table to only allow values that are present in a different set
+of columns, typically but not always located on a different table. We call the
+columns which are constrained the *foreign key* columns and the columns which
+they are constrained towards the *referenced* columns. The referenced columns
+almost always define the primary key for their owning table, though there are
+exceptions to this. The foreign key is the "joint" that connects together
+pairs of rows which have a relationship with each other, and SQLAlchemy
+assigns very deep importance to this concept in virtually every area of its
+operation.
+
+In SQLAlchemy as well as in DDL, foreign key constraints can be defined as
+additional attributes within the table clause, or for single-column foreign
+keys they may optionally be specified within the definition of a single
+column. The single column foreign key is more common, and at the column level
+is specified by constructing a :class:`~sqlalchemy.schema.ForeignKey` object
+as an argument to a :class:`~sqlalchemy.schema.Column` object::
+
+ user_preference = Table('user_preference', metadata,
+ Column('pref_id', Integer, primary_key=True),
+ Column('user_id', Integer, ForeignKey("user.user_id"), nullable=False),
+ Column('pref_name', String(40), nullable=False),
+ Column('pref_value', String(100))
+ )
+
+Above, we define a new table ``user_preference`` for which each row must
+contain a value in the ``user_id`` column that also exists in the ``user``
+table's ``user_id`` column.
+
+The argument to :class:`~sqlalchemy.schema.ForeignKey` is most commonly a
+string of the form *<tablename>.<columnname>*, or for a table in a remote
+schema or "owner" of the form *<schemaname>.<tablename>.<columnname>*. It may
+also be an actual :class:`~sqlalchemy.schema.Column` object, which as we'll
+see later is accessed from an existing :class:`~sqlalchemy.schema.Table`
+object via its ``c`` collection::
+
+ ForeignKey(user.c.user_id)
+
+The advantage to using a string is that the in-python linkage between ``user``
+and ``user_preference`` is resolved only when first needed, so that table
+objects can be easily spread across multiple modules and defined in any order.
+
+Foreign keys may also be defined at the table level, using the
+:class:`~sqlalchemy.schema.ForeignKeyConstraint` object. This object can
+describe a single- or multi-column foreign key. A multi-column foreign key is
+known as a *composite* foreign key, and almost always references a table that
+has a composite primary key. Below we define a table ``invoice`` which has a
+composite primary key::
+
+ invoice = Table('invoice', metadata,
+ Column('invoice_id', Integer, primary_key=True),
+ Column('ref_num', Integer, primary_key=True),
+ Column('description', String(60), nullable=False)
+ )
+
+And then a table ``invoice_item`` with a composite foreign key referencing
+``invoice``::
+
+ invoice_item = Table('invoice_item', metadata,
+ Column('item_id', Integer, primary_key=True),
+ Column('item_name', String(60), nullable=False),
+ Column('invoice_id', Integer, nullable=False),
+ Column('ref_num', Integer, nullable=False),
+ ForeignKeyConstraint(['invoice_id', 'ref_num'], ['invoice.invoice_id', 'invoice.ref_num'])
+ )
+
+It's important to note that the
+:class:`~sqlalchemy.schema.ForeignKeyConstraint` is the only way to define a
+composite foreign key. While we could also have placed individual
+:class:`~sqlalchemy.schema.ForeignKey` objects on both the
+``invoice_item.invoice_id`` and ``invoice_item.ref_num`` columns, SQLAlchemy
+would not be aware that these two values should be paired together - it would
+be two individual foreign key constraints instead of a single composite
+foreign key referencing two columns.
+
+.. _use_alter:
+
+Creating/Dropping Foreign Key Constraints via ALTER
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+In all the above examples, the :class:`~sqlalchemy.schema.ForeignKey` object
+causes the "REFERENCES" keyword to be added inline to a column definition
+within a "CREATE TABLE" statement when
+:func:`~sqlalchemy.schema.MetaData.create_all` is issued, and
+:class:`~sqlalchemy.schema.ForeignKeyConstraint` invokes the "CONSTRAINT"
+keyword inline with "CREATE TABLE". There are some cases where this is
+undesirable, particularly when two tables reference each other mutually, each
+with a foreign key referencing the other. In such a situation at least one of
+the foreign key constraints must be generated after both tables have been
+built. To support such a scheme, :class:`~sqlalchemy.schema.ForeignKey` and
+:class:`~sqlalchemy.schema.ForeignKeyConstraint` offer the flag
+``use_alter=True``. When using this flag, the constraint will be generated
+using a definition similar to "ALTER TABLE <tablename> ADD CONSTRAINT <name>
+...". Since a name is required, the ``name`` attribute must also be specified.
+For example::
+
+ node = Table('node', meta,
+ Column('node_id', Integer, primary_key=True),
+ Column('primary_element', Integer,
+ ForeignKey('element.element_id', use_alter=True, name='fk_node_element_id')
+ )
+ )
+
+ element = Table('element', meta,
+ Column('element_id', Integer, primary_key=True),
+ Column('parent_node_id', Integer),
+ ForeignKeyConstraint(
+ ['parent_node_id'],
+ ['node.node_id'],
+ use_alter=True,
+ name='fk_element_parent_node_id'
+ )
+ )
+
+ON UPDATE and ON DELETE
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Most databases support *cascading* of foreign key values, that is the when a
+parent row is updated the new value is placed in child rows, or when the
+parent row is deleted all corresponding child rows are set to null or deleted.
+In data definition language these are specified using phrases like "ON UPDATE
+CASCADE", "ON DELETE CASCADE", and "ON DELETE SET NULL", corresponding to
+foreign key constraints. The phrase after "ON UPDATE" or "ON DELETE" may also
+allow other phrases that are specific to the database in use. The
+:class:`~sqlalchemy.schema.ForeignKey` and
+:class:`~sqlalchemy.schema.ForeignKeyConstraint` objects support the
+generation of this clause via the ``onupdate`` and ``ondelete`` keyword
+arguments. The value is any string which will be output after the appropriate
+"ON UPDATE" or "ON DELETE" phrase::
+
+ child = Table('child', meta,
+ Column('id', Integer,
+ ForeignKey('parent.id', onupdate="CASCADE", ondelete="CASCADE"),
+ primary_key=True
+ )
+ )
+
+ composite = Table('composite', meta,
+ Column('id', Integer, primary_key=True),
+ Column('rev_id', Integer),
+ Column('note_id', Integer),
+ ForeignKeyConstraint(
+ ['rev_id', 'note_id'],
+ ['revisions.id', 'revisions.note_id'],
+ onupdate="CASCADE", ondelete="SET NULL"
+ )
+ )
+
+Note that these clauses are not supported on SQLite, and require ``InnoDB``
+tables when used with MySQL. They may also not be supported on other
+databases.
+
+
+UNIQUE Constraint
+-----------------
+
+Unique constraints can be created anonymously on a single column using the
+``unique`` keyword on :class:`~sqlalchemy.schema.Column`. Explicitly named
+unique constraints and/or those with multiple columns are created via the
+:class:`~sqlalchemy.schema.UniqueConstraint` table-level construct.
+
+.. sourcecode:: python+sql
+
+ from sqlalchemy import UniqueConstraint
+
+ meta = MetaData()
+ mytable = Table('mytable', meta,
+
+ # per-column anonymous unique constraint
+ Column('col1', Integer, unique=True),
+
+ Column('col2', Integer),
+ Column('col3', Integer),
+
+ # explicit/composite unique constraint. 'name' is optional.
+ UniqueConstraint('col2', 'col3', name='uix_1')
+ )
+
+CHECK Constraint
+----------------
+
+Check constraints can be named or unnamed and can be created at the Column or
+Table level, using the :class:`~sqlalchemy.schema.CheckConstraint` construct.
+The text of the check constraint is passed directly through to the database,
+so there is limited "database independent" behavior. Column level check
+constraints generally should only refer to the column to which they are
+placed, while table level constraints can refer to any columns in the table.
+
+Note that some databases, such as MySQL, do not actively support check
+constraints.
+
+.. sourcecode:: python+sql
+
+ from sqlalchemy import CheckConstraint
+
+ meta = MetaData()
+ mytable = Table('mytable', meta,
+
+ # per-column CHECK constraint
+ Column('col1', Integer, CheckConstraint('col1>5')),
+
+ Column('col2', Integer),
+ Column('col3', Integer),
+
+ # table level CHECK constraint. 'name' is optional.
+ CheckConstraint('col2 > col3 + 5', name='check1')
+ )
+
+ {sql}mytable.create(engine)
+ CREATE TABLE mytable (
+ col1 INTEGER CHECK (col1>5),
+ col2 INTEGER,
+ col3 INTEGER,
+ CONSTRAINT check1 CHECK (col2 > col3 + 5)
+ ){stop}
+
+PRIMARY KEY Constraint
+----------------------
+
+The primary key constraint of any :class:`.Table` object is implicitly
+present, based on the :class:`.Column` objects that are marked with the
+:paramref:`.Column.primary_key` flag. The :class:`.PrimaryKeyConstraint`
+object provides explicit access to this constraint, which includes the
+option of being configured directly::
+
+ from sqlalchemy import PrimaryKeyConstraint
+
+ my_table = Table('mytable', metadata,
+ Column('id', Integer),
+ Column('version_id', Integer),
+ Column('data', String(50)),
+ PrimaryKeyConstraint('id', 'version_id', name='mytable_pk')
+ )
+
+.. seealso::
+
+ :class:`.PrimaryKeyConstraint` - detailed API documentation.
+
+Setting up Constraints when using the Declarative ORM Extension
+----------------------------------------------------------------
+
+The :class:`.Table` is the SQLAlchemy Core construct that allows one to define
+table metadata, which among other things can be used by the SQLAlchemy ORM
+as a target to map a class. The :ref:`Declarative <declarative_toplevel>`
+extension allows the :class:`.Table` object to be created automatically, given
+the contents of the table primarily as a mapping of :class:`.Column` objects.
+
+To apply table-level constraint objects such as :class:`.ForeignKeyConstraint`
+to a table defined using Declarative, use the ``__table_args__`` attribute,
+described at :ref:`declarative_table_args`.
+
+Constraints API
+---------------
+.. autoclass:: Constraint
+
+
+.. autoclass:: CheckConstraint
+
+
+.. autoclass:: ColumnCollectionConstraint
+
+
+.. autoclass:: ForeignKey
+ :members:
+
+
+.. autoclass:: ForeignKeyConstraint
+ :members:
+
+
+.. autoclass:: PrimaryKeyConstraint
+
+
+.. autoclass:: UniqueConstraint
+
+
+.. _schema_indexes:
+
+Indexes
+-------
+
+Indexes can be created anonymously (using an auto-generated name ``ix_<column
+label>``) for a single column using the inline ``index`` keyword on
+:class:`~sqlalchemy.schema.Column`, which also modifies the usage of
+``unique`` to apply the uniqueness to the index itself, instead of adding a
+separate UNIQUE constraint. For indexes with specific names or which encompass
+more than one column, use the :class:`~sqlalchemy.schema.Index` construct,
+which requires a name.
+
+Below we illustrate a :class:`~sqlalchemy.schema.Table` with several
+:class:`~sqlalchemy.schema.Index` objects associated. The DDL for "CREATE
+INDEX" is issued right after the create statements for the table:
+
+.. sourcecode:: python+sql
+
+ meta = MetaData()
+ mytable = Table('mytable', meta,
+ # an indexed column, with index "ix_mytable_col1"
+ Column('col1', Integer, index=True),
+
+ # a uniquely indexed column with index "ix_mytable_col2"
+ Column('col2', Integer, index=True, unique=True),
+
+ Column('col3', Integer),
+ Column('col4', Integer),
+
+ Column('col5', Integer),
+ Column('col6', Integer),
+ )
+
+ # place an index on col3, col4
+ Index('idx_col34', mytable.c.col3, mytable.c.col4)
+
+ # place a unique index on col5, col6
+ Index('myindex', mytable.c.col5, mytable.c.col6, unique=True)
+
+ {sql}mytable.create(engine)
+ CREATE TABLE mytable (
+ col1 INTEGER,
+ col2 INTEGER,
+ col3 INTEGER,
+ col4 INTEGER,
+ col5 INTEGER,
+ col6 INTEGER
+ )
+ CREATE INDEX ix_mytable_col1 ON mytable (col1)
+ CREATE UNIQUE INDEX ix_mytable_col2 ON mytable (col2)
+ CREATE UNIQUE INDEX myindex ON mytable (col5, col6)
+ CREATE INDEX idx_col34 ON mytable (col3, col4){stop}
+
+Note in the example above, the :class:`.Index` construct is created
+externally to the table to which it corresponds, using :class:`.Column`
+objects directly. :class:`.Index` also supports
+"inline" definition inside the :class:`.Table`, using string names to
+identify columns::
+
+ meta = MetaData()
+ mytable = Table('mytable', meta,
+ Column('col1', Integer),
+
+ Column('col2', Integer),
+
+ Column('col3', Integer),
+ Column('col4', Integer),
+
+ # place an index on col1, col2
+ Index('idx_col12', 'col1', 'col2'),
+
+ # place a unique index on col3, col4
+ Index('idx_col34', 'col3', 'col4', unique=True)
+ )
+
+.. versionadded:: 0.7
+ Support of "inline" definition inside the :class:`.Table`
+ for :class:`.Index`\ .
+
+The :class:`~sqlalchemy.schema.Index` object also supports its own ``create()`` method:
+
+.. sourcecode:: python+sql
+
+ i = Index('someindex', mytable.c.col5)
+ {sql}i.create(engine)
+ CREATE INDEX someindex ON mytable (col5){stop}
+
+.. _schema_indexes_functional:
+
+Functional Indexes
+~~~~~~~~~~~~~~~~~~~
+
+:class:`.Index` supports SQL and function expressions, as supported by the
+target backend. To create an index against a column using a descending
+value, the :meth:`.ColumnElement.desc` modifier may be used::
+
+ from sqlalchemy import Index
+
+ Index('someindex', mytable.c.somecol.desc())
+
+Or with a backend that supports functional indexes such as Postgresql,
+a "case insensitive" index can be created using the ``lower()`` function::
+
+ from sqlalchemy import func, Index
+
+ Index('someindex', func.lower(mytable.c.somecol))
+
+.. versionadded:: 0.8 :class:`.Index` supports SQL expressions and functions
+ as well as plain columns.
+
+Index API
+---------
+
+.. autoclass:: Index
+ :members:
diff --git a/doc/build/core/ddl.rst b/doc/build/core/ddl.rst
new file mode 100644
index 000000000..cee6f876e
--- /dev/null
+++ b/doc/build/core/ddl.rst
@@ -0,0 +1,287 @@
+.. _metadata_ddl_toplevel:
+.. _metadata_ddl:
+.. module:: sqlalchemy.schema
+
+Customizing DDL
+===============
+
+In the preceding sections we've discussed a variety of schema constructs
+including :class:`~sqlalchemy.schema.Table`,
+:class:`~sqlalchemy.schema.ForeignKeyConstraint`,
+:class:`~sqlalchemy.schema.CheckConstraint`, and
+:class:`~sqlalchemy.schema.Sequence`. Throughout, we've relied upon the
+``create()`` and :func:`~sqlalchemy.schema.MetaData.create_all` methods of
+:class:`~sqlalchemy.schema.Table` and :class:`~sqlalchemy.schema.MetaData` in
+order to issue data definition language (DDL) for all constructs. When issued,
+a pre-determined order of operations is invoked, and DDL to create each table
+is created unconditionally including all constraints and other objects
+associated with it. For more complex scenarios where database-specific DDL is
+required, SQLAlchemy offers two techniques which can be used to add any DDL
+based on any condition, either accompanying the standard generation of tables
+or by itself.
+
+.. _schema_ddl_sequences:
+
+Controlling DDL Sequences
+-------------------------
+
+The ``sqlalchemy.schema`` package contains SQL expression constructs that
+provide DDL expressions. For example, to produce a ``CREATE TABLE`` statement:
+
+.. sourcecode:: python+sql
+
+ from sqlalchemy.schema import CreateTable
+ {sql}engine.execute(CreateTable(mytable))
+ CREATE TABLE mytable (
+ col1 INTEGER,
+ col2 INTEGER,
+ col3 INTEGER,
+ col4 INTEGER,
+ col5 INTEGER,
+ col6 INTEGER
+ ){stop}
+
+Above, the :class:`~sqlalchemy.schema.CreateTable` construct works like any
+other expression construct (such as ``select()``, ``table.insert()``, etc.). A
+full reference of available constructs is in :ref:`schema_api_ddl`.
+
+The DDL constructs all extend a common base class which provides the
+capability to be associated with an individual
+:class:`~sqlalchemy.schema.Table` or :class:`~sqlalchemy.schema.MetaData`
+object, to be invoked upon create/drop events. Consider the example of a table
+which contains a CHECK constraint:
+
+.. sourcecode:: python+sql
+
+ users = Table('users', metadata,
+ Column('user_id', Integer, primary_key=True),
+ Column('user_name', String(40), nullable=False),
+        CheckConstraint('length(user_name) >= 8', name="cst_user_name_length")
+ )
+
+ {sql}users.create(engine)
+ CREATE TABLE users (
+ user_id SERIAL NOT NULL,
+ user_name VARCHAR(40) NOT NULL,
+ PRIMARY KEY (user_id),
+ CONSTRAINT cst_user_name_length CHECK (length(user_name) >= 8)
+ ){stop}
+
+The above table contains a column "user_name" which is subject to a CHECK
+constraint that validates that the length of the string is at least eight
+characters. When a ``create()`` is issued for this table, DDL for the
+:class:`~sqlalchemy.schema.CheckConstraint` will also be issued inline within
+the table definition.
+
+The :class:`~sqlalchemy.schema.CheckConstraint` construct can also be
+constructed externally and associated with the
+:class:`~sqlalchemy.schema.Table` afterwards::
+
+    constraint = CheckConstraint('length(user_name) >= 8', name="cst_user_name_length")
+ users.append_constraint(constraint)
+
+So far, the effect is the same. However, if we create DDL elements
+corresponding to the creation and removal of this constraint, and associate
+them with the :class:`.Table` as events, these new events
+will take over the job of issuing DDL for the constraint. Additionally, the
+constraint will be added via ALTER:
+
+.. sourcecode:: python+sql
+
+ from sqlalchemy import event
+
+ event.listen(
+ users,
+ "after_create",
+ AddConstraint(constraint)
+ )
+ event.listen(
+ users,
+ "before_drop",
+ DropConstraint(constraint)
+ )
+
+ {sql}users.create(engine)
+ CREATE TABLE users (
+ user_id SERIAL NOT NULL,
+ user_name VARCHAR(40) NOT NULL,
+ PRIMARY KEY (user_id)
+ )
+
+ ALTER TABLE users ADD CONSTRAINT cst_user_name_length CHECK (length(user_name) >= 8){stop}
+
+ {sql}users.drop(engine)
+ ALTER TABLE users DROP CONSTRAINT cst_user_name_length
+ DROP TABLE users{stop}
+
+The real usefulness of the above becomes clearer once we illustrate the
+:meth:`.DDLElement.execute_if` method. This method returns a modified form of
+the DDL callable which will filter on criteria before responding to a
+received event. It accepts a parameter ``dialect``, which is the string
+name of a dialect or a tuple of such, which will limit the execution of the
+item to just those dialects. It also accepts a ``callable_`` parameter which
+may reference a Python callable which will be invoked upon event reception,
+returning ``True`` or ``False`` indicating if the event should proceed.
+
+If our :class:`~sqlalchemy.schema.CheckConstraint` was only supported by
+Postgresql and not other databases, we could limit its usage to just that dialect::
+
+ event.listen(
+ users,
+ 'after_create',
+ AddConstraint(constraint).execute_if(dialect='postgresql')
+ )
+ event.listen(
+ users,
+ 'before_drop',
+ DropConstraint(constraint).execute_if(dialect='postgresql')
+ )
+
+Or to any set of dialects::
+
+ event.listen(
+ users,
+ "after_create",
+ AddConstraint(constraint).execute_if(dialect=('postgresql', 'mysql'))
+ )
+ event.listen(
+ users,
+ "before_drop",
+ DropConstraint(constraint).execute_if(dialect=('postgresql', 'mysql'))
+ )
+
+When using a callable, the callable is passed the ddl element, the
+:class:`.Table` or :class:`.MetaData`
+object whose "create" or "drop" event is in progress, and the
+:class:`.Connection` object being used for the
+operation, as well as additional information as keyword arguments. The
+callable can perform checks, such as whether or not a given item already
+exists. Below we define ``should_create()`` and ``should_drop()`` callables
+that check for the presence of our named constraint:
+
+.. sourcecode:: python+sql
+
+ def should_create(ddl, target, connection, **kw):
+ row = connection.execute("select conname from pg_constraint where conname='%s'" % ddl.element.name).scalar()
+ return not bool(row)
+
+ def should_drop(ddl, target, connection, **kw):
+ return not should_create(ddl, target, connection, **kw)
+
+ event.listen(
+ users,
+ "after_create",
+ AddConstraint(constraint).execute_if(callable_=should_create)
+ )
+ event.listen(
+ users,
+ "before_drop",
+ DropConstraint(constraint).execute_if(callable_=should_drop)
+ )
+
+ {sql}users.create(engine)
+ CREATE TABLE users (
+ user_id SERIAL NOT NULL,
+ user_name VARCHAR(40) NOT NULL,
+ PRIMARY KEY (user_id)
+ )
+
+ select conname from pg_constraint where conname='cst_user_name_length'
+ ALTER TABLE users ADD CONSTRAINT cst_user_name_length CHECK (length(user_name) >= 8){stop}
+
+ {sql}users.drop(engine)
+ select conname from pg_constraint where conname='cst_user_name_length'
+ ALTER TABLE users DROP CONSTRAINT cst_user_name_length
+ DROP TABLE users{stop}
+
+Custom DDL
+----------
+
+Custom DDL phrases are most easily achieved using the
+:class:`~sqlalchemy.schema.DDL` construct. This construct works like all the
+other DDL elements except it accepts a string which is the text to be emitted:
+
+.. sourcecode:: python+sql
+
+ event.listen(
+ metadata,
+ "after_create",
+ DDL("ALTER TABLE users ADD CONSTRAINT "
+ "cst_user_name_length "
+ " CHECK (length(user_name) >= 8)")
+ )
+
+A more comprehensive method of creating libraries of DDL constructs is to use
+custom compilation - see :ref:`sqlalchemy.ext.compiler_toplevel` for
+details.
+
+.. _schema_api_ddl:
+
+DDL Expression Constructs API
+-----------------------------
+
+.. autoclass:: DDLElement
+ :members:
+ :undoc-members:
+
+
+.. autoclass:: DDL
+ :members:
+ :undoc-members:
+
+
+.. autoclass:: CreateTable
+ :members:
+ :undoc-members:
+
+
+.. autoclass:: DropTable
+ :members:
+ :undoc-members:
+
+
+.. autoclass:: CreateColumn
+ :members:
+ :undoc-members:
+
+
+.. autoclass:: CreateSequence
+ :members:
+ :undoc-members:
+
+
+.. autoclass:: DropSequence
+ :members:
+ :undoc-members:
+
+
+.. autoclass:: CreateIndex
+ :members:
+ :undoc-members:
+
+
+.. autoclass:: DropIndex
+ :members:
+ :undoc-members:
+
+
+.. autoclass:: AddConstraint
+ :members:
+ :undoc-members:
+
+
+.. autoclass:: DropConstraint
+ :members:
+ :undoc-members:
+
+
+.. autoclass:: CreateSchema
+ :members:
+ :undoc-members:
+
+
+.. autoclass:: DropSchema
+ :members:
+ :undoc-members:
+
+
diff --git a/doc/build/core/defaults.rst b/doc/build/core/defaults.rst
new file mode 100644
index 000000000..166273c18
--- /dev/null
+++ b/doc/build/core/defaults.rst
@@ -0,0 +1,345 @@
+.. _metadata_defaults_toplevel:
+.. _metadata_defaults:
+.. module:: sqlalchemy.schema
+
+Column Insert/Update Defaults
+==============================
+
+SQLAlchemy provides a very rich featureset regarding column level events which
+take place during INSERT and UPDATE statements. Options include:
+
+* Scalar values used as defaults during INSERT and UPDATE operations
+* Python functions which execute upon INSERT and UPDATE operations
+* SQL expressions which are embedded in INSERT statements (or in some cases execute beforehand)
+* SQL expressions which are embedded in UPDATE statements
+* Server side default values used during INSERT
+* Markers for server-side triggers used during UPDATE
+
+The general rule for all insert/update defaults is that they only take effect
+if no value for a particular column is passed as an ``execute()`` parameter;
+otherwise, the given value is used.
+
+Scalar Defaults
+---------------
+
+The simplest kind of default is a scalar value used as the default value of a column::
+
+ Table("mytable", meta,
+ Column("somecolumn", Integer, default=12)
+ )
+
+Above, the value "12" will be bound as the column value during an INSERT if no
+other value is supplied.
+
+A scalar value may also be associated with an UPDATE statement, though this is
+not very common (as UPDATE statements are usually looking for dynamic
+defaults)::
+
+ Table("mytable", meta,
+ Column("somecolumn", Integer, onupdate=25)
+ )
+
+
+Python-Executed Functions
+-------------------------
+
+The ``default`` and ``onupdate`` keyword arguments also accept Python
+functions. These functions are invoked at the time of insert or update if no
+other value for that column is supplied, and the value returned is used for
+the column's value. Below illustrates a crude "sequence" that assigns an
+incrementing counter to a primary key column::
+
+ # a function which counts upwards
+ i = 0
+ def mydefault():
+ global i
+ i += 1
+ return i
+
+ t = Table("mytable", meta,
+ Column('id', Integer, primary_key=True, default=mydefault),
+ )
+
+It should be noted that for real "incrementing sequence" behavior, the
+built-in capabilities of the database should normally be used, which may
+include sequence objects or other autoincrementing capabilities. For primary
+key columns, SQLAlchemy will in most cases use these capabilities
+automatically. See the API documentation for
+:class:`~sqlalchemy.schema.Column` including the ``autoincrement`` flag, as
+well as the section on :class:`~sqlalchemy.schema.Sequence` later in this
+chapter for background on standard primary key generation techniques.
+
+To illustrate onupdate, we assign the Python ``datetime`` function ``now`` to
+the ``onupdate`` attribute::
+
+ import datetime
+
+ t = Table("mytable", meta,
+ Column('id', Integer, primary_key=True),
+
+ # define 'last_updated' to be populated with datetime.now()
+ Column('last_updated', DateTime, onupdate=datetime.datetime.now),
+ )
+
+When an update statement executes and no value is passed for ``last_updated``,
+the ``datetime.datetime.now()`` Python function is executed and its return
+value used as the value for ``last_updated``. Notice that we provide ``now``
+as the function itself without calling it (i.e. there are no parentheses
+following) - SQLAlchemy will execute the function at the time the statement
+executes.
+
+Context-Sensitive Default Functions
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The Python functions used by ``default`` and ``onupdate`` may also make use of
+the current statement's context in order to determine a value. The `context`
+of a statement is an internal SQLAlchemy object which contains all information
+about the statement being executed, including its source expression, the
+parameters associated with it and the cursor. The typical use case for this
+context with regards to default generation is to have access to the other
+values being inserted or updated on the row. To access the context, provide a
+function that accepts a single ``context`` argument::
+
+ def mydefault(context):
+ return context.current_parameters['counter'] + 12
+
+ t = Table('mytable', meta,
+ Column('counter', Integer),
+ Column('counter_plus_twelve', Integer, default=mydefault, onupdate=mydefault)
+ )
+
+Above we illustrate a default function which will execute for all INSERT and
+UPDATE statements where a value for ``counter_plus_twelve`` was otherwise not
+provided, and the value will be that of whatever value is present in the
+execution for the ``counter`` column, plus the number 12.
+
+While the context object passed to the default function has many attributes,
+the ``current_parameters`` member is a special member provided only during the
+execution of a default function for the purposes of deriving defaults from its
+existing values. For a single statement that is executing many sets of bind
+parameters, the user-defined function is called for each set of parameters,
+and ``current_parameters`` will be provided with each individual parameter set
+for each execution.
+
+SQL Expressions
+---------------
+
+The "default" and "onupdate" keywords may also be passed SQL expressions,
+including select statements or direct function calls::
+
+ t = Table("mytable", meta,
+ Column('id', Integer, primary_key=True),
+
+ # define 'create_date' to default to now()
+ Column('create_date', DateTime, default=func.now()),
+
+ # define 'key' to pull its default from the 'keyvalues' table
+        Column('key', String(20), default=keyvalues.select(keyvalues.c.type == 'type1', limit=1)),
+
+ # define 'last_modified' to use the current_timestamp SQL function on update
+ Column('last_modified', DateTime, onupdate=func.utc_timestamp())
+ )
+
+Above, the ``create_date`` column will be populated with the result of the
+``now()`` SQL function (which, depending on backend, compiles into ``NOW()``
+or ``CURRENT_TIMESTAMP`` in most cases) during an INSERT statement, and the
+``key`` column with the result of a SELECT subquery from another table. The
+``last_modified`` column will be populated with the value of
+``UTC_TIMESTAMP()``, a function specific to MySQL, when an UPDATE statement is
+emitted for this table.
+
+Note that when using ``func`` functions, unlike when using Python `datetime`
+functions we *do* call the function, i.e. with parentheses "()" - this is
+because what we want in this case is the return value of the function, which
+is the SQL expression construct that will be rendered into the INSERT or
+UPDATE statement.
+
+The above SQL functions are usually executed "inline" with the INSERT or
+UPDATE statement being executed, meaning, a single statement is executed which
+embeds the given expressions or subqueries within the VALUES or SET clause of
+the statement. Although in some cases, the function is "pre-executed" in a
+SELECT statement of its own beforehand. This happens when all of the following
+is true:
+
+* the column is a primary key column
+* the database dialect does not support a usable ``cursor.lastrowid`` accessor
+ (or equivalent); this currently includes PostgreSQL, Oracle, and Firebird, as
+ well as some MySQL dialects.
+* the dialect does not support the "RETURNING" clause or similar, or the
+ ``implicit_returning`` flag is set to ``False`` for the dialect. Dialects
+ which support RETURNING currently include Postgresql, Oracle, Firebird, and
+ MS-SQL.
+* the statement is a single execution, i.e. only supplies one set of
+ parameters and doesn't use "executemany" behavior
+* the ``inline=True`` flag is not set on the
+ :class:`~sqlalchemy.sql.expression.Insert()` or
+ :class:`~sqlalchemy.sql.expression.Update()` construct, and the statement has
+ not defined an explicit `returning()` clause.
+
+Whether or not the default generation clause "pre-executes" is not something
+that normally needs to be considered, unless it is being addressed for
+performance reasons.
+
+When the statement is executed with a single set of parameters (that is, it is
+not an "executemany" style execution), the returned
+:class:`~sqlalchemy.engine.ResultProxy` will contain a collection
+accessible via ``result.postfetch_cols()`` which contains a list of all
+:class:`~sqlalchemy.schema.Column` objects which had an inline-executed
+default. Similarly, all parameters which were bound to the statement,
+including all Python and SQL expressions which were pre-executed, are present
+in the ``last_inserted_params()`` or ``last_updated_params()`` collections on
+:class:`~sqlalchemy.engine.ResultProxy`. The ``inserted_primary_key``
+collection contains a list of primary key values for the row inserted (a list
+so that single-column and composite-column primary keys are represented in the
+same format).
+
+Server Side Defaults
+--------------------
+
+A variant on the SQL expression default is the ``server_default``, which gets
+placed in the CREATE TABLE statement during a ``create()`` operation:
+
+.. sourcecode:: python+sql
+
+ t = Table('test', meta,
+ Column('abc', String(20), server_default='abc'),
+ Column('created_at', DateTime, server_default=text("sysdate"))
+ )
+
+A create call for the above table will produce::
+
+ CREATE TABLE test (
+ abc varchar(20) default 'abc',
+ created_at datetime default sysdate
+ )
+
+The behavior of ``server_default`` is similar to that of a regular SQL
+default; if it's placed on a primary key column for a database which doesn't
+have a way to "postfetch" the ID, and the statement is not "inlined", the SQL
+expression is pre-executed; otherwise, SQLAlchemy lets the default fire off on
+the database side normally.
+
+
+.. _triggered_columns:
+
+Triggered Columns
+------------------
+
+Columns with values set by a database trigger or other external process may be
+called out using :class:`.FetchedValue` as a marker::
+
+ t = Table('test', meta,
+ Column('abc', String(20), server_default=FetchedValue()),
+ Column('def', String(20), server_onupdate=FetchedValue())
+ )
+
+.. versionchanged:: 0.8.0b2,0.7.10
+ The ``for_update`` argument on :class:`.FetchedValue` is set automatically
+ when specified as the ``server_onupdate`` argument. If using an older version,
+ specify the onupdate above as ``server_onupdate=FetchedValue(for_update=True)``.
+
+These markers do not emit a "default" clause when the table is created,
+however they do set the same internal flags as a static ``server_default``
+clause, providing hints to higher-level tools that a "post-fetch" of these
+rows should be performed after an insert or update.
+
+.. note::
+
+ It's generally not appropriate to use :class:`.FetchedValue` in
+ conjunction with a primary key column, particularly when using the
+ ORM or any other scenario where the :attr:`.ResultProxy.inserted_primary_key`
+    attribute is required.  This is because the "post-fetch" operation requires
+ that the primary key value already be available, so that the
+ row can be selected on its primary key.
+
+ For a server-generated primary key value, all databases provide special
+ accessors or other techniques in order to acquire the "last inserted
+ primary key" column of a table. These mechanisms aren't affected by the presence
+ of :class:`.FetchedValue`. For special situations where triggers are
+ used to generate primary key values, and the database in use does not
+ support the ``RETURNING`` clause, it may be necessary to forego the usage
+ of the trigger and instead apply the SQL expression or function as a
+ "pre execute" expression::
+
+ t = Table('test', meta,
+ Column('abc', MyType, default=func.generate_new_value(), primary_key=True)
+ )
+
+ Where above, when :meth:`.Table.insert` is used,
+ the ``func.generate_new_value()`` expression will be pre-executed
+ in the context of a scalar ``SELECT`` statement, and the new value will
+ be applied to the subsequent ``INSERT``, while at the same time being
+ made available to the :attr:`.ResultProxy.inserted_primary_key`
+ attribute.
+
+
+Defining Sequences
+-------------------
+
+SQLAlchemy represents database sequences using the
+:class:`~sqlalchemy.schema.Sequence` object, which is considered to be a
+special case of "column default". It only has an effect on databases which
+have explicit support for sequences, which currently includes Postgresql,
+Oracle, and Firebird. The :class:`~sqlalchemy.schema.Sequence` object is
+otherwise ignored.
+
+The :class:`~sqlalchemy.schema.Sequence` may be placed on any column as a
+"default" generator to be used during INSERT operations, and can also be
+configured to fire off during UPDATE operations if desired. It is most
+commonly used in conjunction with a single integer primary key column::
+
+ table = Table("cartitems", meta,
+ Column("cart_id", Integer, Sequence('cart_id_seq'), primary_key=True),
+ Column("description", String(40)),
+ Column("createdate", DateTime())
+ )
+
+Where above, the table "cartitems" is associated with a sequence named
+"cart_id_seq". When INSERT statements take place for "cartitems", and no value
+is passed for the "cart_id" column, the "cart_id_seq" sequence will be used to
+generate a value.
+
+When the :class:`~sqlalchemy.schema.Sequence` is associated with a table,
+CREATE and DROP statements issued for that table will also issue CREATE/DROP
+for the sequence object as well, thus "bundling" the sequence object with its
+parent table.
+
+The :class:`~sqlalchemy.schema.Sequence` object also implements special
+functionality to accommodate Postgresql's SERIAL datatype. The SERIAL type in
+PG automatically generates a sequence that is used implicitly during inserts.
+This means that if a :class:`~sqlalchemy.schema.Table` object defines a
+:class:`~sqlalchemy.schema.Sequence` on its primary key column so that it
+works with Oracle and Firebird, the :class:`~sqlalchemy.schema.Sequence` would
+get in the way of the "implicit" sequence that PG would normally use. For this
+use case, add the flag ``optional=True`` to the
+:class:`~sqlalchemy.schema.Sequence` object - this indicates that the
+:class:`~sqlalchemy.schema.Sequence` should only be used if the database
+provides no other option for generating primary key identifiers.
+
+The :class:`~sqlalchemy.schema.Sequence` object also has the ability to be
+executed standalone like a SQL expression, which has the effect of calling its
+"next value" function::
+
+ seq = Sequence('some_sequence')
+ nextid = connection.execute(seq)
+
+Default Objects API
+-------------------
+
+.. autoclass:: ColumnDefault
+
+
+.. autoclass:: DefaultClause
+
+
+.. autoclass:: DefaultGenerator
+
+
+.. autoclass:: FetchedValue
+
+
+.. autoclass:: PassiveDefault
+
+
+.. autoclass:: Sequence
+ :members:
diff --git a/doc/build/core/dml.rst b/doc/build/core/dml.rst
new file mode 100644
index 000000000..3b6949b79
--- /dev/null
+++ b/doc/build/core/dml.rst
@@ -0,0 +1,37 @@
+Insert, Updates, Deletes
+========================
+
+INSERT, UPDATE and DELETE statements build on a hierarchy starting
+with :class:`.UpdateBase`. The :class:`.Insert` and :class:`.Update`
+constructs build on the intermediary :class:`.ValuesBase`.
+
+.. module:: sqlalchemy.sql.expression
+
+.. autofunction:: delete
+
+.. autofunction:: insert
+
+.. autofunction:: update
+
+
+.. autoclass:: Delete
+ :members:
+ :inherited-members:
+
+.. autoclass:: Insert
+ :members:
+ :inherited-members:
+
+.. autoclass:: Update
+ :members:
+ :inherited-members:
+
+.. autoclass:: sqlalchemy.sql.expression.UpdateBase
+ :members:
+ :inherited-members:
+
+.. autoclass:: sqlalchemy.sql.expression.ValuesBase
+ :members:
+
+
+
diff --git a/doc/build/core/engines.rst b/doc/build/core/engines.rst
index eea5041d7..8d34ab5c6 100644
--- a/doc/build/core/engines.rst
+++ b/doc/build/core/engines.rst
@@ -87,6 +87,8 @@ known driver available for that backend (i.e. cx_oracle, pysqlite/sqlite3,
psycopg2, mysqldb). For Jython connections, specify the `zxjdbc` driver, which
is the JDBC-DBAPI bridge included with Jython.
+.. autofunction:: sqlalchemy.engine.url.make_url
+
Postgresql
----------
@@ -125,7 +127,7 @@ More notes on connecting to MySQL at :ref:`mysql_toplevel`.
Oracle
------
-cx_oracle is usualjy used here::
+cx_oracle is usually used here::
engine = create_engine('oracle://scott:tiger@127.0.0.1:1521/sidname')
diff --git a/doc/build/core/event.rst b/doc/build/core/event.rst
index 73d0dab4c..1b873972a 100644
--- a/doc/build/core/event.rst
+++ b/doc/build/core/event.rst
@@ -75,7 +75,7 @@ as long as the names match up::
print("New DBAPI connection:", dbapi_connection)
print("Connection record:", kw['connection_record'])
-Above, the presence of ``**kw`` tells :func:`.event.listen_for` that
+Above, the presence of ``**kw`` tells :func:`.listens_for` that
arguments should be passed to the function by name, rather than positionally.
.. versionadded:: 0.9.0 Added optional ``named`` argument dispatch to
@@ -159,3 +159,6 @@ API Reference
.. autofunction:: sqlalchemy.event.listens_for
+.. autofunction:: sqlalchemy.event.remove
+
+.. autofunction:: sqlalchemy.event.contains
diff --git a/doc/build/core/events.rst b/doc/build/core/events.rst
index f43aa09f6..d52d50c5a 100644
--- a/doc/build/core/events.rst
+++ b/doc/build/core/events.rst
@@ -8,6 +8,9 @@ SQLAlchemy Core.
For an introduction to the event listening API, see :ref:`event_toplevel`.
ORM events are described in :ref:`orm_event_toplevel`.
+.. autoclass:: sqlalchemy.event.base.Events
+ :members:
+
.. versionadded:: 0.7
The event system supercedes the previous system of "extension", "listener",
and "proxy" classes.
diff --git a/doc/build/core/exceptions.rst b/doc/build/core/exceptions.rst
index f7d384ad9..30270f8b0 100644
--- a/doc/build/core/exceptions.rst
+++ b/doc/build/core/exceptions.rst
@@ -2,5 +2,4 @@ Core Exceptions
===============
.. automodule:: sqlalchemy.exc
- :show-inheritance:
:members: \ No newline at end of file
diff --git a/doc/build/core/expression_api.rst b/doc/build/core/expression_api.rst
index b17145c53..99bb98881 100644
--- a/doc/build/core/expression_api.rst
+++ b/doc/build/core/expression_api.rst
@@ -8,249 +8,13 @@ SQL Statements and Expressions API
This section presents the API reference for the SQL Expression Language. For a full introduction to its usage,
see :ref:`sqlexpression_toplevel`.
-Functions
----------
-The expression package uses functions to construct SQL expressions. The return value of each function is an object instance which is a subclass of :class:`~sqlalchemy.sql.expression.ClauseElement`.
-
-.. autofunction:: alias
-
-.. autofunction:: and_
-
-.. autofunction:: asc
-
-.. autofunction:: between
-
-.. autofunction:: bindparam
-
-.. autofunction:: case
-
-.. autofunction:: cast
-
-.. autofunction:: sqlalchemy.sql.expression.column
-
-.. autofunction:: collate
-
-.. autofunction:: delete
-
-.. autofunction:: desc
-
-.. autofunction:: distinct
-
-.. autofunction:: except_
-
-.. autofunction:: except_all
-
-.. autofunction:: exists
-
-.. autofunction:: extract
-
-.. autofunction:: false
-
-.. autodata:: func
-
-.. autofunction:: insert
-
-.. autofunction:: intersect
-
-.. autofunction:: intersect_all
-
-.. autofunction:: join
-
-.. autofunction:: label
-
-.. autofunction:: literal
-
-.. autofunction:: literal_column
-
-.. autofunction:: not_
-
-.. autofunction:: null
-
-.. autofunction:: nullsfirst
-
-.. autofunction:: nullslast
-
-.. autofunction:: or_
-
-.. autofunction:: outparam
-
-.. autofunction:: outerjoin
-
-.. autofunction:: over
-
-.. autofunction:: select
-
-.. autofunction:: subquery
-
-.. autofunction:: sqlalchemy.sql.expression.table
-
-.. autofunction:: text
-
-.. autofunction:: true
-
-.. autofunction:: tuple_
-
-.. autofunction:: type_coerce
-
-.. autofunction:: union
-
-.. autofunction:: union_all
-
-.. autofunction:: update
-
-Classes
--------
-
-.. autoclass:: Alias
- :members:
- :show-inheritance:
- :inherited-members:
-
-.. autoclass:: BinaryExpression
- :members:
- :show-inheritance:
- :inherited-members:
-
-.. autoclass:: BindParameter
- :members:
- :show-inheritance:
- :inherited-members:
-
-.. autoclass:: ClauseElement
- :members:
- :show-inheritance:
-
-.. autoclass:: ClauseList
- :members:
- :show-inheritance:
-
-.. autoclass:: ColumnClause
- :members:
- :show-inheritance:
- :inherited-members:
-
-.. autoclass:: ColumnCollection
- :members:
- :show-inheritance:
-
-.. autoclass:: ColumnElement
- :members:
- :show-inheritance:
- :inherited-members:
-
-.. autoclass:: sqlalchemy.sql.operators.ColumnOperators
- :members:
- :special-members:
- :inherited-members:
- :show-inheritance:
-
-
-.. autoclass:: CompoundSelect
- :members:
- :show-inheritance:
- :inherited-members:
-
-.. autoclass:: sqlalchemy.sql.operators.custom_op
- :members:
-
-.. autoclass:: CTE
- :members:
- :show-inheritance:
- :inherited-members:
-
-.. autoclass:: Delete
- :members:
- :show-inheritance:
- :inherited-members:
-
-.. autoclass:: Executable
- :members:
- :show-inheritance:
-
-.. autoclass:: FunctionElement
- :members:
- :show-inheritance:
- :inherited-members:
-
-.. autoclass:: Function
- :members:
- :show-inheritance:
- :inherited-members:
-
-.. autoclass:: FromClause
- :members:
- :show-inheritance:
-
-.. autoclass:: Insert
- :members:
- :show-inheritance:
- :inherited-members:
-
-.. autoclass:: Join
- :members:
- :show-inheritance:
- :inherited-members:
-
-.. autoclass:: sqlalchemy.sql.operators.Operators
- :members:
- :special-members:
-
-.. autoclass:: Select
- :members:
- :show-inheritance:
- :inherited-members:
-
-.. autoclass:: Selectable
- :members:
- :show-inheritance:
-
-.. autoclass:: SelectBase
- :members:
- :show-inheritance:
-
-.. autoclass:: TableClause
- :members:
- :show-inheritance:
- :inherited-members:
-
-.. autoclass:: UnaryExpression
- :members:
- :show-inheritance:
-
-.. autoclass:: Update
- :members:
- :inherited-members:
- :show-inheritance:
-
-.. autoclass:: UpdateBase
- :members:
- :show-inheritance:
-
-.. autoclass:: ValuesBase
- :members:
- :show-inheritance:
-
-.. _generic_functions:
-
-Generic Functions
------------------
-
-SQL functions which are known to SQLAlchemy with regards to database-specific
-rendering, return types and argument behavior. Generic functions are invoked
-like all SQL functions, using the :attr:`func` attribute::
-
- select([func.count()]).select_from(sometable)
-
-Note that any name not known to :attr:`func` generates the function name as is
-- there is no restriction on what SQL functions can be called, known or
-unknown to SQLAlchemy, built-in or user defined. The section here only
-describes those functions where SQLAlchemy already knows what argument and
-return types are in use.
-
-.. automodule:: sqlalchemy.sql.functions
- :members:
- :undoc-members:
- :show-inheritance:
+.. toctree::
+ :maxdepth: 1
+ sqlelement
+ selectable
+ dml
+ functions
+ types
diff --git a/doc/build/core/functions.rst b/doc/build/core/functions.rst
new file mode 100644
index 000000000..d284d125f
--- /dev/null
+++ b/doc/build/core/functions.rst
@@ -0,0 +1,27 @@
+.. _functions_toplevel:
+.. _generic_functions:
+
+=========================
+SQL and Generic Functions
+=========================
+
+.. module:: sqlalchemy.sql.expression
+
+SQL functions which are known to SQLAlchemy with regards to database-specific
+rendering, return types and argument behavior. Generic functions are invoked
+like all SQL functions, using the :attr:`func` attribute::
+
+ select([func.count()]).select_from(sometable)
+
+Note that any name not known to :attr:`func` generates the function name as is
+- there is no restriction on what SQL functions can be called, known or
+unknown to SQLAlchemy, built-in or user defined. The section here only
+describes those functions where SQLAlchemy already knows what argument and
+return types are in use.
+
+.. automodule:: sqlalchemy.sql.functions
+ :members:
+ :undoc-members:
+
+
+
diff --git a/doc/build/core/index.rst b/doc/build/core/index.rst
index 079a4b97a..210f28412 100644
--- a/doc/build/core/index.rst
+++ b/doc/build/core/index.rst
@@ -13,11 +13,10 @@ Language provides a schema-centric usage paradigm.
tutorial
expression_api
+ schema
engines
connections
pooling
- schema
- types
event
events
compiler
diff --git a/doc/build/core/internals.rst b/doc/build/core/internals.rst
index 64dc34183..1a85e9e6c 100644
--- a/doc/build/core/internals.rst
+++ b/doc/build/core/internals.rst
@@ -12,12 +12,10 @@ Some key internal constructs are listed here.
.. autoclass:: sqlalchemy.sql.compiler.DDLCompiler
:members:
- :show-inheritance:
:inherited-members:
.. autoclass:: sqlalchemy.engine.default.DefaultDialect
:members:
- :show-inheritance:
:inherited-members:
.. autoclass:: sqlalchemy.engine.interfaces.Dialect
@@ -25,17 +23,17 @@ Some key internal constructs are listed here.
.. autoclass:: sqlalchemy.engine.default.DefaultExecutionContext
:members:
- :show-inheritance:
+
.. autoclass:: sqlalchemy.engine.interfaces.ExecutionContext
:members:
- :show-inheritance:
+
.. autoclass:: sqlalchemy.sql.compiler.IdentifierPreparer
:members:
- :show-inheritance:
+
.. autoclass:: sqlalchemy.sql.compiler.SQLCompiler
:members:
- :show-inheritance:
+
diff --git a/doc/build/core/metadata.rst b/doc/build/core/metadata.rst
new file mode 100644
index 000000000..d6fc8c6af
--- /dev/null
+++ b/doc/build/core/metadata.rst
@@ -0,0 +1,330 @@
+.. _metadata_toplevel:
+
+.. _metadata_describing_toplevel:
+
+.. _metadata_describing:
+
+==================================
+Describing Databases with MetaData
+==================================
+
+.. module:: sqlalchemy.schema
+
+This section discusses the fundamental :class:`.Table`, :class:`.Column`
+and :class:`.MetaData` objects.
+
+A collection of metadata entities is stored in an object aptly named
+:class:`~sqlalchemy.schema.MetaData`::
+
+ from sqlalchemy import *
+
+ metadata = MetaData()
+
+:class:`~sqlalchemy.schema.MetaData` is a container object that keeps together
+many different features of a database (or multiple databases) being described.
+
+To represent a table, use the :class:`~sqlalchemy.schema.Table` class. Its two
+primary arguments are the table name, then the
+:class:`~sqlalchemy.schema.MetaData` object which it will be associated with.
+The remaining positional arguments are mostly
+:class:`~sqlalchemy.schema.Column` objects describing each column::
+
+ user = Table('user', metadata,
+ Column('user_id', Integer, primary_key = True),
+ Column('user_name', String(16), nullable = False),
+ Column('email_address', String(60)),
+ Column('password', String(20), nullable = False)
+ )
+
+Above, a table called ``user`` is described, which contains four columns. The
+primary key of the table consists of the ``user_id`` column. Multiple columns
+may be assigned the ``primary_key=True`` flag which denotes a multi-column
+primary key, known as a *composite* primary key.
+
+Note also that each column describes its datatype using objects corresponding
+to genericized types, such as :class:`~sqlalchemy.types.Integer` and
+:class:`~sqlalchemy.types.String`. SQLAlchemy features dozens of types of
+varying levels of specificity as well as the ability to create custom types.
+Documentation on the type system can be found at :ref:`types`.
+
+Accessing Tables and Columns
+----------------------------
+
+The :class:`~sqlalchemy.schema.MetaData` object contains all of the schema
+constructs we've associated with it. It supports a few methods of accessing
+these table objects, such as the ``sorted_tables`` accessor which returns a
+list of each :class:`~sqlalchemy.schema.Table` object in order of foreign key
+dependency (that is, each table is preceded by all tables which it
+references)::
+
+ >>> for t in metadata.sorted_tables:
+ ... print t.name
+ user
+ user_preference
+ invoice
+ invoice_item
+
+In most cases, individual :class:`~sqlalchemy.schema.Table` objects have been
+explicitly declared, and these objects are typically accessed directly as
+module-level variables in an application. Once a
+:class:`~sqlalchemy.schema.Table` has been defined, it has a full set of
+accessors which allow inspection of its properties. Given the following
+:class:`~sqlalchemy.schema.Table` definition::
+
+ employees = Table('employees', metadata,
+ Column('employee_id', Integer, primary_key=True),
+ Column('employee_name', String(60), nullable=False),
+ Column('employee_dept', Integer, ForeignKey("departments.department_id"))
+ )
+
+Note the :class:`~sqlalchemy.schema.ForeignKey` object used in this table -
+this construct defines a reference to a remote table, and is fully described
+in :ref:`metadata_foreignkeys`. Methods of accessing information about this
+table include::
+
+ # access the column "EMPLOYEE_ID":
+ employees.columns.employee_id
+
+ # or just
+ employees.c.employee_id
+
+ # via string
+ employees.c['employee_id']
+
+ # iterate through all columns
+ for c in employees.c:
+ print c
+
+ # get the table's primary key columns
+ for primary_key in employees.primary_key:
+ print primary_key
+
+ # get the table's foreign key objects:
+ for fkey in employees.foreign_keys:
+ print fkey
+
+ # access the table's MetaData:
+ employees.metadata
+
+ # access the table's bound Engine or Connection, if its MetaData is bound:
+ employees.bind
+
+ # access a column's name, type, nullable, primary key, foreign key
+ employees.c.employee_id.name
+ employees.c.employee_id.type
+ employees.c.employee_id.nullable
+ employees.c.employee_id.primary_key
+ employees.c.employee_dept.foreign_keys
+
+ # get the "key" of a column, which defaults to its name, but can
+ # be any user-defined string:
+ employees.c.employee_name.key
+
+ # access a column's table:
+ employees.c.employee_id.table is employees
+
+ # get the table related by a foreign key
+ list(employees.c.employee_dept.foreign_keys)[0].column.table
+
+Creating and Dropping Database Tables
+-------------------------------------
+
+Once you've defined some :class:`~sqlalchemy.schema.Table` objects, assuming
+you're working with a brand new database one thing you might want to do is
+issue CREATE statements for those tables and their related constructs (as an
+aside, it's also quite possible that you *don't* want to do this, if you
+already have some preferred methodology such as tools included with your
+database or an existing scripting system - if that's the case, feel free to
+skip this section - SQLAlchemy has no requirement that it be used to create
+your tables).
+
+The usual way to issue CREATE is to use
+:func:`~sqlalchemy.schema.MetaData.create_all` on the
+:class:`~sqlalchemy.schema.MetaData` object. This method will issue queries
+that first check for the existence of each individual table, and if not found
+will issue the CREATE statements:
+
+ .. sourcecode:: python+sql
+
+ engine = create_engine('sqlite:///:memory:')
+
+ metadata = MetaData()
+
+ user = Table('user', metadata,
+ Column('user_id', Integer, primary_key = True),
+ Column('user_name', String(16), nullable = False),
+ Column('email_address', String(60), key='email'),
+ Column('password', String(20), nullable = False)
+ )
+
+ user_prefs = Table('user_prefs', metadata,
+ Column('pref_id', Integer, primary_key=True),
+ Column('user_id', Integer, ForeignKey("user.user_id"), nullable=False),
+ Column('pref_name', String(40), nullable=False),
+ Column('pref_value', String(100))
+ )
+
+ {sql}metadata.create_all(engine)
+ PRAGMA table_info(user){}
+ CREATE TABLE user(
+ user_id INTEGER NOT NULL PRIMARY KEY,
+ user_name VARCHAR(16) NOT NULL,
+ email_address VARCHAR(60),
+ password VARCHAR(20) NOT NULL
+ )
+ PRAGMA table_info(user_prefs){}
+ CREATE TABLE user_prefs(
+ pref_id INTEGER NOT NULL PRIMARY KEY,
+ user_id INTEGER NOT NULL REFERENCES user(user_id),
+ pref_name VARCHAR(40) NOT NULL,
+ pref_value VARCHAR(100)
+ )
+
+:func:`~sqlalchemy.schema.MetaData.create_all` creates foreign key constraints
+between tables usually inline with the table definition itself, and for this
+reason it also generates the tables in order of their dependency. There are
+options to change this behavior such that ``ALTER TABLE`` is used instead.
+
+Dropping all tables is similarly achieved using the
+:func:`~sqlalchemy.schema.MetaData.drop_all` method. This method does the
+exact opposite of :func:`~sqlalchemy.schema.MetaData.create_all` - the
+presence of each table is checked first, and tables are dropped in reverse
+order of dependency.
+
+Creating and dropping individual tables can be done via the ``create()`` and
+``drop()`` methods of :class:`~sqlalchemy.schema.Table`. These methods by
+default issue the CREATE or DROP regardless of the table being present:
+
+.. sourcecode:: python+sql
+
+ engine = create_engine('sqlite:///:memory:')
+
+ meta = MetaData()
+
+ employees = Table('employees', meta,
+ Column('employee_id', Integer, primary_key=True),
+ Column('employee_name', String(60), nullable=False, key='name'),
+ Column('employee_dept', Integer, ForeignKey("departments.department_id"))
+ )
+ {sql}employees.create(engine)
+ CREATE TABLE employees(
+ employee_id SERIAL NOT NULL PRIMARY KEY,
+ employee_name VARCHAR(60) NOT NULL,
+ employee_dept INTEGER REFERENCES departments(department_id)
+ )
+ {}
+
+``drop()`` method:
+
+.. sourcecode:: python+sql
+
+ {sql}employees.drop(engine)
+ DROP TABLE employees
+ {}
+
+To enable the "check first for the table existing" logic, add the
+``checkfirst=True`` argument to ``create()`` or ``drop()``::
+
+ employees.create(engine, checkfirst=True)
+    employees.drop(engine, checkfirst=True)
+
+.. _schema_migrations:
+
+Altering Schemas through Migrations
+-----------------------------------
+
+While SQLAlchemy directly supports emitting CREATE and DROP statements for schema
+constructs, the ability to alter those constructs, usually via the ALTER statement
+as well as other database-specific constructs, is outside of the scope of SQLAlchemy
+itself. While it's easy enough to emit ALTER statements and similar by hand,
+such as by passing a string to :meth:`.Connection.execute` or by using the
+:class:`.DDL` construct, it's a common practice to automate the maintenance of
+database schemas in relation to application code using schema migration tools.
+
+There are two major migration tools available for SQLAlchemy:
+
+* `Alembic <http://alembic.readthedocs.org>`_ - Written by the author of SQLAlchemy,
+ Alembic features a highly customizable environment and a minimalistic usage pattern,
+ supporting such features as transactional DDL, automatic generation of "candidate"
+ migrations, an "offline" mode which generates SQL scripts, and support for branch
+ resolution.
+* `SQLAlchemy-Migrate <http://code.google.com/p/sqlalchemy-migrate/>`_ - The original
+ migration tool for SQLAlchemy, SQLAlchemy-Migrate is widely used and continues
+ under active development. SQLAlchemy-Migrate includes features such as
+ SQL script generation, ORM class generation, ORM model comparison, and extensive
+ support for SQLite migrations.
+
+
+Specifying the Schema Name
+---------------------------
+
+Some databases support the concept of multiple schemas. A
+:class:`~sqlalchemy.schema.Table` can reference this by specifying the
+``schema`` keyword argument::
+
+ financial_info = Table('financial_info', meta,
+ Column('id', Integer, primary_key=True),
+ Column('value', String(100), nullable=False),
+ schema='remote_banks'
+ )
+
+Within the :class:`~sqlalchemy.schema.MetaData` collection, this table will be
+identified by the combination of ``financial_info`` and ``remote_banks``. If
+another table called ``financial_info`` is referenced without the
+``remote_banks`` schema, it will refer to a different
+:class:`~sqlalchemy.schema.Table`. :class:`~sqlalchemy.schema.ForeignKey`
+objects can specify references to columns in this table using the form
+``remote_banks.financial_info.id``.
+
+The ``schema`` argument should be used for any name qualifiers required,
+including Oracle's "owner" attribute and similar. It also can accommodate a
+dotted name for longer schemes::
+
+ schema="dbo.scott"
+
+Backend-Specific Options
+------------------------
+
+:class:`~sqlalchemy.schema.Table` supports database-specific options. For
+example, MySQL has different table backend types, including "MyISAM" and
+"InnoDB". This can be expressed with :class:`~sqlalchemy.schema.Table` using
+``mysql_engine``::
+
+ addresses = Table('engine_email_addresses', meta,
+ Column('address_id', Integer, primary_key = True),
+ Column('remote_user_id', Integer, ForeignKey(users.c.user_id)),
+ Column('email_address', String(20)),
+ mysql_engine='InnoDB'
+ )
+
+Other backends may support table-level options as well - these would be
+described in the individual documentation sections for each dialect.
+
+Column, Table, MetaData API
+---------------------------
+
+.. autoclass:: Column
+ :members:
+ :inherited-members:
+ :undoc-members:
+
+
+.. autoclass:: MetaData
+ :members:
+ :undoc-members:
+
+
+.. autoclass:: SchemaItem
+ :members:
+
+.. autoclass:: Table
+ :members:
+ :inherited-members:
+ :undoc-members:
+
+
+.. autoclass:: ThreadLocalMetaData
+ :members:
+ :undoc-members:
+
+
diff --git a/doc/build/core/pooling.rst b/doc/build/core/pooling.rst
index eb5463075..fcd8fd55c 100644
--- a/doc/build/core/pooling.rst
+++ b/doc/build/core/pooling.rst
@@ -282,6 +282,51 @@ server at the point at which the script pauses for input::
print c.execute("select 1").fetchall()
c.close()
+.. _pool_connection_invalidation:
+
+More on Invalidation
+^^^^^^^^^^^^^^^^^^^^
+
+The :class:`.Pool` provides "connection invalidation" services which allow
+both explicit invalidation of a connection as well as automatic invalidation
+in response to conditions that are determined to render a connection unusable.
+
+"Invalidation" means that a particular DBAPI connection is removed from the
+pool and discarded. The ``.close()`` method is called on this connection
+unless it is known that the connection is already closed; if this call
+fails, the exception is logged but the operation still proceeds.
+
+When using a :class:`.Engine`, the :meth:`.Connection.invalidate` method is
+the usual entrypoint to explicit invalidation. Other conditions by which
+a DBAPI connection might be invalidated include:
+
+* a DBAPI exception such as :class:`.OperationalError`, raised when a
+ method like ``connection.execute()`` is called, is detected as indicating
+ a so-called "disconnect" condition. As the Python DBAPI provides no
+ standard system for determining the nature of an exception, all SQLAlchemy
+ dialects include a system called ``is_disconnect()`` which will examine
+ the contents of an exception object, including the string message and
+ any potential error codes included with it, in order to determine if this
+ exception indicates that the connection is no longer usable. If this is the
+ case, the :meth:`._ConnectionFairy.invalidate` method is called and the
+ DBAPI connection is then discarded.
+
+* When the connection is returned to the pool, and
+ calling the ``connection.rollback()`` or ``connection.commit()`` methods,
+ as dictated by the pool's "reset on return" behavior, throws an exception.
+ A final attempt at calling ``.close()`` on the connection will be made,
+ and it is then discarded.
+
+* When a listener implementing :meth:`.PoolEvents.checkout` raises the
+ :class:`~sqlalchemy.exc.DisconnectionError` exception, indicating that the connection
+ won't be usable and a new connection attempt needs to be made.
+
+All invalidations which occur will invoke the :meth:`.PoolEvents.invalidate`
+event.
+
+
+
+
API Documentation - Available Pool Implementations
---------------------------------------------------
@@ -291,25 +336,33 @@ API Documentation - Available Pool Implementations
.. automethod:: connect
.. automethod:: dispose
.. automethod:: recreate
+ .. automethod:: unique_connection
.. autoclass:: sqlalchemy.pool.QueuePool
- :show-inheritance:
.. automethod:: __init__
+ .. automethod:: connect
+ .. automethod:: unique_connection
.. autoclass:: SingletonThreadPool
- :show-inheritance:
.. automethod:: __init__
.. autoclass:: AssertionPool
- :show-inheritance:
+
.. autoclass:: NullPool
- :show-inheritance:
+
.. autoclass:: StaticPool
- :show-inheritance:
+
+.. autoclass:: _ConnectionFairy
+ :members:
+
+ .. autoattribute:: _connection_record
+
+.. autoclass:: _ConnectionRecord
+ :members:
Pooling Plain DB-API Connections
diff --git a/doc/build/core/reflection.rst b/doc/build/core/reflection.rst
new file mode 100644
index 000000000..952f48f74
--- /dev/null
+++ b/doc/build/core/reflection.rst
@@ -0,0 +1,168 @@
+.. module:: sqlalchemy.schema
+
+.. _metadata_reflection_toplevel:
+.. _metadata_reflection:
+
+
+Reflecting Database Objects
+===========================
+
+A :class:`~sqlalchemy.schema.Table` object can be instructed to load
+information about itself from the corresponding database schema object already
+existing within the database. This process is called *reflection*. In the
+most simple case you need only specify the table name, a :class:`~sqlalchemy.schema.MetaData`
+object, and the ``autoload=True`` flag. If the
+:class:`~sqlalchemy.schema.MetaData` is not persistently bound, also add the
+``autoload_with`` argument::
+
+ >>> messages = Table('messages', meta, autoload=True, autoload_with=engine)
+ >>> [c.name for c in messages.columns]
+ ['message_id', 'message_name', 'date']
+
+The above operation will use the given engine to query the database for
+information about the ``messages`` table, and will then generate
+:class:`~sqlalchemy.schema.Column`, :class:`~sqlalchemy.schema.ForeignKey`,
+and other objects corresponding to this information as though the
+:class:`~sqlalchemy.schema.Table` object were hand-constructed in Python.
+
+When tables are reflected, if a given table references another one via foreign
+key, a second :class:`~sqlalchemy.schema.Table` object is created within the
+:class:`~sqlalchemy.schema.MetaData` object representing the connection.
+Below, assume the table ``shopping_cart_items`` references a table named
+``shopping_carts``. Reflecting the ``shopping_cart_items`` table has the
+effect such that the ``shopping_carts`` table will also be loaded::
+
+ >>> shopping_cart_items = Table('shopping_cart_items', meta, autoload=True, autoload_with=engine)
+    >>> 'shopping_carts' in meta.tables
+ True
+
+The :class:`~sqlalchemy.schema.MetaData` has an interesting "singleton-like"
+behavior such that if you requested both tables individually,
+:class:`~sqlalchemy.schema.MetaData` will ensure that exactly one
+:class:`~sqlalchemy.schema.Table` object is created for each distinct table
+name. The :class:`~sqlalchemy.schema.Table` constructor actually returns to
+you the already-existing :class:`~sqlalchemy.schema.Table` object if one
+already exists with the given name. Such as below, we can access the already
+generated ``shopping_carts`` table just by naming it::
+
+ shopping_carts = Table('shopping_carts', meta)
+
+Of course, it's a good idea to use ``autoload=True`` with the above table
+regardless. This is so that the table's attributes will be loaded if they have
+not been already. The autoload operation only occurs for the table if it
+hasn't already been loaded; once loaded, new calls to
+:class:`~sqlalchemy.schema.Table` with the same name will not re-issue any
+reflection queries.
+
+Overriding Reflected Columns
+-----------------------------
+
+Individual columns can be overridden with explicit values when reflecting
+tables; this is handy for specifying custom datatypes, constraints such as
+primary keys that may not be configured within the database, etc.::
+
+ >>> mytable = Table('mytable', meta,
+ ... Column('id', Integer, primary_key=True), # override reflected 'id' to have primary key
+ ... Column('mydata', Unicode(50)), # override reflected 'mydata' to be Unicode
+ ... autoload=True)
+
+Reflecting Views
+-----------------
+
+The reflection system can also reflect views. Basic usage is the same as that
+of a table::
+
+ my_view = Table("some_view", metadata, autoload=True)
+
+Above, ``my_view`` is a :class:`~sqlalchemy.schema.Table` object with
+:class:`~sqlalchemy.schema.Column` objects representing the names and types of
+each column within the view "some_view".
+
+Usually, it's desired to have at least a primary key constraint when
+reflecting a view, if not foreign keys as well. View reflection doesn't
+extrapolate these constraints.
+
+Use the "override" technique for this, specifying explicitly those columns
+which are part of the primary key or have foreign key constraints::
+
+ my_view = Table("some_view", metadata,
+ Column("view_id", Integer, primary_key=True),
+ Column("related_thing", Integer, ForeignKey("othertable.thing_id")),
+ autoload=True
+ )
+
+Reflecting All Tables at Once
+-----------------------------
+
+The :class:`~sqlalchemy.schema.MetaData` object can also get a listing of
+tables and reflect the full set. This is achieved by using the
+:func:`~sqlalchemy.schema.MetaData.reflect` method. After calling it, all
+located tables are present within the :class:`~sqlalchemy.schema.MetaData`
+object's dictionary of tables::
+
+ meta = MetaData()
+ meta.reflect(bind=someengine)
+ users_table = meta.tables['users']
+ addresses_table = meta.tables['addresses']
+
+``metadata.reflect()`` also provides a handy way to clear or delete all the rows in a database::
+
+ meta = MetaData()
+ meta.reflect(bind=someengine)
+ for table in reversed(meta.sorted_tables):
+ someengine.execute(table.delete())
+
+.. _metadata_reflection_inspector:
+
+Fine Grained Reflection with Inspector
+--------------------------------------
+
+A low level interface which provides a backend-agnostic system of loading
+lists of schema, table, column, and constraint descriptions from a given
+database is also available. This is known as the "Inspector"::
+
+ from sqlalchemy import create_engine
+ from sqlalchemy.engine import reflection
+ engine = create_engine('...')
+ insp = reflection.Inspector.from_engine(engine)
+ print insp.get_table_names()
+
+.. autoclass:: sqlalchemy.engine.reflection.Inspector
+ :members:
+ :undoc-members:
+
+Limitations of Reflection
+-------------------------
+
+It's important to note that the reflection process recreates :class:`.Table`
+metadata using only information which is represented in the relational database.
+This process by definition cannot restore aspects of a schema that aren't
+actually stored in the database. State which is not available from reflection
+includes but is not limited to:
+
+* Client side defaults, either Python functions or SQL expressions defined using
+ the ``default`` keyword of :class:`.Column` (note this is separate from ``server_default``,
+ which specifically is what's available via reflection).
+
+* Column information, e.g. data that might have been placed into the
+ :attr:`.Column.info` dictionary
+
+* The value of the ``.quote`` setting for :class:`.Column` or :class:`.Table`
+
+* The association of a particular :class:`.Sequence` with a given :class:`.Column`
+
+The relational database also in many cases reports on table metadata in a
+different format than what was specified in SQLAlchemy. The :class:`.Table`
+objects returned from reflection cannot always be relied upon to produce the identical
+DDL as the original Python-defined :class:`.Table` objects. Areas where
+this occurs includes server defaults, column-associated sequences and various
+idiosyncrasies regarding constraints and datatypes. Server side defaults may
+be returned with cast directives (typically Postgresql will include a ``::<type>``
+cast) or different quoting patterns than originally specified.
+
+Another category of limitation includes schema structures for which reflection
+is only partially or not yet defined. Recent improvements to reflection allow
+things like views, indexes and foreign key options to be reflected. As of this
+writing, structures like CHECK constraints, table comments, and triggers are
+not reflected.
+
diff --git a/doc/build/core/schema.rst b/doc/build/core/schema.rst
index b2caf870f..aeb04be18 100644
--- a/doc/build/core/schema.rst
+++ b/doc/build/core/schema.rst
@@ -1,4 +1,4 @@
-.. _metadata_toplevel:
+.. _schema_toplevel:
==========================
Schema Definition Language
@@ -6,11 +6,8 @@ Schema Definition Language
.. module:: sqlalchemy.schema
-
-.. _metadata_describing:
-
-Describing Databases with MetaData
-==================================
+This section references SQLAlchemy **schema metadata**, a comprehensive system of describing and inspecting
+database schemas.
The core of SQLAlchemy's query and object mapping operations are supported by
*database metadata*, which is comprised of Python objects that describe tables
@@ -35,1453 +32,14 @@ designed to be used in a *declarative* style which closely resembles that of
real DDL. They are therefore most intuitive to those who have some background
in creating real schema generation scripts.
-A collection of metadata entities is stored in an object aptly named
-:class:`~sqlalchemy.schema.MetaData`::
-
- from sqlalchemy import *
-
- metadata = MetaData()
-
-:class:`~sqlalchemy.schema.MetaData` is a container object that keeps together
-many different features of a database (or multiple databases) being described.
-
-To represent a table, use the :class:`~sqlalchemy.schema.Table` class. Its two
-primary arguments are the table name, then the
-:class:`~sqlalchemy.schema.MetaData` object which it will be associated with.
-The remaining positional arguments are mostly
-:class:`~sqlalchemy.schema.Column` objects describing each column::
-
- user = Table('user', metadata,
- Column('user_id', Integer, primary_key = True),
- Column('user_name', String(16), nullable = False),
- Column('email_address', String(60)),
- Column('password', String(20), nullable = False)
- )
-
-Above, a table called ``user`` is described, which contains four columns. The
-primary key of the table consists of the ``user_id`` column. Multiple columns
-may be assigned the ``primary_key=True`` flag which denotes a multi-column
-primary key, known as a *composite* primary key.
-
-Note also that each column describes its datatype using objects corresponding
-to genericized types, such as :class:`~sqlalchemy.types.Integer` and
-:class:`~sqlalchemy.types.String`. SQLAlchemy features dozens of types of
-varying levels of specificity as well as the ability to create custom types.
-Documentation on the type system can be found at :ref:`types`.
-
-Accessing Tables and Columns
-----------------------------
-
-The :class:`~sqlalchemy.schema.MetaData` object contains all of the schema
-constructs we've associated with it. It supports a few methods of accessing
-these table objects, such as the ``sorted_tables`` accessor which returns a
-list of each :class:`~sqlalchemy.schema.Table` object in order of foreign key
-dependency (that is, each table is preceded by all tables which it
-references)::
-
- >>> for t in metadata.sorted_tables:
- ... print t.name
- user
- user_preference
- invoice
- invoice_item
-
-In most cases, individual :class:`~sqlalchemy.schema.Table` objects have been
-explicitly declared, and these objects are typically accessed directly as
-module-level variables in an application. Once a
-:class:`~sqlalchemy.schema.Table` has been defined, it has a full set of
-accessors which allow inspection of its properties. Given the following
-:class:`~sqlalchemy.schema.Table` definition::
-
- employees = Table('employees', metadata,
- Column('employee_id', Integer, primary_key=True),
- Column('employee_name', String(60), nullable=False),
- Column('employee_dept', Integer, ForeignKey("departments.department_id"))
- )
-
-Note the :class:`~sqlalchemy.schema.ForeignKey` object used in this table -
-this construct defines a reference to a remote table, and is fully described
-in :ref:`metadata_foreignkeys`. Methods of accessing information about this
-table include::
-
- # access the column "EMPLOYEE_ID":
- employees.columns.employee_id
-
- # or just
- employees.c.employee_id
-
- # via string
- employees.c['employee_id']
-
- # iterate through all columns
- for c in employees.c:
- print c
-
- # get the table's primary key columns
- for primary_key in employees.primary_key:
- print primary_key
-
- # get the table's foreign key objects:
- for fkey in employees.foreign_keys:
- print fkey
-
- # access the table's MetaData:
- employees.metadata
-
- # access the table's bound Engine or Connection, if its MetaData is bound:
- employees.bind
-
- # access a column's name, type, nullable, primary key, foreign key
- employees.c.employee_id.name
- employees.c.employee_id.type
- employees.c.employee_id.nullable
- employees.c.employee_id.primary_key
- employees.c.employee_dept.foreign_keys
-
- # get the "key" of a column, which defaults to its name, but can
- # be any user-defined string:
- employees.c.employee_name.key
-
- # access a column's table:
- employees.c.employee_id.table is employees
-
- # get the table related by a foreign key
- list(employees.c.employee_dept.foreign_keys)[0].column.table
-
-Creating and Dropping Database Tables
--------------------------------------
-
-Once you've defined some :class:`~sqlalchemy.schema.Table` objects, assuming
-you're working with a brand new database one thing you might want to do is
-issue CREATE statements for those tables and their related constructs (as an
-aside, it's also quite possible that you *don't* want to do this, if you
-already have some preferred methodology such as tools included with your
-database or an existing scripting system - if that's the case, feel free to
-skip this section - SQLAlchemy has no requirement that it be used to create
-your tables).
-
-The usual way to issue CREATE is to use
-:func:`~sqlalchemy.schema.MetaData.create_all` on the
-:class:`~sqlalchemy.schema.MetaData` object. This method will issue queries
-that first check for the existence of each individual table, and if not found
-will issue the CREATE statements:
-
- .. sourcecode:: python+sql
-
- engine = create_engine('sqlite:///:memory:')
-
- metadata = MetaData()
-
- user = Table('user', metadata,
- Column('user_id', Integer, primary_key = True),
- Column('user_name', String(16), nullable = False),
- Column('email_address', String(60), key='email'),
- Column('password', String(20), nullable = False)
- )
-
- user_prefs = Table('user_prefs', metadata,
- Column('pref_id', Integer, primary_key=True),
- Column('user_id', Integer, ForeignKey("user.user_id"), nullable=False),
- Column('pref_name', String(40), nullable=False),
- Column('pref_value', String(100))
- )
-
- {sql}metadata.create_all(engine)
- PRAGMA table_info(user){}
- CREATE TABLE user(
- user_id INTEGER NOT NULL PRIMARY KEY,
- user_name VARCHAR(16) NOT NULL,
- email_address VARCHAR(60),
- password VARCHAR(20) NOT NULL
- )
- PRAGMA table_info(user_prefs){}
- CREATE TABLE user_prefs(
- pref_id INTEGER NOT NULL PRIMARY KEY,
- user_id INTEGER NOT NULL REFERENCES user(user_id),
- pref_name VARCHAR(40) NOT NULL,
- pref_value VARCHAR(100)
- )
-
-:func:`~sqlalchemy.schema.MetaData.create_all` creates foreign key constraints
-between tables usually inline with the table definition itself, and for this
-reason it also generates the tables in order of their dependency. There are
-options to change this behavior such that ``ALTER TABLE`` is used instead.
-
-Dropping all tables is similarly achieved using the
-:func:`~sqlalchemy.schema.MetaData.drop_all` method. This method does the
-exact opposite of :func:`~sqlalchemy.schema.MetaData.create_all` - the
-presence of each table is checked first, and tables are dropped in reverse
-order of dependency.
-
-Creating and dropping individual tables can be done via the ``create()`` and
-``drop()`` methods of :class:`~sqlalchemy.schema.Table`. These methods by
-default issue the CREATE or DROP regardless of the table being present:
-
-.. sourcecode:: python+sql
-
- engine = create_engine('sqlite:///:memory:')
-
- meta = MetaData()
-
- employees = Table('employees', meta,
- Column('employee_id', Integer, primary_key=True),
- Column('employee_name', String(60), nullable=False, key='name'),
- Column('employee_dept', Integer, ForeignKey("departments.department_id"))
- )
- {sql}employees.create(engine)
- CREATE TABLE employees(
- employee_id SERIAL NOT NULL PRIMARY KEY,
- employee_name VARCHAR(60) NOT NULL,
- employee_dept INTEGER REFERENCES departments(department_id)
- )
- {}
-
-``drop()`` method:
-
-.. sourcecode:: python+sql
-
- {sql}employees.drop(engine)
- DROP TABLE employees
- {}
-
-To enable the "check first for the table existing" logic, add the
-``checkfirst=True`` argument to ``create()`` or ``drop()``::
-
- employees.create(engine, checkfirst=True)
- employees.drop(engine, checkfirst=False)
-
-Altering Schemas through Migrations
------------------------------------
-
-While SQLAlchemy directly supports emitting CREATE and DROP statements for schema
-constructs, the ability to alter those constructs, usually via the ALTER statement
-as well as other database-specific constructs, is outside of the scope of SQLAlchemy
-itself. While it's easy enough to emit ALTER statements and similar by hand,
-such as by passing a string to :meth:`.Connection.execute` or by using the
-:class:`.DDL` construct, it's a common practice to automate the maintenance of
-database schemas in relation to application code using schema migration tools.
-
-There are two major migration tools available for SQLAlchemy:
-
-* `Alembic <http://alembic.readthedocs.org>`_ - Written by the author of SQLAlchemy,
- Alembic features a highly customizable environment and a minimalistic usage pattern,
- supporting such features as transactional DDL, automatic generation of "candidate"
- migrations, an "offline" mode which generates SQL scripts, and support for branch
- resolution.
-* `SQLAlchemy-Migrate <http://code.google.com/p/sqlalchemy-migrate/>`_ - The original
- migration tool for SQLAlchemy, SQLAlchemy-Migrate is widely used and continues
- under active development. SQLAlchemy-Migrate includes features such as
- SQL script generation, ORM class generation, ORM model comparison, and extensive
- support for SQLite migrations.
-
-
-Specifying the Schema Name
----------------------------
-
-Some databases support the concept of multiple schemas. A
-:class:`~sqlalchemy.schema.Table` can reference this by specifying the
-``schema`` keyword argument::
-
- financial_info = Table('financial_info', meta,
- Column('id', Integer, primary_key=True),
- Column('value', String(100), nullable=False),
- schema='remote_banks'
- )
-
-Within the :class:`~sqlalchemy.schema.MetaData` collection, this table will be
-identified by the combination of ``financial_info`` and ``remote_banks``. If
-another table called ``financial_info`` is referenced without the
-``remote_banks`` schema, it will refer to a different
-:class:`~sqlalchemy.schema.Table`. :class:`~sqlalchemy.schema.ForeignKey`
-objects can specify references to columns in this table using the form
-``remote_banks.financial_info.id``.
-
-The ``schema`` argument should be used for any name qualifiers required,
-including Oracle's "owner" attribute and similar. It also can accommodate a
-dotted name for longer schemes::
-
- schema="dbo.scott"
-
-Backend-Specific Options
-------------------------
-
-:class:`~sqlalchemy.schema.Table` supports database-specific options. For
-example, MySQL has different table backend types, including "MyISAM" and
-"InnoDB". This can be expressed with :class:`~sqlalchemy.schema.Table` using
-``mysql_engine``::
-
- addresses = Table('engine_email_addresses', meta,
- Column('address_id', Integer, primary_key = True),
- Column('remote_user_id', Integer, ForeignKey(users.c.user_id)),
- Column('email_address', String(20)),
- mysql_engine='InnoDB'
- )
-
-Other backends may support table-level options as well - these would be
-described in the individual documentation sections for each dialect.
-
-Column, Table, MetaData API
----------------------------
-
-.. autoclass:: Column
- :members:
- :inherited-members:
- :undoc-members:
- :show-inheritance:
-
-.. autoclass:: MetaData
- :members:
- :undoc-members:
- :show-inheritance:
-
-.. autoclass:: SchemaItem
- :show-inheritance:
- :members:
-
-.. autoclass:: Table
- :members:
- :inherited-members:
- :undoc-members:
- :show-inheritance:
-
-.. autoclass:: ThreadLocalMetaData
- :members:
- :undoc-members:
- :show-inheritance:
-
-
-.. _metadata_reflection:
-
-Reflecting Database Objects
-===========================
-
-A :class:`~sqlalchemy.schema.Table` object can be instructed to load
-information about itself from the corresponding database schema object already
-existing within the database. This process is called *reflection*. In the
-most simple case you need only specify the table name, a :class:`~sqlalchemy.schema.MetaData`
-object, and the ``autoload=True`` flag. If the
-:class:`~sqlalchemy.schema.MetaData` is not persistently bound, also add the
-``autoload_with`` argument::
-
- >>> messages = Table('messages', meta, autoload=True, autoload_with=engine)
- >>> [c.name for c in messages.columns]
- ['message_id', 'message_name', 'date']
-
-The above operation will use the given engine to query the database for
-information about the ``messages`` table, and will then generate
-:class:`~sqlalchemy.schema.Column`, :class:`~sqlalchemy.schema.ForeignKey`,
-and other objects corresponding to this information as though the
-:class:`~sqlalchemy.schema.Table` object were hand-constructed in Python.
-
-When tables are reflected, if a given table references another one via foreign
-key, a second :class:`~sqlalchemy.schema.Table` object is created within the
-:class:`~sqlalchemy.schema.MetaData` object representing the connection.
-Below, assume the table ``shopping_cart_items`` references a table named
-``shopping_carts``. Reflecting the ``shopping_cart_items`` table has the
-effect such that the ``shopping_carts`` table will also be loaded::
-
- >>> shopping_cart_items = Table('shopping_cart_items', meta, autoload=True, autoload_with=engine)
 >>> 'shopping_carts' in meta.tables
- True
-
-The :class:`~sqlalchemy.schema.MetaData` has an interesting "singleton-like"
-behavior such that if you requested both tables individually,
-:class:`~sqlalchemy.schema.MetaData` will ensure that exactly one
-:class:`~sqlalchemy.schema.Table` object is created for each distinct table
-name. The :class:`~sqlalchemy.schema.Table` constructor actually returns to
-you the already-existing :class:`~sqlalchemy.schema.Table` object if one
-already exists with the given name. Such as below, we can access the already
-generated ``shopping_carts`` table just by naming it::
-
- shopping_carts = Table('shopping_carts', meta)
-
-Of course, it's a good idea to use ``autoload=True`` with the above table
-regardless. This is so that the table's attributes will be loaded if they have
-not been already. The autoload operation only occurs for the table if it
-hasn't already been loaded; once loaded, new calls to
-:class:`~sqlalchemy.schema.Table` with the same name will not re-issue any
-reflection queries.
-
-Overriding Reflected Columns
------------------------------
-
-Individual columns can be overridden with explicit values when reflecting
-tables; this is handy for specifying custom datatypes, constraints such as
-primary keys that may not be configured within the database, etc.::
-
- >>> mytable = Table('mytable', meta,
- ... Column('id', Integer, primary_key=True), # override reflected 'id' to have primary key
- ... Column('mydata', Unicode(50)), # override reflected 'mydata' to be Unicode
- ... autoload=True)
-
-Reflecting Views
------------------
-
-The reflection system can also reflect views. Basic usage is the same as that
-of a table::
-
- my_view = Table("some_view", metadata, autoload=True)
-
-Above, ``my_view`` is a :class:`~sqlalchemy.schema.Table` object with
-:class:`~sqlalchemy.schema.Column` objects representing the names and types of
-each column within the view "some_view".
-
-Usually, it's desired to have at least a primary key constraint when
-reflecting a view, if not foreign keys as well. View reflection doesn't
-extrapolate these constraints.
-
-Use the "override" technique for this, specifying explicitly those columns
-which are part of the primary key or have foreign key constraints::
-
- my_view = Table("some_view", metadata,
- Column("view_id", Integer, primary_key=True),
- Column("related_thing", Integer, ForeignKey("othertable.thing_id")),
- autoload=True
- )
-
-Reflecting All Tables at Once
------------------------------
-
-The :class:`~sqlalchemy.schema.MetaData` object can also get a listing of
-tables and reflect the full set. This is achieved by using the
-:func:`~sqlalchemy.schema.MetaData.reflect` method. After calling it, all
-located tables are present within the :class:`~sqlalchemy.schema.MetaData`
-object's dictionary of tables::
-
- meta = MetaData()
- meta.reflect(bind=someengine)
- users_table = meta.tables['users']
- addresses_table = meta.tables['addresses']
-
-``metadata.reflect()`` also provides a handy way to clear or delete all the rows in a database::
-
- meta = MetaData()
- meta.reflect(bind=someengine)
- for table in reversed(meta.sorted_tables):
- someengine.execute(table.delete())
-
-Fine Grained Reflection with Inspector
---------------------------------------
-
-A low level interface which provides a backend-agnostic system of loading
-lists of schema, table, column, and constraint descriptions from a given
-database is also available. This is known as the "Inspector"::
-
- from sqlalchemy import create_engine
- from sqlalchemy.engine import reflection
- engine = create_engine('...')
- insp = reflection.Inspector.from_engine(engine)
- print insp.get_table_names()
-
-.. autoclass:: sqlalchemy.engine.reflection.Inspector
- :members:
- :undoc-members:
- :show-inheritance:
-
-
-.. _metadata_defaults:
-
-Column Insert/Update Defaults
-==============================
-
-SQLAlchemy provides a very rich featureset regarding column level events which
-take place during INSERT and UPDATE statements. Options include:
-
-* Scalar values used as defaults during INSERT and UPDATE operations
-* Python functions which execute upon INSERT and UPDATE operations
-* SQL expressions which are embedded in INSERT statements (or in some cases execute beforehand)
-* SQL expressions which are embedded in UPDATE statements
-* Server side default values used during INSERT
-* Markers for server-side triggers used during UPDATE
-
-The general rule for all insert/update defaults is that they only take effect
-if no value for a particular column is passed as an ``execute()`` parameter;
-otherwise, the given value is used.
-
-Scalar Defaults
----------------
-
-The simplest kind of default is a scalar value used as the default value of a column::
-
- Table("mytable", meta,
- Column("somecolumn", Integer, default=12)
- )
-
-Above, the value "12" will be bound as the column value during an INSERT if no
-other value is supplied.
-
-A scalar value may also be associated with an UPDATE statement, though this is
-not very common (as UPDATE statements are usually looking for dynamic
-defaults)::
-
- Table("mytable", meta,
- Column("somecolumn", Integer, onupdate=25)
- )
-
-
-Python-Executed Functions
--------------------------
-
-The ``default`` and ``onupdate`` keyword arguments also accept Python
-functions. These functions are invoked at the time of insert or update if no
-other value for that column is supplied, and the value returned is used for
-the column's value. Below illustrates a crude "sequence" that assigns an
-incrementing counter to a primary key column::
-
- # a function which counts upwards
- i = 0
- def mydefault():
- global i
- i += 1
- return i
-
- t = Table("mytable", meta,
- Column('id', Integer, primary_key=True, default=mydefault),
- )
-
-It should be noted that for real "incrementing sequence" behavior, the
-built-in capabilities of the database should normally be used, which may
-include sequence objects or other autoincrementing capabilities. For primary
-key columns, SQLAlchemy will in most cases use these capabilities
-automatically. See the API documentation for
-:class:`~sqlalchemy.schema.Column` including the ``autoincrement`` flag, as
-well as the section on :class:`~sqlalchemy.schema.Sequence` later in this
-chapter for background on standard primary key generation techniques.
-
-To illustrate onupdate, we assign the Python ``datetime`` function ``now`` to
-the ``onupdate`` attribute::
-
- import datetime
-
- t = Table("mytable", meta,
- Column('id', Integer, primary_key=True),
-
- # define 'last_updated' to be populated with datetime.now()
- Column('last_updated', DateTime, onupdate=datetime.datetime.now),
- )
-
-When an update statement executes and no value is passed for ``last_updated``,
-the ``datetime.datetime.now()`` Python function is executed and its return
-value used as the value for ``last_updated``. Notice that we provide ``now``
-as the function itself without calling it (i.e. there are no parenthesis
-following) - SQLAlchemy will execute the function at the time the statement
-executes.
-
-Context-Sensitive Default Functions
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-The Python functions used by ``default`` and ``onupdate`` may also make use of
-the current statement's context in order to determine a value. The `context`
-of a statement is an internal SQLAlchemy object which contains all information
-about the statement being executed, including its source expression, the
-parameters associated with it and the cursor. The typical use case for this
-context with regards to default generation is to have access to the other
-values being inserted or updated on the row. To access the context, provide a
-function that accepts a single ``context`` argument::
-
- def mydefault(context):
- return context.current_parameters['counter'] + 12
-
- t = Table('mytable', meta,
- Column('counter', Integer),
- Column('counter_plus_twelve', Integer, default=mydefault, onupdate=mydefault)
- )
-
-Above we illustrate a default function which will execute for all INSERT and
-UPDATE statements where a value for ``counter_plus_twelve`` was otherwise not
-provided, and the value will be that of whatever value is present in the
-execution for the ``counter`` column, plus the number 12.
-
-While the context object passed to the default function has many attributes,
-the ``current_parameters`` member is a special member provided only during the
-execution of a default function for the purposes of deriving defaults from its
-existing values. For a single statement that is executing many sets of bind
-parameters, the user-defined function is called for each set of parameters,
-and ``current_parameters`` will be provided with each individual parameter set
-for each execution.
-
-SQL Expressions
----------------
-
-The "default" and "onupdate" keywords may also be passed SQL expressions,
-including select statements or direct function calls::
-
- t = Table("mytable", meta,
- Column('id', Integer, primary_key=True),
-
- # define 'create_date' to default to now()
- Column('create_date', DateTime, default=func.now()),
-
- # define 'key' to pull its default from the 'keyvalues' table
 Column('key', String(20), default=keyvalues.select(keyvalues.c.type == 'type1', limit=1)),
-
- # define 'last_modified' to use the current_timestamp SQL function on update
- Column('last_modified', DateTime, onupdate=func.utc_timestamp())
- )
-
-Above, the ``create_date`` column will be populated with the result of the
-``now()`` SQL function (which, depending on backend, compiles into ``NOW()``
-or ``CURRENT_TIMESTAMP`` in most cases) during an INSERT statement, and the
-``key`` column with the result of a SELECT subquery from another table. The
-``last_modified`` column will be populated with the value of
-``UTC_TIMESTAMP()``, a function specific to MySQL, when an UPDATE statement is
-emitted for this table.
-
-Note that when using ``func`` functions, unlike when using Python `datetime`
-functions we *do* call the function, i.e. with parenthesis "()" - this is
-because what we want in this case is the return value of the function, which
-is the SQL expression construct that will be rendered into the INSERT or
-UPDATE statement.
-
-The above SQL functions are usually executed "inline" with the INSERT or
-UPDATE statement being executed, meaning, a single statement is executed which
-embeds the given expressions or subqueries within the VALUES or SET clause of
-the statement. Although in some cases, the function is "pre-executed" in a
-SELECT statement of its own beforehand. This happens when all of the following
-is true:
-
-* the column is a primary key column
-* the database dialect does not support a usable ``cursor.lastrowid`` accessor
- (or equivalent); this currently includes PostgreSQL, Oracle, and Firebird, as
- well as some MySQL dialects.
-* the dialect does not support the "RETURNING" clause or similar, or the
- ``implicit_returning`` flag is set to ``False`` for the dialect. Dialects
- which support RETURNING currently include Postgresql, Oracle, Firebird, and
- MS-SQL.
-* the statement is a single execution, i.e. only supplies one set of
- parameters and doesn't use "executemany" behavior
-* the ``inline=True`` flag is not set on the
- :class:`~sqlalchemy.sql.expression.Insert()` or
- :class:`~sqlalchemy.sql.expression.Update()` construct, and the statement has
- not defined an explicit `returning()` clause.
-
-Whether or not the default generation clause "pre-executes" is not something
-that normally needs to be considered, unless it is being addressed for
-performance reasons.
-
-When the statement is executed with a single set of parameters (that is, it is
-not an "executemany" style execution), the returned
-:class:`~sqlalchemy.engine.ResultProxy` will contain a collection
-accessible via ``result.postfetch_cols()`` which contains a list of all
-:class:`~sqlalchemy.schema.Column` objects which had an inline-executed
-default. Similarly, all parameters which were bound to the statement,
-including all Python and SQL expressions which were pre-executed, are present
-in the ``last_inserted_params()`` or ``last_updated_params()`` collections on
-:class:`~sqlalchemy.engine.ResultProxy`. The ``inserted_primary_key``
-collection contains a list of primary key values for the row inserted (a list
-so that single-column and composite-column primary keys are represented in the
-same format).
-
-Server Side Defaults
---------------------
-
-A variant on the SQL expression default is the ``server_default``, which gets
-placed in the CREATE TABLE statement during a ``create()`` operation:
-
-.. sourcecode:: python+sql
-
- t = Table('test', meta,
- Column('abc', String(20), server_default='abc'),
- Column('created_at', DateTime, server_default=text("sysdate"))
- )
-
-A create call for the above table will produce::
-
- CREATE TABLE test (
- abc varchar(20) default 'abc',
- created_at datetime default sysdate
- )
-
-The behavior of ``server_default`` is similar to that of a regular SQL
-default; if it's placed on a primary key column for a database which doesn't
-have a way to "postfetch" the ID, and the statement is not "inlined", the SQL
-expression is pre-executed; otherwise, SQLAlchemy lets the default fire off on
-the database side normally.
-
-.. _triggered_columns:
-
-Triggered Columns
-------------------
-
-Columns with values set by a database trigger or other external process may be
-called out using :class:`.FetchedValue` as a marker::
-
- t = Table('test', meta,
- Column('abc', String(20), server_default=FetchedValue()),
- Column('def', String(20), server_onupdate=FetchedValue())
- )
-
-.. versionchanged:: 0.8.0b2,0.7.10
- The ``for_update`` argument on :class:`.FetchedValue` is set automatically
- when specified as the ``server_onupdate`` argument. If using an older version,
- specify the onupdate above as ``server_onupdate=FetchedValue(for_update=True)``.
-
-These markers do not emit a "default" clause when the table is created,
-however they do set the same internal flags as a static ``server_default``
-clause, providing hints to higher-level tools that a "post-fetch" of these
-rows should be performed after an insert or update.
-
-.. note::
-
- It's generally not appropriate to use :class:`.FetchedValue` in
- conjunction with a primary key column, particularly when using the
- ORM or any other scenario where the :attr:`.ResultProxy.inserted_primary_key`
 attribute is required. This is because the "post-fetch" operation requires
- that the primary key value already be available, so that the
- row can be selected on its primary key.
-
- For a server-generated primary key value, all databases provide special
- accessors or other techniques in order to acquire the "last inserted
- primary key" column of a table. These mechanisms aren't affected by the presence
- of :class:`.FetchedValue`. For special situations where triggers are
- used to generate primary key values, and the database in use does not
- support the ``RETURNING`` clause, it may be necessary to forego the usage
- of the trigger and instead apply the SQL expression or function as a
- "pre execute" expression::
-
- t = Table('test', meta,
- Column('abc', MyType, default=func.generate_new_value(), primary_key=True)
- )
-
- Where above, when :meth:`.Table.insert` is used,
- the ``func.generate_new_value()`` expression will be pre-executed
- in the context of a scalar ``SELECT`` statement, and the new value will
- be applied to the subsequent ``INSERT``, while at the same time being
- made available to the :attr:`.ResultProxy.inserted_primary_key`
- attribute.
-
-
-Defining Sequences
--------------------
-
-SQLAlchemy represents database sequences using the
-:class:`~sqlalchemy.schema.Sequence` object, which is considered to be a
-special case of "column default". It only has an effect on databases which
-have explicit support for sequences, which currently includes Postgresql,
-Oracle, and Firebird. The :class:`~sqlalchemy.schema.Sequence` object is
-otherwise ignored.
-
-The :class:`~sqlalchemy.schema.Sequence` may be placed on any column as a
-"default" generator to be used during INSERT operations, and can also be
-configured to fire off during UPDATE operations if desired. It is most
-commonly used in conjunction with a single integer primary key column::
-
- table = Table("cartitems", meta,
- Column("cart_id", Integer, Sequence('cart_id_seq'), primary_key=True),
- Column("description", String(40)),
- Column("createdate", DateTime())
- )
-
-Where above, the table "cartitems" is associated with a sequence named
-"cart_id_seq". When INSERT statements take place for "cartitems", and no value
-is passed for the "cart_id" column, the "cart_id_seq" sequence will be used to
-generate a value.
-
-When the :class:`~sqlalchemy.schema.Sequence` is associated with a table,
-CREATE and DROP statements issued for that table will also issue CREATE/DROP
-for the sequence object as well, thus "bundling" the sequence object with its
-parent table.
-
-The :class:`~sqlalchemy.schema.Sequence` object also implements special
-functionality to accommodate Postgresql's SERIAL datatype. The SERIAL type in
-PG automatically generates a sequence that is used implicitly during inserts.
-This means that if a :class:`~sqlalchemy.schema.Table` object defines a
-:class:`~sqlalchemy.schema.Sequence` on its primary key column so that it
-works with Oracle and Firebird, the :class:`~sqlalchemy.schema.Sequence` would
-get in the way of the "implicit" sequence that PG would normally use. For this
-use case, add the flag ``optional=True`` to the
-:class:`~sqlalchemy.schema.Sequence` object - this indicates that the
-:class:`~sqlalchemy.schema.Sequence` should only be used if the database
-provides no other option for generating primary key identifiers.
-
-The :class:`~sqlalchemy.schema.Sequence` object also has the ability to be
-executed standalone like a SQL expression, which has the effect of calling its
-"next value" function::
-
- seq = Sequence('some_sequence')
- nextid = connection.execute(seq)
-
-Default Objects API
--------------------
-
-.. autoclass:: ColumnDefault
- :show-inheritance:
-
-.. autoclass:: DefaultClause
- :show-inheritance:
-
-.. autoclass:: DefaultGenerator
- :show-inheritance:
-
-.. autoclass:: FetchedValue
- :show-inheritance:
-
-.. autoclass:: PassiveDefault
- :show-inheritance:
-
-.. autoclass:: Sequence
- :show-inheritance:
- :members:
-
-Defining Constraints and Indexes
-=================================
-
-.. _metadata_foreignkeys:
-.. _metadata_constraints:
-
-Defining Foreign Keys
----------------------
-
-A *foreign key* in SQL is a table-level construct that constrains one or more
-columns in that table to only allow values that are present in a different set
-of columns, typically but not always located on a different table. We call the
-columns which are constrained the *foreign key* columns and the columns which
-they are constrained towards the *referenced* columns. The referenced columns
-almost always define the primary key for their owning table, though there are
-exceptions to this. The foreign key is the "joint" that connects together
-pairs of rows which have a relationship with each other, and SQLAlchemy
-assigns very deep importance to this concept in virtually every area of its
-operation.
-
-In SQLAlchemy as well as in DDL, foreign key constraints can be defined as
-additional attributes within the table clause, or for single-column foreign
-keys they may optionally be specified within the definition of a single
-column. The single column foreign key is more common, and at the column level
-is specified by constructing a :class:`~sqlalchemy.schema.ForeignKey` object
-as an argument to a :class:`~sqlalchemy.schema.Column` object::
-
- user_preference = Table('user_preference', metadata,
- Column('pref_id', Integer, primary_key=True),
- Column('user_id', Integer, ForeignKey("user.user_id"), nullable=False),
- Column('pref_name', String(40), nullable=False),
- Column('pref_value', String(100))
- )
-
-Above, we define a new table ``user_preference`` for which each row must
-contain a value in the ``user_id`` column that also exists in the ``user``
-table's ``user_id`` column.
-
-The argument to :class:`~sqlalchemy.schema.ForeignKey` is most commonly a
-string of the form *<tablename>.<columnname>*, or for a table in a remote
-schema or "owner" of the form *<schemaname>.<tablename>.<columnname>*. It may
-also be an actual :class:`~sqlalchemy.schema.Column` object, which as we'll
-see later is accessed from an existing :class:`~sqlalchemy.schema.Table`
-object via its ``c`` collection::
-
- ForeignKey(user.c.user_id)
-
-The advantage to using a string is that the in-python linkage between ``user``
-and ``user_preference`` is resolved only when first needed, so that table
-objects can be easily spread across multiple modules and defined in any order.
-
-Foreign keys may also be defined at the table level, using the
-:class:`~sqlalchemy.schema.ForeignKeyConstraint` object. This object can
-describe a single- or multi-column foreign key. A multi-column foreign key is
-known as a *composite* foreign key, and almost always references a table that
-has a composite primary key. Below we define a table ``invoice`` which has a
-composite primary key::
-
- invoice = Table('invoice', metadata,
- Column('invoice_id', Integer, primary_key=True),
- Column('ref_num', Integer, primary_key=True),
- Column('description', String(60), nullable=False)
- )
-
-And then a table ``invoice_item`` with a composite foreign key referencing
-``invoice``::
-
- invoice_item = Table('invoice_item', metadata,
- Column('item_id', Integer, primary_key=True),
- Column('item_name', String(60), nullable=False),
- Column('invoice_id', Integer, nullable=False),
- Column('ref_num', Integer, nullable=False),
- ForeignKeyConstraint(['invoice_id', 'ref_num'], ['invoice.invoice_id', 'invoice.ref_num'])
- )
-
-It's important to note that the
-:class:`~sqlalchemy.schema.ForeignKeyConstraint` is the only way to define a
-composite foreign key. While we could also have placed individual
-:class:`~sqlalchemy.schema.ForeignKey` objects on both the
-``invoice_item.invoice_id`` and ``invoice_item.ref_num`` columns, SQLAlchemy
-would not be aware that these two values should be paired together - it would
-be two individual foreign key constraints instead of a single composite
-foreign key referencing two columns.
-
-.. _use_alter:
-
-Creating/Dropping Foreign Key Constraints via ALTER
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-In all the above examples, the :class:`~sqlalchemy.schema.ForeignKey` object
-causes the "REFERENCES" keyword to be added inline to a column definition
-within a "CREATE TABLE" statement when
-:func:`~sqlalchemy.schema.MetaData.create_all` is issued, and
-:class:`~sqlalchemy.schema.ForeignKeyConstraint` invokes the "CONSTRAINT"
keyword inline with "CREATE TABLE". There are some cases where this is
undesirable, particularly when two tables reference each other mutually, each
-with a foreign key referencing the other. In such a situation at least one of
-the foreign key constraints must be generated after both tables have been
-built. To support such a scheme, :class:`~sqlalchemy.schema.ForeignKey` and
-:class:`~sqlalchemy.schema.ForeignKeyConstraint` offer the flag
-``use_alter=True``. When using this flag, the constraint will be generated
-using a definition similar to "ALTER TABLE <tablename> ADD CONSTRAINT <name>
-...". Since a name is required, the ``name`` attribute must also be specified.
-For example::
-
- node = Table('node', meta,
- Column('node_id', Integer, primary_key=True),
- Column('primary_element', Integer,
- ForeignKey('element.element_id', use_alter=True, name='fk_node_element_id')
- )
- )
-
- element = Table('element', meta,
- Column('element_id', Integer, primary_key=True),
- Column('parent_node_id', Integer),
- ForeignKeyConstraint(
- ['parent_node_id'],
- ['node.node_id'],
- use_alter=True,
- name='fk_element_parent_node_id'
- )
- )
-
-ON UPDATE and ON DELETE
-~~~~~~~~~~~~~~~~~~~~~~~
-
Most databases support *cascading* of foreign key values, that is, when a
-parent row is updated the new value is placed in child rows, or when the
-parent row is deleted all corresponding child rows are set to null or deleted.
-In data definition language these are specified using phrases like "ON UPDATE
-CASCADE", "ON DELETE CASCADE", and "ON DELETE SET NULL", corresponding to
-foreign key constraints. The phrase after "ON UPDATE" or "ON DELETE" may also
-other allow other phrases that are specific to the database in use. The
-:class:`~sqlalchemy.schema.ForeignKey` and
-:class:`~sqlalchemy.schema.ForeignKeyConstraint` objects support the
-generation of this clause via the ``onupdate`` and ``ondelete`` keyword
-arguments. The value is any string which will be output after the appropriate
-"ON UPDATE" or "ON DELETE" phrase::
-
- child = Table('child', meta,
- Column('id', Integer,
- ForeignKey('parent.id', onupdate="CASCADE", ondelete="CASCADE"),
- primary_key=True
- )
- )
-
- composite = Table('composite', meta,
- Column('id', Integer, primary_key=True),
- Column('rev_id', Integer),
- Column('note_id', Integer),
- ForeignKeyConstraint(
- ['rev_id', 'note_id'],
- ['revisions.id', 'revisions.note_id'],
- onupdate="CASCADE", ondelete="SET NULL"
- )
- )
-
-Note that these clauses are not supported on SQLite, and require ``InnoDB``
-tables when used with MySQL. They may also not be supported on other
-databases.
-
-
-UNIQUE Constraint
------------------
-
-Unique constraints can be created anonymously on a single column using the
-``unique`` keyword on :class:`~sqlalchemy.schema.Column`. Explicitly named
-unique constraints and/or those with multiple columns are created via the
-:class:`~sqlalchemy.schema.UniqueConstraint` table-level construct.
-
-.. sourcecode:: python+sql
-
- meta = MetaData()
- mytable = Table('mytable', meta,
-
- # per-column anonymous unique constraint
- Column('col1', Integer, unique=True),
-
- Column('col2', Integer),
- Column('col3', Integer),
-
- # explicit/composite unique constraint. 'name' is optional.
- UniqueConstraint('col2', 'col3', name='uix_1')
- )
-
-CHECK Constraint
-----------------
-
-Check constraints can be named or unnamed and can be created at the Column or
-Table level, using the :class:`~sqlalchemy.schema.CheckConstraint` construct.
-The text of the check constraint is passed directly through to the database,
-so there is limited "database independent" behavior. Column level check
-constraints generally should only refer to the column to which they are
-placed, while table level constraints can refer to any columns in the table.
-
-Note that some databases, such as MySQL, do not actively support check
-constraints.
-
-.. sourcecode:: python+sql
-
- meta = MetaData()
- mytable = Table('mytable', meta,
-
- # per-column CHECK constraint
- Column('col1', Integer, CheckConstraint('col1>5')),
-
- Column('col2', Integer),
- Column('col3', Integer),
-
- # table level CHECK constraint. 'name' is optional.
- CheckConstraint('col2 > col3 + 5', name='check1')
- )
-
- {sql}mytable.create(engine)
- CREATE TABLE mytable (
- col1 INTEGER CHECK (col1>5),
- col2 INTEGER,
- col3 INTEGER,
- CONSTRAINT check1 CHECK (col2 > col3 + 5)
- ){stop}
-
-Setting up Constraints when using the Declarative ORM Extension
-----------------------------------------------------------------
-
-The :class:`.Table` is the SQLAlchemy Core construct that allows one to define
-table metadata, which among other things can be used by the SQLAlchemy ORM
-as a target to map a class. The :ref:`Declarative <declarative_toplevel>`
-extension allows the :class:`.Table` object to be created automatically, given
-the contents of the table primarily as a mapping of :class:`.Column` objects.
-
-To apply table-level constraint objects such as :class:`.ForeignKeyConstraint`
-to a table defined using Declarative, use the ``__table_args__`` attribute,
-described at :ref:`declarative_table_args`.
-
-Constraints API
----------------
-.. autoclass:: Constraint
- :show-inheritance:
-
-.. autoclass:: CheckConstraint
- :show-inheritance:
-
-.. autoclass:: ColumnCollectionConstraint
- :show-inheritance:
-
-.. autoclass:: ForeignKey
- :members:
- :show-inheritance:
-
-.. autoclass:: ForeignKeyConstraint
- :members:
- :show-inheritance:
-
-.. autoclass:: PrimaryKeyConstraint
- :show-inheritance:
-
-.. autoclass:: UniqueConstraint
- :show-inheritance:
-
-.. _schema_indexes:
-
-Indexes
--------
-
-Indexes can be created anonymously (using an auto-generated name ``ix_<column
-label>``) for a single column using the inline ``index`` keyword on
-:class:`~sqlalchemy.schema.Column`, which also modifies the usage of
-``unique`` to apply the uniqueness to the index itself, instead of adding a
-separate UNIQUE constraint. For indexes with specific names or which encompass
-more than one column, use the :class:`~sqlalchemy.schema.Index` construct,
-which requires a name.
-
-Below we illustrate a :class:`~sqlalchemy.schema.Table` with several
-:class:`~sqlalchemy.schema.Index` objects associated. The DDL for "CREATE
-INDEX" is issued right after the create statements for the table:
-
-.. sourcecode:: python+sql
-
- meta = MetaData()
- mytable = Table('mytable', meta,
- # an indexed column, with index "ix_mytable_col1"
- Column('col1', Integer, index=True),
-
- # a uniquely indexed column with index "ix_mytable_col2"
- Column('col2', Integer, index=True, unique=True),
-
- Column('col3', Integer),
- Column('col4', Integer),
-
- Column('col5', Integer),
- Column('col6', Integer),
- )
-
- # place an index on col3, col4
- Index('idx_col34', mytable.c.col3, mytable.c.col4)
-
- # place a unique index on col5, col6
- Index('myindex', mytable.c.col5, mytable.c.col6, unique=True)
-
- {sql}mytable.create(engine)
- CREATE TABLE mytable (
- col1 INTEGER,
- col2 INTEGER,
- col3 INTEGER,
- col4 INTEGER,
- col5 INTEGER,
- col6 INTEGER
- )
- CREATE INDEX ix_mytable_col1 ON mytable (col1)
- CREATE UNIQUE INDEX ix_mytable_col2 ON mytable (col2)
- CREATE UNIQUE INDEX myindex ON mytable (col5, col6)
- CREATE INDEX idx_col34 ON mytable (col3, col4){stop}
-
-Note in the example above, the :class:`.Index` construct is created
-externally to the table to which it corresponds, using :class:`.Column`
-objects directly. :class:`.Index` also supports
-"inline" definition inside the :class:`.Table`, using string names to
-identify columns::
-
- meta = MetaData()
- mytable = Table('mytable', meta,
- Column('col1', Integer),
-
- Column('col2', Integer),
-
- Column('col3', Integer),
- Column('col4', Integer),
-
- # place an index on col1, col2
- Index('idx_col12', 'col1', 'col2'),
-
- # place a unique index on col3, col4
- Index('idx_col34', 'col3', 'col4', unique=True)
- )
-
-.. versionadded:: 0.7
- Support of "inline" definition inside the :class:`.Table`
- for :class:`.Index`\ .
-
-The :class:`~sqlalchemy.schema.Index` object also supports its own ``create()`` method:
-
-.. sourcecode:: python+sql
-
- i = Index('someindex', mytable.c.col5)
- {sql}i.create(engine)
- CREATE INDEX someindex ON mytable (col5){stop}
-
-.. _schema_indexes_functional:
-
-Functional Indexes
-~~~~~~~~~~~~~~~~~~~
-
-:class:`.Index` supports SQL and function expressions, as supported by the
-target backend. To create an index against a column using a descending
-value, the :meth:`.ColumnElement.desc` modifier may be used::
-
- from sqlalchemy import Index
-
- Index('someindex', mytable.c.somecol.desc())
-
-Or with a backend that supports functional indexes such as Postgresql,
-a "case insensitive" index can be created using the ``lower()`` function::
-
- from sqlalchemy import func, Index
-
- Index('someindex', func.lower(mytable.c.somecol))
-
-.. versionadded:: 0.8 :class:`.Index` supports SQL expressions and functions
- as well as plain columns.
-
-Index API
----------
-
-.. autoclass:: Index
- :show-inheritance:
- :members:
-
-.. _metadata_ddl:
-
-Customizing DDL
-===============
-
-In the preceding sections we've discussed a variety of schema constructs
-including :class:`~sqlalchemy.schema.Table`,
-:class:`~sqlalchemy.schema.ForeignKeyConstraint`,
-:class:`~sqlalchemy.schema.CheckConstraint`, and
-:class:`~sqlalchemy.schema.Sequence`. Throughout, we've relied upon the
-``create()`` and :func:`~sqlalchemy.schema.MetaData.create_all` methods of
-:class:`~sqlalchemy.schema.Table` and :class:`~sqlalchemy.schema.MetaData` in
-order to issue data definition language (DDL) for all constructs. When issued,
-a pre-determined order of operations is invoked, and DDL to create each table
-is created unconditionally including all constraints and other objects
-associated with it. For more complex scenarios where database-specific DDL is
-required, SQLAlchemy offers two techniques which can be used to add any DDL
-based on any condition, either accompanying the standard generation of tables
-or by itself.
-
-.. _schema_ddl_sequences:
-
-Controlling DDL Sequences
--------------------------
-
-The ``sqlalchemy.schema`` package contains SQL expression constructs that
-provide DDL expressions. For example, to produce a ``CREATE TABLE`` statement:
-
-.. sourcecode:: python+sql
-
- from sqlalchemy.schema import CreateTable
- {sql}engine.execute(CreateTable(mytable))
- CREATE TABLE mytable (
- col1 INTEGER,
- col2 INTEGER,
- col3 INTEGER,
- col4 INTEGER,
- col5 INTEGER,
- col6 INTEGER
- ){stop}
-
-Above, the :class:`~sqlalchemy.schema.CreateTable` construct works like any
-other expression construct (such as ``select()``, ``table.insert()``, etc.). A
-full reference of available constructs is in :ref:`schema_api_ddl`.
-
-The DDL constructs all extend a common base class which provides the
-capability to be associated with an individual
-:class:`~sqlalchemy.schema.Table` or :class:`~sqlalchemy.schema.MetaData`
-object, to be invoked upon create/drop events. Consider the example of a table
-which contains a CHECK constraint:
-
-.. sourcecode:: python+sql
-
- users = Table('users', metadata,
- Column('user_id', Integer, primary_key=True),
- Column('user_name', String(40), nullable=False),
- CheckConstraint('length(user_name) >= 8',name="cst_user_name_length")
- )
-
- {sql}users.create(engine)
- CREATE TABLE users (
- user_id SERIAL NOT NULL,
- user_name VARCHAR(40) NOT NULL,
- PRIMARY KEY (user_id),
- CONSTRAINT cst_user_name_length CHECK (length(user_name) >= 8)
- ){stop}
-
-The above table contains a column "user_name" which is subject to a CHECK
-constraint that validates that the length of the string is at least eight
-characters. When a ``create()`` is issued for this table, DDL for the
-:class:`~sqlalchemy.schema.CheckConstraint` will also be issued inline within
-the table definition.
-
-The :class:`~sqlalchemy.schema.CheckConstraint` construct can also be
-constructed externally and associated with the
-:class:`~sqlalchemy.schema.Table` afterwards::
-
- constraint = CheckConstraint('length(user_name) >= 8',name="cst_user_name_length")
- users.append_constraint(constraint)
-
-So far, the effect is the same. However, if we create DDL elements
-corresponding to the creation and removal of this constraint, and associate
-them with the :class:`.Table` as events, these new events
-will take over the job of issuing DDL for the constraint. Additionally, the
-constraint will be added via ALTER:
-
-.. sourcecode:: python+sql
-
- from sqlalchemy import event
-
- event.listen(
- users,
- "after_create",
- AddConstraint(constraint)
- )
- event.listen(
- users,
- "before_drop",
- DropConstraint(constraint)
- )
-
- {sql}users.create(engine)
- CREATE TABLE users (
- user_id SERIAL NOT NULL,
- user_name VARCHAR(40) NOT NULL,
- PRIMARY KEY (user_id)
- )
-
- ALTER TABLE users ADD CONSTRAINT cst_user_name_length CHECK (length(user_name) >= 8){stop}
-
- {sql}users.drop(engine)
- ALTER TABLE users DROP CONSTRAINT cst_user_name_length
- DROP TABLE users{stop}
-
-The real usefulness of the above becomes clearer once we illustrate the
-:meth:`.DDLElement.execute_if` method. This method returns a modified form of
-the DDL callable which will filter on criteria before responding to a
-received event. It accepts a parameter ``dialect``, which is the string
-name of a dialect or a tuple of such, which will limit the execution of the
-item to just those dialects. It also accepts a ``callable_`` parameter which
-may reference a Python callable which will be invoked upon event reception,
-returning ``True`` or ``False`` indicating if the event should proceed.
-
-If our :class:`~sqlalchemy.schema.CheckConstraint` was only supported by
-Postgresql and not other databases, we could limit its usage to just that dialect::
-
- event.listen(
- users,
- 'after_create',
- AddConstraint(constraint).execute_if(dialect='postgresql')
- )
- event.listen(
- users,
- 'before_drop',
- DropConstraint(constraint).execute_if(dialect='postgresql')
- )
-
-Or to any set of dialects::
-
- event.listen(
- users,
- "after_create",
- AddConstraint(constraint).execute_if(dialect=('postgresql', 'mysql'))
- )
- event.listen(
- users,
- "before_drop",
- DropConstraint(constraint).execute_if(dialect=('postgresql', 'mysql'))
- )
-
-When using a callable, the callable is passed the ddl element, the
-:class:`.Table` or :class:`.MetaData`
-object whose "create" or "drop" event is in progress, and the
-:class:`.Connection` object being used for the
-operation, as well as additional information as keyword arguments. The
-callable can perform checks, such as whether or not a given item already
-exists. Below we define ``should_create()`` and ``should_drop()`` callables
-that check for the presence of our named constraint:
-
-.. sourcecode:: python+sql
-
- def should_create(ddl, target, connection, **kw):
- row = connection.execute("select conname from pg_constraint where conname='%s'" % ddl.element.name).scalar()
- return not bool(row)
-
- def should_drop(ddl, target, connection, **kw):
- return not should_create(ddl, target, connection, **kw)
-
- event.listen(
- users,
- "after_create",
- AddConstraint(constraint).execute_if(callable_=should_create)
- )
- event.listen(
- users,
- "before_drop",
- DropConstraint(constraint).execute_if(callable_=should_drop)
- )
-
- {sql}users.create(engine)
- CREATE TABLE users (
- user_id SERIAL NOT NULL,
- user_name VARCHAR(40) NOT NULL,
- PRIMARY KEY (user_id)
- )
-
- select conname from pg_constraint where conname='cst_user_name_length'
- ALTER TABLE users ADD CONSTRAINT cst_user_name_length CHECK (length(user_name) >= 8){stop}
-
- {sql}users.drop(engine)
- select conname from pg_constraint where conname='cst_user_name_length'
- ALTER TABLE users DROP CONSTRAINT cst_user_name_length
- DROP TABLE users{stop}
-
-Custom DDL
-----------
-
-Custom DDL phrases are most easily achieved using the
-:class:`~sqlalchemy.schema.DDL` construct. This construct works like all the
-other DDL elements except it accepts a string which is the text to be emitted:
-
-.. sourcecode:: python+sql
-
- event.listen(
- metadata,
- "after_create",
- DDL("ALTER TABLE users ADD CONSTRAINT "
- "cst_user_name_length "
- " CHECK (length(user_name) >= 8)")
- )
-
-A more comprehensive method of creating libraries of DDL constructs is to use
-custom compilation - see :ref:`sqlalchemy.ext.compiler_toplevel` for
-details.
-
-.. _schema_api_ddl:
-
-DDL Expression Constructs API
------------------------------
-
-.. autoclass:: DDLElement
- :members:
- :undoc-members:
- :show-inheritance:
-
-.. autoclass:: DDL
- :members:
- :undoc-members:
- :show-inheritance:
-
-.. autoclass:: CreateTable
- :members:
- :undoc-members:
- :show-inheritance:
-
-.. autoclass:: DropTable
- :members:
- :undoc-members:
- :show-inheritance:
-
-.. autoclass:: CreateColumn
- :members:
- :undoc-members:
- :show-inheritance:
-
-.. autoclass:: CreateSequence
- :members:
- :undoc-members:
- :show-inheritance:
-
-.. autoclass:: DropSequence
- :members:
- :undoc-members:
- :show-inheritance:
-
-.. autoclass:: CreateIndex
- :members:
- :undoc-members:
- :show-inheritance:
-
-.. autoclass:: DropIndex
- :members:
- :undoc-members:
- :show-inheritance:
-
-.. autoclass:: AddConstraint
- :members:
- :undoc-members:
- :show-inheritance:
+.. toctree::
+ :maxdepth: 1
-.. autoclass:: DropConstraint
- :members:
- :undoc-members:
- :show-inheritance:
+ metadata
+ reflection
+ defaults
+ constraints
+ ddl
-.. autoclass:: CreateSchema
- :members:
- :undoc-members:
- :show-inheritance:
-.. autoclass:: DropSchema
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/doc/build/core/selectable.rst b/doc/build/core/selectable.rst
new file mode 100644
index 000000000..52acb28e5
--- /dev/null
+++ b/doc/build/core/selectable.rst
@@ -0,0 +1,85 @@
+Selectables, Tables, FROM objects
+=================================
+
+The term "selectable" refers to any object that rows can be selected from;
+in SQLAlchemy, these objects descend from :class:`.FromClause` and their
+distinguishing feature is their :attr:`.FromClause.c` attribute, which is
+a namespace of all the columns contained within the FROM clause (these
+elements are themselves :class:`.ColumnElement` subclasses).
+
+.. module:: sqlalchemy.sql.expression
+
+.. autofunction:: alias
+
+.. autofunction:: except_
+
+.. autofunction:: except_all
+
+.. autofunction:: exists
+
+.. autofunction:: intersect
+
+.. autofunction:: intersect_all
+
+.. autofunction:: join
+
+.. autofunction:: outerjoin
+
+.. autofunction:: select
+
+.. autofunction:: subquery
+
+.. autofunction:: sqlalchemy.sql.expression.table
+
+.. autofunction:: union
+
+.. autofunction:: union_all
+
+.. autoclass:: Alias
+ :members:
+ :inherited-members:
+
+.. autoclass:: CompoundSelect
+ :members:
+ :inherited-members:
+
+.. autoclass:: CTE
+ :members:
+ :inherited-members:
+
+.. autoclass:: Executable
+ :members:
+
+.. autoclass:: FromClause
+ :members:
+
+.. autoclass:: GenerativeSelect
+ :members:
+ :inherited-members:
+
+.. autoclass:: HasPrefixes
+ :members:
+
+.. autoclass:: Join
+ :members:
+ :inherited-members:
+
+.. autoclass:: ScalarSelect
+ :members:
+
+.. autoclass:: Select
+ :members:
+ :inherited-members:
+
+.. autoclass:: Selectable
+ :members:
+
+.. autoclass:: SelectBase
+ :members:
+
+.. autoclass:: TableClause
+ :members:
+ :inherited-members:
+
+.. autoclass:: TextAsFrom
+ :members:
diff --git a/doc/build/core/sqlelement.rst b/doc/build/core/sqlelement.rst
new file mode 100644
index 000000000..47855a6a3
--- /dev/null
+++ b/doc/build/core/sqlelement.rst
@@ -0,0 +1,140 @@
+Column Elements and Expressions
+===============================
+
+.. module:: sqlalchemy.sql.expression
+
+The most fundamental part of the SQL expression API are the "column elements",
+which allow for basic SQL expression support. The core of all SQL expression
+constructs is the :class:`.ClauseElement`, which is the base for several
+sub-branches. The :class:`.ColumnElement` class is the fundamental unit
+used to construct any kind of typed SQL expression.
+
+.. autofunction:: and_
+
+.. autofunction:: asc
+
+.. autofunction:: between
+
+.. autofunction:: bindparam
+
+.. autofunction:: case
+
+.. autofunction:: cast
+
+.. autofunction:: sqlalchemy.sql.expression.column
+
+.. autofunction:: collate
+
+.. autofunction:: desc
+
+.. autofunction:: distinct
+
+.. autofunction:: extract
+
+.. autofunction:: false
+
+.. autodata:: func
+
+.. autofunction:: label
+
+.. autofunction:: literal
+
+.. autofunction:: literal_column
+
+.. autofunction:: not_
+
+.. autofunction:: null
+
+.. autofunction:: nullsfirst
+
+.. autofunction:: nullslast
+
+.. autofunction:: or_
+
+.. autofunction:: outparam
+
+.. autofunction:: over
+
+.. autofunction:: text
+
+.. autofunction:: true
+
+.. autofunction:: tuple_
+
+.. autofunction:: type_coerce
+
+.. autoclass:: BinaryExpression
+ :members:
+
+.. autoclass:: BindParameter
+ :members:
+
+.. autoclass:: Case
+ :members:
+
+.. autoclass:: Cast
+ :members:
+
+.. autoclass:: ClauseElement
+ :members:
+
+
+.. autoclass:: ClauseList
+ :members:
+
+
+.. autoclass:: ColumnClause
+ :members:
+
+.. autoclass:: ColumnCollection
+ :members:
+
+
+.. autoclass:: ColumnElement
+ :members:
+ :inherited-members:
+ :undoc-members:
+
+.. autoclass:: sqlalchemy.sql.operators.ColumnOperators
+ :members:
+ :special-members:
+ :inherited-members:
+
+.. autoclass:: Extract
+ :members:
+
+.. autoclass:: sqlalchemy.sql.elements.False_
+ :members:
+
+.. autoclass:: Label
+ :members:
+
+.. autoclass:: sqlalchemy.sql.elements.Null
+ :members:
+
+.. autoclass:: Over
+ :members:
+
+.. autoclass:: TextClause
+ :members:
+
+.. autoclass:: Tuple
+ :members:
+
+.. autoclass:: sqlalchemy.sql.elements.True_
+ :members:
+
+.. autoclass:: sqlalchemy.sql.operators.custom_op
+ :members:
+
+.. autoclass:: sqlalchemy.sql.operators.Operators
+ :members:
+ :special-members:
+
+.. autoclass:: sqlalchemy.sql.elements.quoted_name
+
+.. autoclass:: UnaryExpression
+ :members:
+
+
+
diff --git a/doc/build/core/tutorial.rst b/doc/build/core/tutorial.rst
index 0203248ae..c2a55233d 100644
--- a/doc/build/core/tutorial.rst
+++ b/doc/build/core/tutorial.rst
@@ -50,13 +50,13 @@ Version Check
=============
-A quick check to verify that we are on at least **version 0.8** of SQLAlchemy:
+A quick check to verify that we are on at least **version 0.9** of SQLAlchemy:
.. sourcecode:: pycon+sql
>>> import sqlalchemy
>>> sqlalchemy.__version__ # doctest:+SKIP
- 0.8.0
+ 0.9.0
Connecting
==========
@@ -238,7 +238,7 @@ we use the ``connect()`` method::
>>> conn #doctest: +ELLIPSIS
<sqlalchemy.engine.base.Connection object at 0x...>
-The :class:`~sqlalchemy.engine.base.Connection` object represents an actively
+The :class:`~sqlalchemy.engine.Connection` object represents an actively
checked out DBAPI connection resource. Lets feed it our
:class:`~sqlalchemy.sql.expression.Insert` object and see what happens:
@@ -252,7 +252,7 @@ checked out DBAPI connection resource. Lets feed it our
So the INSERT statement was now issued to the database. Although we got
positional "qmark" bind parameters instead of "named" bind parameters in the
output. How come ? Because when executed, the
-:class:`~sqlalchemy.engine.base.Connection` used the SQLite **dialect** to
+:class:`~sqlalchemy.engine.Connection` used the SQLite **dialect** to
help generate the statement; when we use the ``str()`` function, the statement
isn't aware of this dialect, and falls back onto a default which uses named
parameters. We can view this manually as follows:
@@ -264,9 +264,9 @@ parameters. We can view this manually as follows:
'INSERT INTO users (name, fullname) VALUES (?, ?)'
What about the ``result`` variable we got when we called ``execute()`` ? As
-the SQLAlchemy :class:`~sqlalchemy.engine.base.Connection` object references a
+the SQLAlchemy :class:`~sqlalchemy.engine.Connection` object references a
DBAPI connection, the result, known as a
-:class:`~sqlalchemy.engine.result.ResultProxy` object, is analogous to the DBAPI
+:class:`~sqlalchemy.engine.ResultProxy` object, is analogous to the DBAPI
cursor object. In the case of an INSERT, we can get important information from
it, such as the primary key values which were generated from our statement:
@@ -281,7 +281,7 @@ did not specify the ``id`` column in our
value would have been used. In either case, SQLAlchemy always knows how to get
at a newly generated primary key value, even though the method of generating
them is different across different databases; each database's
-:class:`~sqlalchemy.engine.base.Dialect` knows the specific steps needed to
+:class:`~sqlalchemy.engine.interfaces.Dialect` knows the specific steps needed to
determine the correct value (or values; note that ``inserted_primary_key``
returns a list so that it supports composite primary keys).
@@ -292,7 +292,7 @@ Our insert example above was intentionally a little drawn out to show some
various behaviors of expression language constructs. In the usual case, an
:class:`~sqlalchemy.sql.expression.Insert` statement is usually compiled
against the parameters sent to the ``execute()`` method on
-:class:`~sqlalchemy.engine.base.Connection`, so that there's no need to use
+:class:`~sqlalchemy.engine.Connection`, so that there's no need to use
the ``values`` keyword with :class:`~sqlalchemy.sql.expression.Insert`. Lets
create a generic :class:`~sqlalchemy.sql.expression.Insert` statement again
and use it in the "normal" way:
@@ -363,10 +363,10 @@ Above, we issued a basic :func:`.select` call, placing the ``users`` table
within the COLUMNS clause of the select, and then executing. SQLAlchemy
expanded the ``users`` table into the set of each of its columns, and also
generated a FROM clause for us. The result returned is again a
-:class:`~sqlalchemy.engine.result.ResultProxy` object, which acts much like a
+:class:`~sqlalchemy.engine.ResultProxy` object, which acts much like a
DBAPI cursor, including methods such as
-:func:`~sqlalchemy.engine.result.ResultProxy.fetchone` and
-:func:`~sqlalchemy.engine.result.ResultProxy.fetchall`. The easiest way to get
+:func:`~sqlalchemy.engine.ResultProxy.fetchone` and
+:func:`~sqlalchemy.engine.ResultProxy.fetchall`. The easiest way to get
rows from it is to just iterate:
.. sourcecode:: pycon+sql
@@ -414,7 +414,7 @@ But another way, whose usefulness will become apparent later on, is to use the
Result sets which have pending rows remaining should be explicitly closed
before discarding. While the cursor and connection resources referenced by the
-:class:`~sqlalchemy.engine.result.ResultProxy` will be respectively closed and
+:class:`~sqlalchemy.engine.ResultProxy` will be respectively closed and
returned to the connection pool when the object is garbage collected, it's
better to make it explicit as some database APIs are very picky about such
things:
@@ -1593,6 +1593,8 @@ table, or the same table:
COMMIT
{stop}<sqlalchemy.engine.result.ResultProxy object at 0x...>
+.. _multi_table_updates:
+
Multiple Table Updates
----------------------
diff --git a/doc/build/core/types.rst b/doc/build/core/types.rst
index 131e8e64d..b4781ee51 100644
--- a/doc/build/core/types.rst
+++ b/doc/build/core/types.rst
@@ -9,7 +9,7 @@ SQLAlchemy provides abstractions for most common database data types,
and a mechanism for specifying your own custom data types.
The methods and attributes of type objects are rarely used directly.
-Type objects are supplied to :class:`~sqlalchemy.Table` definitions
+Type objects are supplied to :class:`~sqlalchemy.schema.Table` definitions
and can be supplied as type hints to `functions` for occasions where
the database driver returns an incorrect type.
@@ -24,7 +24,7 @@ the database driver returns an incorrect type.
SQLAlchemy will use the ``Integer`` and ``String(32)`` type
information when issuing a ``CREATE TABLE`` statement and will use it
again when reading back rows ``SELECTed`` from the database.
-Functions that accept a type (such as :func:`~sqlalchemy.Column`) will
+Functions that accept a type (such as :func:`~sqlalchemy.schema.Column`) will
typically accept a type class or instance; ``Integer`` is equivalent
to ``Integer()`` with no construction arguments in this case.
@@ -41,76 +41,58 @@ type is emitted in ``CREATE TABLE``, such as ``VARCHAR`` see `SQL
Standard Types`_ and the other sections of this chapter.
.. autoclass:: BigInteger
- :show-inheritance:
- :members:
+ :members:
.. autoclass:: Boolean
- :show-inheritance:
- :members:
+ :members:
.. autoclass:: Date
- :show-inheritance:
- :members:
+ :members:
.. autoclass:: DateTime
- :show-inheritance:
:members:
.. autoclass:: Enum
- :show-inheritance:
:members: __init__, create, drop
.. autoclass:: Float
- :show-inheritance:
:members:
.. autoclass:: Integer
- :show-inheritance:
:members:
.. autoclass:: Interval
- :show-inheritance:
- :members:
+ :members:
.. autoclass:: LargeBinary
- :show-inheritance:
- :members:
+ :members:
.. autoclass:: Numeric
- :show-inheritance:
:members:
.. autoclass:: PickleType
- :show-inheritance:
- :members:
+ :members:
.. autoclass:: SchemaType
- :show-inheritance:
:members:
:undoc-members:
.. autoclass:: SmallInteger
- :show-inheritance:
- :members:
+ :members:
.. autoclass:: String
- :show-inheritance:
:members:
.. autoclass:: Text
- :show-inheritance:
:members:
.. autoclass:: Time
- :show-inheritance:
:members:
.. autoclass:: Unicode
- :show-inheritance:
:members:
.. autoclass:: UnicodeText
- :show-inheritance:
:members:
.. _types_sqlstandard:
@@ -123,70 +105,70 @@ name when ``CREATE TABLE`` is issued. Some types may not be supported
on all databases.
.. autoclass:: BIGINT
- :show-inheritance:
+
.. autoclass:: BINARY
- :show-inheritance:
+
.. autoclass:: BLOB
- :show-inheritance:
+
.. autoclass:: BOOLEAN
- :show-inheritance:
+
.. autoclass:: CHAR
- :show-inheritance:
+
.. autoclass:: CLOB
- :show-inheritance:
+
.. autoclass:: DATE
- :show-inheritance:
+
.. autoclass:: DATETIME
- :show-inheritance:
+
.. autoclass:: DECIMAL
- :show-inheritance:
+
.. autoclass:: FLOAT
- :show-inheritance:
+
.. autoclass:: INT
- :show-inheritance:
+
.. autoclass:: sqlalchemy.types.INTEGER
- :show-inheritance:
+
.. autoclass:: NCHAR
- :show-inheritance:
+
.. autoclass:: NVARCHAR
- :show-inheritance:
+
.. autoclass:: NUMERIC
- :show-inheritance:
+
.. autoclass:: REAL
- :show-inheritance:
+
.. autoclass:: SMALLINT
- :show-inheritance:
+
.. autoclass:: TEXT
- :show-inheritance:
+
.. autoclass:: TIME
- :show-inheritance:
+
.. autoclass:: TIMESTAMP
- :show-inheritance:
+
.. autoclass:: VARBINARY
- :show-inheritance:
+
.. autoclass:: VARCHAR
- :show-inheritance:
+
.. _types_vendor:
@@ -194,7 +176,7 @@ Vendor-Specific Types
---------------------
Database-specific types are also available for import from each
-database's dialect module. See the :ref:`sqlalchemy.dialects_toplevel`
+database's dialect module. See the :ref:`dialect_toplevel`
reference for the database you're interested in.
For example, MySQL has a ``BIGINT`` type and PostgreSQL has an
@@ -300,7 +282,7 @@ to and from the database is required.
.. autoclass:: TypeDecorator
:members:
:inherited-members:
- :show-inheritance:
+
TypeDecorator Recipes
~~~~~~~~~~~~~~~~~~~~~
@@ -361,6 +343,8 @@ many decimal places. Here's a recipe that rounds them down::
value = value.quantize(self.quantize)
return value
+.. _custom_guid_type:
+
Backend-agnostic GUID Type
^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -690,7 +674,7 @@ to integers::
class MyInt(Integer):
class comparator_factory(Integer.Comparator):
def log(self, other):
- return func.log(self, other)
+ return func.log(self.expr, other)
Using the above type::
@@ -738,29 +722,25 @@ is needed, use :class:`.TypeDecorator` instead.
.. autoclass:: UserDefinedType
:members:
- :show-inheritance:
+
.. _types_api:
Base Type API
--------------
-.. autoclass:: AbstractType
- :members:
- :show-inheritance:
-
.. autoclass:: TypeEngine
:members:
- :show-inheritance:
+
.. autoclass:: Concatenable
:members:
:inherited-members:
- :show-inheritance:
+
.. autoclass:: NullType
- :show-inheritance:
+
.. autoclass:: Variant
- :show-inheritance:
+
:members: with_variant, __init__
diff --git a/doc/build/dialects/drizzle.rst b/doc/build/dialects/drizzle.rst
index 99ff596d7..c89bba032 100644
--- a/doc/build/dialects/drizzle.rst
+++ b/doc/build/dialects/drizzle.rst
@@ -23,51 +23,51 @@ construction arguments, are as follows:
.. autoclass:: BIGINT
:members: __init__
- :show-inheritance:
+
.. autoclass:: CHAR
:members: __init__
- :show-inheritance:
+
.. autoclass:: DECIMAL
:members: __init__
- :show-inheritance:
+
.. autoclass:: DOUBLE
:members: __init__
- :show-inheritance:
+
.. autoclass:: ENUM
:members: __init__
- :show-inheritance:
+
.. autoclass:: FLOAT
:members: __init__
- :show-inheritance:
+
.. autoclass:: INTEGER
:members: __init__
- :show-inheritance:
+
.. autoclass:: NUMERIC
:members: __init__
- :show-inheritance:
+
.. autoclass:: REAL
:members: __init__
- :show-inheritance:
+
.. autoclass:: TEXT
:members: __init__
- :show-inheritance:
+
.. autoclass:: TIMESTAMP
:members: __init__
- :show-inheritance:
+
.. autoclass:: VARCHAR
:members: __init__
- :show-inheritance:
+
MySQL-Python
diff --git a/doc/build/dialects/index.rst b/doc/build/dialects/index.rst
index abf6e44f6..865d0714f 100644
--- a/doc/build/dialects/index.rst
+++ b/doc/build/dialects/index.rst
@@ -18,7 +18,6 @@ Included Dialects
drizzle
firebird
- informix
mssql
mysql
oracle
@@ -42,11 +41,35 @@ External Dialects
Current external dialect projects for SQLAlchemy include:
-* `ibm_db_sa <http://code.google.com/p/ibm-db/wiki/README>`_ - driver for IBM DB2, developed jointly by IBM and SQLAlchemy developers.
-* `sqlalchemy-access <https://bitbucket.org/zzzeek/sqlalchemy-access>`_ - driver for Microsoft Access.
-* `sqlalchemy-akiban <https://github.com/zzzeek/sqlalchemy_akiban>`_ - driver and ORM extensions for the `Akiban <http://www.akiban.com>`_ database.
-* `sqlalchemy-cubrid <https://bitbucket.org/zzzeek/sqlalchemy-cubrid>`_ - driver for the CUBRID database.
-* `sqlalchemy-maxdb <https://bitbucket.org/zzzeek/sqlalchemy-maxdb>`_ - driver for the MaxDB database.
+Production Ready
+^^^^^^^^^^^^^^^^
+
+* `ibm_db_sa <http://code.google.com/p/ibm-db/wiki/README>`_ - driver for IBM DB2 and Informix, developed jointly by IBM and SQLAlchemy developers.
+* `redshift-sqlalchemy <https://pypi.python.org/pypi/redshift-sqlalchemy>`_ - driver for Amazon Redshift, adapts
+  the existing Postgresql/psycopg2 driver.
+* `sqlalchemy-sqlany <https://github.com/sqlanywhere/sqlalchemy-sqlany>`_ - driver for SAP Sybase SQL Anywhere, developed by SAP.
+* `sqlalchemy-monetdb <https://github.com/gijzelaerr/sqlalchemy-monetdb>`_ - driver for MonetDB.
+
+Experimental / Incomplete
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Dialects that are in an incomplete state or are considered somewhat experimental.
+
+* `sqlalchemy-foundationdb <https://github.com/FoundationDB/sql-layer-adapter-sqlalchemy>`_ - driver and ORM extensions for the `Foundation DB <http://foundationdb.com/>`_ database, making use of the `FoundationDB SQL Layer <https://foundationdb.com/layers/sql/index.html>`_.
* `CALCHIPAN <https://bitbucket.org/zzzeek/calchipan/>`_ - Adapts `Pandas <http://pandas.pydata.org/>`_ dataframes to SQLAlchemy.
+* `sqlalchemy-cubrid <https://bitbucket.org/zzzeek/sqlalchemy-cubrid>`_ - driver for the CUBRID database.
+
+Attic
+^^^^^
+
+Dialects in the "attic" are those that were contributed for SQLAlchemy long ago
+but have received little attention or demand since then, and are now moved out to
+their own repositories in at best a semi-working state.
+Community members interested in these dialects should feel free to pick up on
+their current codebase and fork off into working libraries.
+
+* `sqlalchemy-access <https://bitbucket.org/zzzeek/sqlalchemy-access>`_ - driver for Microsoft Access.
+* `sqlalchemy-informixdb <https://bitbucket.org/zzzeek/sqlalchemy-informixdb>`_ - driver for the informixdb DBAPI.
+* `sqlalchemy-maxdb <https://bitbucket.org/zzzeek/sqlalchemy-maxdb>`_ - driver for the MaxDB database.
diff --git a/doc/build/dialects/informix.rst b/doc/build/dialects/informix.rst
deleted file mode 100644
index f37ae6cf5..000000000
--- a/doc/build/dialects/informix.rst
+++ /dev/null
@@ -1,11 +0,0 @@
-.. _informix_toplevel:
-
-Informix
-========
-
-.. automodule:: sqlalchemy.dialects.informix.base
-
-informixdb
-----------
-
-.. automodule:: sqlalchemy.dialects.informix.informixdb \ No newline at end of file
diff --git a/doc/build/dialects/mssql.rst b/doc/build/dialects/mssql.rst
index 615d1a11d..6173ffba1 100644
--- a/doc/build/dialects/mssql.rst
+++ b/doc/build/dialects/mssql.rst
@@ -26,75 +26,75 @@ construction arguments, are as follows:
.. autoclass:: BIT
:members: __init__
- :show-inheritance:
+
.. autoclass:: CHAR
:members: __init__
- :show-inheritance:
+
.. autoclass:: DATETIME2
:members: __init__
- :show-inheritance:
+
.. autoclass:: DATETIMEOFFSET
:members: __init__
- :show-inheritance:
+
.. autoclass:: IMAGE
:members: __init__
- :show-inheritance:
+
.. autoclass:: MONEY
:members: __init__
- :show-inheritance:
+
.. autoclass:: NCHAR
:members: __init__
- :show-inheritance:
+
.. autoclass:: NTEXT
:members: __init__
- :show-inheritance:
+
.. autoclass:: NVARCHAR
:members: __init__
- :show-inheritance:
+
.. autoclass:: REAL
:members: __init__
- :show-inheritance:
+
.. autoclass:: SMALLDATETIME
:members: __init__
- :show-inheritance:
+
.. autoclass:: SMALLMONEY
:members: __init__
- :show-inheritance:
+
.. autoclass:: SQL_VARIANT
:members: __init__
- :show-inheritance:
+
.. autoclass:: TEXT
:members: __init__
- :show-inheritance:
+
.. autoclass:: TIME
:members: __init__
- :show-inheritance:
+
.. autoclass:: TINYINT
:members: __init__
- :show-inheritance:
+
.. autoclass:: UNIQUEIDENTIFIER
:members: __init__
- :show-inheritance:
+
.. autoclass:: VARCHAR
:members: __init__
- :show-inheritance:
+
PyODBC
diff --git a/doc/build/dialects/mysql.rst b/doc/build/dialects/mysql.rst
index 1e2784554..de71a99ac 100644
--- a/doc/build/dialects/mysql.rst
+++ b/doc/build/dialects/mysql.rst
@@ -25,135 +25,135 @@ construction arguments, are as follows:
.. autoclass:: BIGINT
:members: __init__
- :show-inheritance:
+
.. autoclass:: BINARY
:members: __init__
- :show-inheritance:
+
.. autoclass:: BIT
:members: __init__
- :show-inheritance:
+
.. autoclass:: BLOB
:members: __init__
- :show-inheritance:
+
.. autoclass:: BOOLEAN
:members: __init__
- :show-inheritance:
+
.. autoclass:: CHAR
:members: __init__
- :show-inheritance:
+
.. autoclass:: DATE
:members: __init__
- :show-inheritance:
+
.. autoclass:: DATETIME
:members: __init__
- :show-inheritance:
+
.. autoclass:: DECIMAL
:members: __init__
- :show-inheritance:
+
.. autoclass:: DOUBLE
:members: __init__
- :show-inheritance:
+
.. autoclass:: ENUM
:members: __init__
- :show-inheritance:
+
.. autoclass:: FLOAT
:members: __init__
- :show-inheritance:
+
.. autoclass:: INTEGER
:members: __init__
- :show-inheritance:
+
.. autoclass:: LONGBLOB
:members: __init__
- :show-inheritance:
+
.. autoclass:: LONGTEXT
:members: __init__
- :show-inheritance:
+
.. autoclass:: MEDIUMBLOB
:members: __init__
- :show-inheritance:
+
.. autoclass:: MEDIUMINT
:members: __init__
- :show-inheritance:
+
.. autoclass:: MEDIUMTEXT
:members: __init__
- :show-inheritance:
+
.. autoclass:: NCHAR
:members: __init__
- :show-inheritance:
+
.. autoclass:: NUMERIC
:members: __init__
- :show-inheritance:
+
.. autoclass:: NVARCHAR
:members: __init__
- :show-inheritance:
+
.. autoclass:: REAL
:members: __init__
- :show-inheritance:
+
.. autoclass:: SET
:members: __init__
- :show-inheritance:
+
.. autoclass:: SMALLINT
:members: __init__
- :show-inheritance:
+
.. autoclass:: TEXT
:members: __init__
- :show-inheritance:
+
.. autoclass:: TIME
:members: __init__
- :show-inheritance:
+
.. autoclass:: TIMESTAMP
:members: __init__
- :show-inheritance:
+
.. autoclass:: TINYBLOB
:members: __init__
- :show-inheritance:
+
.. autoclass:: TINYINT
:members: __init__
- :show-inheritance:
+
.. autoclass:: TINYTEXT
:members: __init__
- :show-inheritance:
+
.. autoclass:: VARBINARY
:members: __init__
- :show-inheritance:
+
.. autoclass:: VARCHAR
:members: __init__
- :show-inheritance:
+
.. autoclass:: YEAR
:members: __init__
- :show-inheritance:
+
MySQL-Python
--------------------
diff --git a/doc/build/dialects/oracle.rst b/doc/build/dialects/oracle.rst
index 4be8c5b51..32a544877 100644
--- a/doc/build/dialects/oracle.rst
+++ b/doc/build/dialects/oracle.rst
@@ -25,31 +25,31 @@ construction arguments, are as follows:
.. autoclass:: BFILE
:members: __init__
- :show-inheritance:
+
.. autoclass:: DOUBLE_PRECISION
:members: __init__
- :show-inheritance:
+
.. autoclass:: INTERVAL
:members: __init__
- :show-inheritance:
+
.. autoclass:: NCLOB
:members: __init__
- :show-inheritance:
+
.. autoclass:: NUMBER
:members: __init__
- :show-inheritance:
+
.. autoclass:: LONG
:members: __init__
- :show-inheritance:
+
.. autoclass:: RAW
:members: __init__
- :show-inheritance:
+
cx_Oracle
----------
diff --git a/doc/build/dialects/postgresql.rst b/doc/build/dialects/postgresql.rst
index 3c151483f..05b63506e 100644
--- a/doc/build/dialects/postgresql.rst
+++ b/doc/build/dialects/postgresql.rst
@@ -15,9 +15,9 @@ they originate from :mod:`sqlalchemy.types` or from the local dialect::
from sqlalchemy.dialects.postgresql import \
ARRAY, BIGINT, BIT, BOOLEAN, BYTEA, CHAR, CIDR, DATE, \
DOUBLE_PRECISION, ENUM, FLOAT, HSTORE, INET, INTEGER, \
- INTERVAL, MACADDR, NUMERIC, REAL, SMALLINT, TEXT, TIME, \
+ INTERVAL, JSON, MACADDR, NUMERIC, REAL, SMALLINT, TEXT, TIME, \
TIMESTAMP, UUID, VARCHAR, INT4RANGE, INT8RANGE, NUMRANGE, \
- DATERANGE, TSRANGE, TSTZRANGE
+ DATERANGE, TSRANGE, TSTZRANGE, TSVECTOR
Types which are specific to PostgreSQL, or have PostgreSQL-specific
construction arguments, are as follows:
@@ -28,7 +28,7 @@ construction arguments, are as follows:
.. autoclass:: ARRAY
:members: __init__, Comparator
- :show-inheritance:
+
.. autoclass:: Any
@@ -36,51 +36,58 @@ construction arguments, are as follows:
.. autoclass:: BIT
:members: __init__
- :show-inheritance:
+
.. autoclass:: BYTEA
:members: __init__
- :show-inheritance:
+
.. autoclass:: CIDR
:members: __init__
- :show-inheritance:
+
.. autoclass:: DOUBLE_PRECISION
:members: __init__
- :show-inheritance:
+
.. autoclass:: ENUM
:members: __init__, create, drop
- :show-inheritance:
+
.. autoclass:: HSTORE
:members:
- :show-inheritance:
+
.. autoclass:: hstore
:members:
- :show-inheritance:
+
.. autoclass:: INET
:members: __init__
- :show-inheritance:
+
.. autoclass:: INTERVAL
:members: __init__
- :show-inheritance:
+
+.. autoclass:: JSON
+ :members:
+
+.. autoclass:: JSONElement
+ :members:
.. autoclass:: MACADDR
:members: __init__
- :show-inheritance:
+
.. autoclass:: REAL
:members: __init__
- :show-inheritance:
+
+.. autoclass:: TSVECTOR
+ :members: __init__
.. autoclass:: UUID
:members: __init__
- :show-inheritance:
+
Range Types
~~~~~~~~~~~
@@ -89,22 +96,22 @@ The new range column types founds in PostgreSQL 9.2 onwards are
catered for by the following types:
.. autoclass:: INT4RANGE
- :show-inheritance:
+
.. autoclass:: INT8RANGE
- :show-inheritance:
+
.. autoclass:: NUMRANGE
- :show-inheritance:
+
.. autoclass:: DATERANGE
- :show-inheritance:
+
.. autoclass:: TSRANGE
- :show-inheritance:
+
.. autoclass:: TSTZRANGE
- :show-inheritance:
+
The types above get most of their functionality from the following
mixin:
@@ -127,7 +134,6 @@ SQLAlchemy supports Postgresql EXCLUDE constraints via the
:class:`ExcludeConstraint` class:
.. autoclass:: ExcludeConstraint
- :show-inheritance:
:members: __init__
For example::
diff --git a/doc/build/faq.rst b/doc/build/faq.rst
new file mode 100644
index 000000000..dd7347b0b
--- /dev/null
+++ b/doc/build/faq.rst
@@ -0,0 +1,942 @@
+:orphan:
+
+.. _faq_toplevel:
+
+============================
+Frequently Asked Questions
+============================
+
+.. contents::
+ :local:
+ :class: faq
+ :backlinks: none
+
+
+Connections / Engines
+=====================
+
+How do I configure logging?
+---------------------------
+
+See :ref:`dbengine_logging`.
+
+How do I pool database connections? Are my connections pooled?
+----------------------------------------------------------------
+
+SQLAlchemy performs application-level connection pooling automatically
+in most cases. With the exception of SQLite, a :class:`.Engine` object
+refers to a :class:`.QueuePool` as a source of connectivity.
+
+For more detail, see :ref:`engines_toplevel` and :ref:`pooling_toplevel`.
+
+How do I pass custom connect arguments to my database API?
+-----------------------------------------------------------
+
+The :func:`.create_engine` call accepts additional arguments either
+directly via the ``connect_args`` keyword argument::
+
+ e = create_engine("mysql://scott:tiger@localhost/test",
+ connect_args={"encoding": "utf8"})
+
+Or for basic string and integer arguments, they can usually be specified
+in the query string of the URL::
+
+ e = create_engine("mysql://scott:tiger@localhost/test?encoding=utf8")
+
+.. seealso::
+
+ :ref:`custom_dbapi_args`
+
+"MySQL Server has gone away"
+----------------------------
+
+There are two major causes for this error:
+
+1. The MySQL server closes connections which have been idle for a set period
+of time, defaulting to eight hours. This can be avoided by using the ``pool_recycle``
+setting with :func:`.create_engine`, described at :ref:`mysql_connection_timeouts`.
+
+2. Usage of the MySQLdb :term:`DBAPI`, or a similar DBAPI, in a non-threadsafe manner, or in an otherwise
+inappropriate way. The MySQLdb connection object is not threadsafe - this expands
+out to any SQLAlchemy system that links to a single connection, which includes the ORM
+:class:`.Session`. For background
+on how :class:`.Session` should be used in a multithreaded environment,
+see :ref:`session_faq_threadsafe`.
+
+Why does SQLAlchemy issue so many ROLLBACKs?
+---------------------------------------------
+
+SQLAlchemy currently assumes DBAPI connections are in "non-autocommit" mode -
+this is the default behavior of the Python database API, meaning it
+must be assumed that a transaction is always in progress. The
+connection pool issues ``connection.rollback()`` when a connection is returned.
+This is so that any transactional resources remaining on the connection are
+released. On a database like Postgresql or MSSQL where table resources are
+aggressively locked, this is critical so that rows and tables don't remain
+locked within connections that are no longer in use. An application can
+otherwise hang. It's not just for locks, however, and is equally critical on
+any database that has any kind of transaction isolation, including MySQL with
+InnoDB. Any connection that is still inside an old transaction will return
+stale data, if that data was already queried on that connection within
+isolation. For background on why you might see stale data even on MySQL, see
+http://dev.mysql.com/doc/refman/5.1/en/innodb-transaction-model.html
+
+I'm on MyISAM - how do I turn it off?
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The behavior of the connection pool's connection return behavior can be
+configured using ``reset_on_return``::
+
+ from sqlalchemy import create_engine
+ from sqlalchemy.pool import QueuePool
+
+ engine = create_engine('mysql://scott:tiger@localhost/myisam_database', pool=QueuePool(reset_on_return=False))
+
+I'm on SQL Server - how do I turn those ROLLBACKs into COMMITs?
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+``reset_on_return`` accepts the values ``commit``, ``rollback`` in addition
+to ``True``, ``False``, and ``None``. Setting to ``commit`` will cause
+a COMMIT as any connection is returned to the pool::
+
+ engine = create_engine('mssql://scott:tiger@mydsn', pool=QueuePool(reset_on_return='commit'))
+
+
+I am using multiple connections with a SQLite database (typically to test transaction operation), and my test program is not working!
+----------------------------------------------------------------------------------------------------------------------------------------------------------
+
+If using a SQLite ``:memory:`` database, or a version of SQLAlchemy prior
+to version 0.7, the default connection pool is the :class:`.SingletonThreadPool`,
+which maintains exactly one SQLite connection per thread. So two
+connections in use in the same thread will actually be the same SQLite
+connection. Make sure you're not using a :memory: database and
+use :class:`.NullPool`, which is the default for non-memory databases in
+current SQLAlchemy versions.
+
+.. seealso::
+
+ :ref:`pysqlite_threading_pooling` - info on PySQLite's behavior.
+
+How do I get at the raw DBAPI connection when using an Engine?
+--------------------------------------------------------------
+
+With a regular SA engine-level Connection, you can get at a pool-proxied
+version of the DBAPI connection via the :attr:`.Connection.connection` attribute on
+:class:`.Connection`, and for the really-real DBAPI connection you can call the
+:attr:`.ConnectionFairy.connection` attribute on that - but there should never be any need to access
+the non-pool-proxied DBAPI connection, as all methods are proxied through::
+
+ engine = create_engine(...)
+ conn = engine.connect()
+ conn.connection.<do DBAPI things>
+ cursor = conn.connection.cursor(<DBAPI specific arguments..>)
+
+You must ensure that you revert any isolation level settings or other
+operation-specific settings on the connection back to normal before returning
+it to the pool.
+
+As an alternative to reverting settings, you can call the :meth:`.Connection.detach` method on
+either :class:`.Connection` or the proxied connection, which will de-associate
+the connection from the pool such that it will be closed and discarded
+when :meth:`.Connection.close` is called::
+
+ conn = engine.connect()
+ conn.detach() # detaches the DBAPI connection from the connection pool
+ conn.connection.<go nuts>
+ conn.close() # connection is closed for real, the pool replaces it with a new connection
+
+MetaData / Schema
+==================
+
+My program is hanging when I say ``table.drop()`` / ``metadata.drop_all()``
+----------------------------------------------------------------------------
+
+This usually corresponds to two conditions: 1. using PostgreSQL, which is really
+strict about table locks, and 2. you have a connection still open which
+contains locks on the table and is distinct from the connection being used for
+the DROP statement. Here's the most minimal version of the pattern::
+
+ connection = engine.connect()
+ result = connection.execute(mytable.select())
+
+ mytable.drop(engine)
+
+Above, a connection pool connection is still checked out; furthermore, the
+result object above also maintains a link to this connection. If
+"implicit execution" is used, the result will hold this connection opened until
+the result object is closed or all rows are exhausted.
+
+The call to ``mytable.drop(engine)`` attempts to emit DROP TABLE on a second
+connection procured from the :class:`.Engine` which will lock.
+
+The solution is to close out all connections before emitting DROP TABLE::
+
+ connection = engine.connect()
+ result = connection.execute(mytable.select())
+
+ # fully read result sets
+ result.fetchall()
+
+ # close connections
+ connection.close()
+
+ # now locks are removed
+ mytable.drop(engine)
+
+Does SQLAlchemy support ALTER TABLE, CREATE VIEW, CREATE TRIGGER, Schema Upgrade Functionality?
+-----------------------------------------------------------------------------------------------
+
+General ALTER support isn't present in SQLAlchemy directly. For special DDL
+on an ad-hoc basis, the :class:`.DDL` and related constructs can be used.
+See :doc:`core/ddl` for a discussion on this subject.
+
+A more comprehensive option is to use schema migration tools, such as Alembic
+or SQLAlchemy-Migrate; see :ref:`schema_migrations` for discussion on this.
+
+How can I sort Table objects in order of their dependency?
+-----------------------------------------------------------
+
+This is available via the :attr:`.MetaData.sorted_tables` attribute::
+
+ metadata = MetaData()
+ # ... add Table objects to metadata
+ ti = metadata.sorted_tables
+ for t in ti:
+ print t
+
+How can I get the CREATE TABLE/ DROP TABLE output as a string?
+---------------------------------------------------------------
+
+Modern SQLAlchemy has clause constructs which represent DDL operations. These
+can be rendered to strings like any other SQL expression::
+
+ from sqlalchemy.schema import CreateTable
+
+ print CreateTable(mytable)
+
+To get the string specific to a certain engine::
+
+ print CreateTable(mytable).compile(engine)
+
+There's also a special form of :class:`.Engine` that can let you dump an entire
+metadata creation sequence, using this recipe::
+
+ def dump(sql, *multiparams, **params):
+ print sql.compile(dialect=engine.dialect)
+ engine = create_engine('postgresql://', strategy='mock', executor=dump)
+ metadata.create_all(engine, checkfirst=False)
+
+The `Alembic <https://bitbucket.org/zzzeek/alembic>`_ tool also supports
+an "offline" SQL generation mode that renders database migrations as SQL scripts.
+
+How can I subclass Table/Column to provide certain behaviors/configurations?
+------------------------------------------------------------------------------
+
+:class:`.Table` and :class:`.Column` are not good targets for direct subclassing.
+However, there are simple ways to get on-construction behaviors using creation
+functions, and behaviors related to the linkages between schema objects such as
+constraint conventions or naming conventions using attachment events.
+An example of many of these
+techniques can be seen at `Naming Conventions <http://www.sqlalchemy.org/trac/wiki/UsageRecipes/NamingConventions>`_.
+
+
+SQL Expressions
+=================
+
+Why does ``.col.in_([])`` Produce ``col != col``? Why not ``1=0``?
+-------------------------------------------------------------------
+
+A little introduction to the issue. The IN operator in SQL, given a list of
+elements to compare against a column, generally does not accept an empty list,
+that is while it is valid to say::
+
+ column IN (1, 2, 3)
+
+it's not valid to say::
+
+ column IN ()
+
+SQLAlchemy's :meth:`.Operators.in_` operator, when given an empty list, produces this
+expression::
+
+ column != column
+
+As of version 0.6, it also produces a warning stating that a less efficient
+comparison operation will be rendered. This expression is the only one that is
+both database agnostic and produces correct results.
+
+For example, the naive approach of "just evaluate to false, by comparing 1=0
+or 1!=1", does not handle nulls properly. An expression like::
+
+ NOT column != column
+
+will not return a row when "column" is null, but an expression which does not
+take the column into account::
+
+ NOT 1=0
+
+will.
+
+Closer to the mark is the following CASE expression::
+
+ CASE WHEN column IS NOT NULL THEN 1=0 ELSE NULL END
+
+We don't use this expression due to its verbosity, and it's also not
+typically accepted by Oracle within a WHERE clause - depending
+on how you phrase it, you'll either get "ORA-00905: missing keyword" or
+"ORA-00920: invalid relational operator". It's also still less efficient than
+just rendering SQL without the clause altogether (or not issuing the SQL at
+all, if the statement is just a simple search).
+
+The best approach therefore is to avoid the usage of IN given an argument list
+of zero length. Instead, don't emit the Query in the first place, if no rows
+should be returned. The warning is best promoted to a full error condition
+using the Python warnings filter (see http://docs.python.org/library/warnings.html).
+
+ORM Configuration
+==================
+
+.. _faq_mapper_primary_key:
+
+How do I map a table that has no primary key?
+---------------------------------------------
+
+The SQLAlchemy ORM, in order to map to a particular table, needs there to be
+at least one column denoted as a primary key column; multiple-column,
+i.e. composite, primary keys are of course entirely feasible as well. These
+columns do **not** need to be actually known to the database as primary key
+columns, though it's a good idea that they are. It's only necessary that the columns
+*behave* as a primary key does, e.g. as a unique and not nullable identifier
+for a row.
+
+Most ORMs require that objects have some kind of primary key defined
+because the object in memory must correspond to a uniquely identifiable
+row in the database table; at the very least, this allows the
+object to be targeted for UPDATE and DELETE statements which will affect only
+that object's row and no other. However, the importance of the primary key
+goes far beyond that. In SQLAlchemy, all ORM-mapped objects are at all times
+linked uniquely within a :class:`.Session`
+to their specific database row using a pattern called the :term:`identity map`,
+a pattern that's central to the unit of work system employed by SQLAlchemy,
+and is also key to the most common (and not-so-common) patterns of ORM usage.
+
+
+.. note::
+
+ It's important to note that we're only talking about the SQLAlchemy ORM; an
+ application which builds on Core and deals only with :class:`.Table` objects,
+ :func:`.select` constructs and the like, **does not** need any primary key
+ to be present on or associated with a table in any way (though again, in SQL, all tables
+ should really have some kind of primary key, lest you need to actually
+ update or delete specific rows).
+
+In almost all cases, a table does have a so-called :term:`candidate key`, which is a column or series
+of columns that uniquely identify a row. If a table truly doesn't have this, and has actual
+fully duplicate rows, the table does not correspond to `first normal form <http://en.wikipedia.org/wiki/First_normal_form>`_ and cannot be mapped. Otherwise, whatever columns comprise the best candidate key can be
+applied directly to the mapper::
+
+ class SomeClass(Base):
+ __table__ = some_table_with_no_pk
+ __mapper_args__ = {
+ 'primary_key':[some_table_with_no_pk.c.uid, some_table_with_no_pk.c.bar]
+ }
+
+Better yet is when using fully declared table metadata, use the ``primary_key=True``
+flag on those columns::
+
+ class SomeClass(Base):
+ __tablename__ = "some_table_with_no_pk"
+
+ uid = Column(Integer, primary_key=True)
+ bar = Column(String, primary_key=True)
+
+All tables in a relational database should have primary keys. Even a many-to-many
+association table - the primary key would be the composite of the two association
+columns::
+
+ CREATE TABLE my_association (
+ user_id INTEGER REFERENCES user(id),
+ account_id INTEGER REFERENCES account(id),
+ PRIMARY KEY (user_id, account_id)
+ )
+
+
+How do I configure a Column that is a Python reserved word or similar?
+----------------------------------------------------------------------------
+
+Column-based attributes can be given any name desired in the mapping. See
+:ref:`mapper_column_distinct_names`.
+
+How do I get a list of all columns, relationships, mapped attributes, etc. given a mapped class?
+-------------------------------------------------------------------------------------------------
+
+This information is all available from the :class:`.Mapper` object.
+
+To get at the :class:`.Mapper` for a particular mapped class, call the
+:func:`.inspect` function on it::
+
+ from sqlalchemy import inspect
+
+ mapper = inspect(MyClass)
+
+From there, all information about the class can be acquired using such methods as:
+
+* :attr:`.Mapper.attrs` - a namespace of all mapped attributes. The attributes
+ themselves are instances of :class:`.MapperProperty`, which contain additional
+ attributes that can lead to the mapped SQL expression or column, if applicable.
+
+* :attr:`.Mapper.column_attrs` - the mapped attribute namespace
+ limited to column and SQL expression attributes. You might want to use
+ :attr:`.Mapper.columns` to get at the :class:`.Column` objects directly.
+
+* :attr:`.Mapper.relationships` - namespace of all :class:`.RelationshipProperty` attributes.
+
+* :attr:`.Mapper.all_orm_descriptors` - namespace of all mapped attributes, plus user-defined
+ attributes defined using systems such as :class:`.hybrid_property`, :class:`.AssociationProxy` and others.
+
+* :attr:`.Mapper.columns` - A namespace of :class:`.Column` objects and other named
+ SQL expressions associated with the mapping.
+
+* :attr:`.Mapper.mapped_table` - The :class:`.Table` or other selectable to which
+ this mapper is mapped.
+
+* :attr:`.Mapper.local_table` - The :class:`.Table` that is "local" to this mapper;
+ this differs from :attr:`.Mapper.mapped_table` in the case of a mapper mapped
+ using inheritance to a composed selectable.
+
+I'm using Declarative and setting primaryjoin/secondaryjoin using an ``and_()`` or ``or_()``, and I am getting an error message about foreign keys.
+------------------------------------------------------------------------------------------------------------------------------------------------------------------
+
+Are you doing this?::
+
+ class MyClass(Base):
+ # ....
+
+ foo = relationship("Dest", primaryjoin=and_("MyClass.id==Dest.foo_id", "MyClass.foo==Dest.bar"))
+
+That's an ``and_()`` of two string expressions, which SQLAlchemy cannot apply any mapping towards. Declarative allows :func:`.relationship` arguments to be specified as strings, which are converted into expression objects using ``eval()``. But this doesn't occur inside of an ``and_()`` expression - it's a special operation declarative applies only to the *entirety* of what's passed to primaryjoin or other arguments as a string::
+
+ class MyClass(Base):
+ # ....
+
+ foo = relationship("Dest", primaryjoin="and_(MyClass.id==Dest.foo_id, MyClass.foo==Dest.bar)")
+
+Or if the objects you need are already available, skip the strings::
+
+ class MyClass(Base):
+ # ....
+
+ foo = relationship(Dest, primaryjoin=and_(MyClass.id==Dest.foo_id, MyClass.foo==Dest.bar))
+
+The same idea applies to all the other arguments, such as ``foreign_keys``::
+
+ # wrong !
+ foo = relationship(Dest, foreign_keys=["Dest.foo_id", "Dest.bar_id"])
+
+ # correct !
+ foo = relationship(Dest, foreign_keys="[Dest.foo_id, Dest.bar_id]")
+
+ # also correct !
+ foo = relationship(Dest, foreign_keys=[Dest.foo_id, Dest.bar_id])
+
+ # if you're using columns from the class that you're inside of, just use the column objects !
+ class MyClass(Base):
+ foo_id = Column(...)
+ bar_id = Column(...)
+ # ...
+
+ foo = relationship(Dest, foreign_keys=[foo_id, bar_id])
+
+
+Sessions / Queries
+===================
+
+"This Session's transaction has been rolled back due to a previous exception during flush." (or similar)
+---------------------------------------------------------------------------------------------------------
+
+This is an error that occurs when a :meth:`.Session.flush` raises an exception, rolls back
+the transaction, but further commands upon the `Session` are called without an
+explicit call to :meth:`.Session.rollback` or :meth:`.Session.close`.
+
+It usually corresponds to an application that catches an exception
+upon :meth:`.Session.flush` or :meth:`.Session.commit` and
+does not properly handle the exception. For example::
+
+ from sqlalchemy import create_engine, Column, Integer
+ from sqlalchemy.orm import sessionmaker
+ from sqlalchemy.ext.declarative import declarative_base
+
+ Base = declarative_base(create_engine('sqlite://'))
+
+ class Foo(Base):
+ __tablename__ = 'foo'
+ id = Column(Integer, primary_key=True)
+
+ Base.metadata.create_all()
+
+ session = sessionmaker()()
+
+ # constraint violation
+ session.add_all([Foo(id=1), Foo(id=1)])
+
+ try:
+ session.commit()
+ except:
+ # ignore error
+ pass
+
+ # continue using session without rolling back
+ session.commit()
+
+
+The usage of the :class:`.Session` should fit within a structure similar to this::
+
+ try:
+ <use session>
+ session.commit()
+ except:
+ session.rollback()
+ raise
+ finally:
+ session.close() # optional, depends on use case
+
+Many things can cause a failure within the try/except besides flushes. You
+should always have some kind of "framing" of your session operations so that
+connection and transaction resources have a definitive boundary, otherwise
+your application doesn't really have its usage of resources under control.
+This is not to say that you need to put try/except blocks all throughout your
+application - on the contrary, this would be a terrible idea. You should
+architect your application such that there is one (or few) point(s) of
+"framing" around session operations.
+
+For a detailed discussion on how to organize usage of the :class:`.Session`,
+please see :ref:`session_faq_whentocreate`.
+
+But why does flush() insist on issuing a ROLLBACK?
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+It would be great if :meth:`.Session.flush` could partially complete and then not roll
+back, however this is beyond its current capabilities since its internal
+bookkeeping would have to be modified such that it can be halted at any time
+and be exactly consistent with what's been flushed to the database. While this
+is theoretically possible, the usefulness of the enhancement is greatly
+decreased by the fact that many database operations require a ROLLBACK in any
+case. Postgres in particular has operations which, once they fail, do not
+allow the transaction to continue::
+
+ test=> create table foo(id integer primary key);
+ NOTICE: CREATE TABLE / PRIMARY KEY will create implicit index "foo_pkey" for table "foo"
+ CREATE TABLE
+ test=> begin;
+ BEGIN
+ test=> insert into foo values(1);
+ INSERT 0 1
+ test=> commit;
+ COMMIT
+ test=> begin;
+ BEGIN
+ test=> insert into foo values(1);
+ ERROR: duplicate key value violates unique constraint "foo_pkey"
+ test=> insert into foo values(2);
+ ERROR: current transaction is aborted, commands ignored until end of transaction block
+
+What SQLAlchemy offers that solves both issues is support of SAVEPOINT, via
+:meth:`.Session.begin_nested`. Using :meth:`.Session.begin_nested`, you can frame an operation that may
+potentially fail within a transaction, and then "roll back" to the point
+before its failure while maintaining the enclosing transaction.
+
+But why isn't the one automatic call to ROLLBACK enough? Why must I ROLLBACK again?
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+This is again a matter of the :class:`.Session` providing a consistent interface and
+refusing to guess about the context in which it's being used. For example, the
+:class:`.Session` supports "framing" above within multiple levels. Such as, suppose
+you had a decorator ``@with_session()``, which did this::
+
+ def with_session(fn):
+ def go(*args, **kw):
+ session.begin(subtransactions=True)
+ try:
+ ret = fn(*args, **kw)
+ session.commit()
+ return ret
+ except:
+ session.rollback()
+ raise
+ return go
+
+The above decorator begins a transaction if one does not exist already, and
+then commits it, if it were the creator. The "subtransactions" flag means that
+if :meth:`.Session.begin` were already called by an enclosing function, nothing happens
+except a counter is incremented - this counter is decremented when :meth:`.Session.commit`
+is called and only when it goes back to zero does the actual COMMIT happen. It
+allows this usage pattern::
+
+ @with_session
+ def one():
+ # do stuff
+ two()
+
+
+ @with_session
+ def two():
+ # etc.
+
+ one()
+
+ two()
+
+``one()`` can call ``two()``, or ``two()`` can be called by itself, and the
+``@with_session`` decorator ensures the appropriate "framing" - the transaction
+boundaries stay on the outermost call level. As you can see, if ``two()`` calls
+``flush()`` which throws an exception and then issues a ``rollback()``, there will
+*always* be a second ``rollback()`` performed by the decorator, and possibly a
+third corresponding to two levels of decorator. If the ``flush()`` pushed the
+``rollback()`` all the way out to the top of the stack, and then we said that
+all remaining ``rollback()`` calls are moot, there is some silent behavior going
+on there. A poorly written enclosing method might suppress the exception, and
+then call ``commit()`` assuming nothing is wrong, and then you have a silent
+failure condition. The main reason people get this error in fact is because
+they didn't write clean "framing" code and they would have had other problems
+down the road.
+
+If you think the above use case is a little exotic, the same kind of thing
+comes into play if you want to SAVEPOINT - you might call ``begin_nested()``
+several times, and the ``commit()``/``rollback()`` calls each resolve the most
+recent ``begin_nested()``. The meaning of ``rollback()`` or ``commit()`` is
+dependent upon which enclosing block it is called, and you might have any
+sequence of ``rollback()``/``commit()`` in any order, and it's the level of nesting
+that determines their behavior.
+
+In both of the above cases, if ``flush()`` broke the nesting of transaction
+blocks, the behavior is, depending on scenario, anywhere from "magic" to
+silent failure to blatant interruption of code flow.
+
+``flush()`` makes its own "subtransaction", so that a transaction is started up
+regardless of the external transactional state, and when complete it calls
+``commit()``, or ``rollback()`` upon failure - but that ``rollback()`` corresponds
+to its own subtransaction - it doesn't want to guess how you'd like to handle
+the external "framing" of the transaction, which could be nested many levels
+with any combination of subtransactions and real SAVEPOINTs. The job of
+starting/ending the "frame" is kept consistently with the code external to the
+``flush()``, and we made a decision that this was the most consistent approach.
+
+I'm inserting 400,000 rows with the ORM and it's really slow!
+--------------------------------------------------------------
+
+The SQLAlchemy ORM uses the :term:`unit of work` pattern when synchronizing
+changes to the database. This pattern goes far beyond simple "inserts"
+of data. It includes that attributes which are assigned on objects are
+received using an attribute instrumentation system which tracks
+changes on objects as they are made, includes that all rows inserted
+are tracked in an identity map which has the effect that for each row
+SQLAlchemy must retrieve its "last inserted id" if not already given,
+and also involves that rows to be inserted are scanned and sorted for
+dependencies as needed. Objects are also subject to a fair degree of
+bookkeeping in order to keep all of this running, which for a very
+large number of rows at once can create an inordinate amount of time
+spent with large data structures, hence it's best to chunk these.
+
+Basically, unit of work is a large degree of automation in order to
+automate the task of persisting a complex object graph into a
+relational database with no explicit persistence code, and this
+automation has a price.
+
+ORMs are basically not intended for high-performance bulk inserts -
+this is the whole reason SQLAlchemy offers the Core in addition to the
+ORM as a first-class component.
+
+For the use case of fast bulk inserts, the
+SQL generation and execution system that the ORM builds on top of
+is part of the Core. Using this system directly, we can produce an INSERT that
+is competitive with using the raw database API directly.
+
+The example below illustrates time-based tests for four different
+methods of inserting rows, going from the most automated to the least.
+With cPython 2.7, runtimes observed::
+
+ classics-MacBook-Pro:sqlalchemy classic$ python test.py
+ SQLAlchemy ORM: Total time for 100000 records 14.3528850079 secs
+ SQLAlchemy ORM pk given: Total time for 100000 records 10.0164160728 secs
+ SQLAlchemy Core: Total time for 100000 records 0.775382995605 secs
+ sqlite3: Total time for 100000 records 0.676795005798 sec
+
+We can reduce the time by a factor of three using recent versions of `Pypy <http://pypy.org/>`_::
+
+ classics-MacBook-Pro:sqlalchemy classic$ /usr/local/src/pypy-2.1-beta2-osx64/bin/pypy test.py
+ SQLAlchemy ORM: Total time for 100000 records 5.88369488716 secs
+ SQLAlchemy ORM pk given: Total time for 100000 records 3.52294301987 secs
+ SQLAlchemy Core: Total time for 100000 records 0.613556146622 secs
+ sqlite3: Total time for 100000 records 0.442467927933 sec
+
+Script::
+
+ import time
+ import sqlite3
+
+ from sqlalchemy.ext.declarative import declarative_base
+ from sqlalchemy import Column, Integer, String, create_engine
+ from sqlalchemy.orm import scoped_session, sessionmaker
+
+ Base = declarative_base()
+ DBSession = scoped_session(sessionmaker())
+ engine = None
+
+ class Customer(Base):
+ __tablename__ = "customer"
+ id = Column(Integer, primary_key=True)
+ name = Column(String(255))
+
+ def init_sqlalchemy(dbname='sqlite:///sqlalchemy.db'):
+ global engine
+ engine = create_engine(dbname, echo=False)
+ DBSession.remove()
+ DBSession.configure(bind=engine, autoflush=False, expire_on_commit=False)
+ Base.metadata.drop_all(engine)
+ Base.metadata.create_all(engine)
+
+ def test_sqlalchemy_orm(n=100000):
+ init_sqlalchemy()
+ t0 = time.time()
+ for i in range(n):
+ customer = Customer()
+ customer.name = 'NAME ' + str(i)
+ DBSession.add(customer)
+ if i % 1000 == 0:
+ DBSession.flush()
+ DBSession.commit()
+ print("SQLAlchemy ORM: Total time for " + str(n) +
+ " records " + str(time.time() - t0) + " secs")
+
+ def test_sqlalchemy_orm_pk_given(n=100000):
+ init_sqlalchemy()
+ t0 = time.time()
+ for i in range(n):
+ customer = Customer(id=i+1, name="NAME " + str(i))
+ DBSession.add(customer)
+ if i % 1000 == 0:
+ DBSession.flush()
+ DBSession.commit()
+ print("SQLAlchemy ORM pk given: Total time for " + str(n) +
+ " records " + str(time.time() - t0) + " secs")
+
+ def test_sqlalchemy_core(n=100000):
+ init_sqlalchemy()
+ t0 = time.time()
+ engine.execute(
+ Customer.__table__.insert(),
+ [{"name": 'NAME ' + str(i)} for i in range(n)]
+ )
+ print("SQLAlchemy Core: Total time for " + str(n) +
+ " records " + str(time.time() - t0) + " secs")
+
+ def init_sqlite3(dbname):
+ conn = sqlite3.connect(dbname)
+ c = conn.cursor()
+ c.execute("DROP TABLE IF EXISTS customer")
+ c.execute("CREATE TABLE customer (id INTEGER NOT NULL, "
+ "name VARCHAR(255), PRIMARY KEY(id))")
+ conn.commit()
+ return conn
+
+ def test_sqlite3(n=100000, dbname='sqlite3.db'):
+ conn = init_sqlite3(dbname)
+ c = conn.cursor()
+ t0 = time.time()
+ for i in range(n):
+ row = ('NAME ' + str(i),)
+ c.execute("INSERT INTO customer (name) VALUES (?)", row)
+ conn.commit()
+ print("sqlite3: Total time for " + str(n) +
+ " records " + str(time.time() - t0) + " sec")
+
+ if __name__ == '__main__':
+ test_sqlalchemy_orm(100000)
+ test_sqlalchemy_orm_pk_given(100000)
+ test_sqlalchemy_core(100000)
+ test_sqlite3(100000)
+
+
+
+How do I make a Query that always adds a certain filter to every query?
+------------------------------------------------------------------------------------------------
+
+See the recipe at `PreFilteredQuery <http://www.sqlalchemy.org/trac/wiki/UsageRecipes/PreFilteredQuery>`_.
+
+I've created a mapping against an Outer Join, and while the query returns rows, no objects are returned. Why not?
+------------------------------------------------------------------------------------------------------------------
+
+Rows returned by an outer join may contain NULL for part of the primary key,
+as the primary key is the composite of both tables. The :class:`.Query` object ignores incoming rows
+that don't have an acceptable primary key. Based on the setting of the ``allow_partial_pks``
+flag on :func:`.mapper`, a primary key is accepted if the value has at least one non-NULL
+value, or alternatively if the value has no NULL values. See ``allow_partial_pks``
+at :func:`.mapper`.
+
+
+I'm using ``joinedload()`` or ``lazy=False`` to create a JOIN/OUTER JOIN and SQLAlchemy is not constructing the correct query when I try to add a WHERE, ORDER BY, LIMIT, etc. (which relies upon the (OUTER) JOIN)
+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+
+The joins generated by joined eager loading are only used to fully load related
+collections, and are designed to have no impact on the primary results of the query.
+Since they are anonymously aliased, they cannot be referenced directly.
+
+For detail on this behavior, see :doc:`orm/loading`.
+
+Query has no ``__len__()``, why not?
+------------------------------------
+
+The Python ``__len__()`` magic method applied to an object allows the ``len()``
+builtin to be used to determine the length of the collection. It's intuitive
+that a SQL query object would link ``__len__()`` to the :meth:`.Query.count`
+method, which emits a `SELECT COUNT`. The reason this is not possible is
+because evaluating the query as a list would incur two SQL calls instead of
+one::
+
+    class Iterates(object):
+        def __len__(self):
+            print("LEN!")
+            return 5
+
+        def __iter__(self):
+            print("ITER!")
+            return iter([1, 2, 3, 4, 5])
+
+ list(Iterates())
+
+output::
+
+ ITER!
+ LEN!
+
+How Do I use Textual SQL with ORM Queries?
+-------------------------------------------
+
+See:
+
+* :ref:`orm_tutorial_literal_sql` - Ad-hoc textual blocks with :class:`.Query`
+
+* :ref:`session_sql_expressions` - Using :class:`.Session` with textual SQL directly.
+
+I'm calling ``Session.delete(myobject)`` and it isn't removed from the parent collection!
+------------------------------------------------------------------------------------------
+
+See :ref:`session_deleting_from_collections` for a description of this behavior.
+
+why isn't my ``__init__()`` called when I load objects?
+-------------------------------------------------------
+
+See :ref:`mapping_constructors` for a description of this behavior.
+
+how do I use ON DELETE CASCADE with SA's ORM?
+----------------------------------------------
+
+SQLAlchemy will always issue UPDATE or DELETE statements for dependent
+rows which are currently loaded in the :class:`.Session`. For rows which
+are not loaded, it will by default issue SELECT statements to load
+those rows and update/delete those as well; in other words it assumes
+there is no ON DELETE CASCADE configured.
+To configure SQLAlchemy to cooperate with ON DELETE CASCADE, see
+:ref:`passive_deletes`.
+
+I set the "foo_id" attribute on my instance to "7", but the "foo" attribute is still ``None`` - shouldn't it have loaded Foo with id #7?
+----------------------------------------------------------------------------------------------------------------------------------------------------
+
+The ORM is not constructed in such a way as to support
+immediate population of relationships driven from foreign
+key attribute changes - instead, it is designed to work the
+other way around - foreign key attributes are handled by the
+ORM behind the scenes, the end user sets up object
+relationships naturally. Therefore, the recommended way to
+set ``o.foo`` is to do just that - set it!::
+
+ foo = Session.query(Foo).get(7)
+ o.foo = foo
+ Session.commit()
+
+Manipulation of foreign key attributes is of course entirely legal. However,
+setting a foreign-key attribute to a new value currently does not trigger
+an "expire" event of the :func:`.relationship` in which it's involved. This means
+that for the following sequence::
+
+ o = Session.query(SomeClass).first()
+ assert o.foo is None # accessing an un-set attribute sets it to None
+ o.foo_id = 7
+
+``o.foo`` is initialized to ``None`` when we first accessed it. Setting
+``o.foo_id = 7`` will have the value of "7" as pending, but no flush
+has occurred - so ``o.foo`` is still ``None``::
+
+ # attribute is already set to None, has not been
+ # reconciled with o.foo_id = 7 yet
+ assert o.foo is None
+
+For ``o.foo`` to load based on the foreign key mutation is usually achieved
+naturally after the commit, which both flushes the new foreign key value
+and expires all state::
+
+ Session.commit() # expires all attributes
+
+ foo_7 = Session.query(Foo).get(7)
+
+ assert o.foo is foo_7 # o.foo lazyloads on access
+
+A more minimal operation is to expire the attribute individually - this can
+be performed for any :term:`persistent` object using :meth:`.Session.expire`::
+
+ o = Session.query(SomeClass).first()
+ o.foo_id = 7
+ Session.expire(o, ['foo']) # object must be persistent for this
+
+ foo_7 = Session.query(Foo).get(7)
+
+ assert o.foo is foo_7 # o.foo lazyloads on access
+
+Note that if the object is not persistent but present in the :class:`.Session`,
+it's known as :term:`pending`. This means the row for the object has not been
+INSERTed into the database yet. For such an object, setting ``foo_id`` does not
+have meaning until the row is inserted; otherwise there is no row yet::
+
+ new_obj = SomeClass()
+ new_obj.foo_id = 7
+
+ Session.add(new_obj)
+
+ # accessing an un-set attribute sets it to None
+ assert new_obj.foo is None
+
+    Session.flush()  # emits INSERT
+
+    # expire this because we already set .foo to None
+    Session.expire(new_obj, ['foo'])
+
+    assert new_obj.foo is foo_7  # now it loads
+
+
+.. topic:: Attribute loading for non-persistent objects
+
+ One variant on the "pending" behavior above is if we use the flag
+ ``load_on_pending`` on :func:`.relationship`. When this flag is set, the
+ lazy loader will emit for ``new_obj.foo`` before the INSERT proceeds; another
+ variant of this is to use the :meth:`.Session.enable_relationship_loading`
+ method, which can "attach" an object to a :class:`.Session` in such a way that
+ many-to-one relationships load as according to foreign key attributes
+ regardless of the object being in any particular state.
+ Both techniques are **not recommended for general use**; they were added to suit
+   specific programming scenarios encountered by users which involve the repurposing
+ of the ORM's usual object states.
+
+The recipe `ExpireRelationshipOnFKChange <http://www.sqlalchemy.org/trac/wiki/UsageRecipes/ExpireRelationshipOnFKChange>`_ features an example using SQLAlchemy events
+in order to coordinate the setting of foreign key attributes with many-to-one
+relationships.
+
+Is there a way to automagically have only unique keywords (or other kinds of objects) without doing a query for the keyword and getting a reference to the row containing that keyword?
+---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+
+When people read the many-to-many example in the docs, they get hit with the
+fact that if you create the same ``Keyword`` twice, it gets put in the DB twice.
+Which is somewhat inconvenient.
+
+This `UniqueObject <http://www.sqlalchemy.org/trac/wiki/UsageRecipes/UniqueObject>`_ recipe was created to address this issue.
+
+
diff --git a/doc/build/glossary.rst b/doc/build/glossary.rst
index 564668691..defeabcff 100644
--- a/doc/build/glossary.rst
+++ b/doc/build/glossary.rst
@@ -1,15 +1,11 @@
+:orphan:
+
.. _glossary:
========
Glossary
========
-.. note::
-
- The Glossary is a brand new addition to the documentation. While
- sparse at the moment we hope to fill it up with plenty of new
- terms soon!
-
.. glossary::
:sorted:
@@ -95,8 +91,26 @@ Glossary
class which each represent a particular database column
or relationship to a related class.
+ identity map
+ A mapping between Python objects and their database identities.
+ The identity map is a collection that's associated with an
+ ORM :term:`session` object, and maintains a single instance
+ of every database object keyed to its identity. The advantage
+ to this pattern is that all operations which occur for a particular
+ database identity are transparently coordinated onto a single
+ object instance. When using an identity map in conjunction with
+ an :term:`isolated` transaction, having a reference
+ to an object that's known to have a particular primary key can
+ be considered from a practical standpoint to be a
+ proxy to the actual database row.
+
+ .. seealso::
+
+ Martin Fowler - Identity Map - http://martinfowler.com/eaaCatalog/identityMap.html
+
lazy load
lazy loads
+ lazy loading
In object relational mapping, a "lazy load" refers to an
attribute that does not contain its database-side value
for some period of time, typically when the object is
@@ -249,6 +263,15 @@ Glossary
`PEP 249 - Python Database API Specification v2.0 <http://www.python.org/dev/peps/pep-0249/>`_
+ domain model
+
+ A domain model in problem solving and software engineering is a conceptual model of all the topics related to a specific problem. It describes the various entities, their attributes, roles, and relationships, plus the constraints that govern the problem domain.
+
+ (via Wikipedia)
+
+ .. seealso::
+
+ `Domain Model (wikipedia) <http://en.wikipedia.org/wiki/Domain_model>`_
unit of work
This pattern is where the system transparently keeps
@@ -263,6 +286,16 @@ Glossary
:doc:`orm/session`
+ Session
+ The container or scope for ORM database operations. Sessions
+ load instances from the database, track changes to mapped
+ instances and persist changes in a single unit of work when
+ flushed.
+
+ .. seealso::
+
+ :doc:`orm/session`
+
columns clause
The portion of the ``SELECT`` statement which enumerates the
SQL expressions to be returned in the result set. The expressions
@@ -411,3 +444,599 @@ Glossary
query via its ``FROM``
clause is not possible, because the correlation can only proceed once the
original source rows from the enclosing statement's FROM clause are available.
+
+
+ ACID
+ ACID model
+ An acronym for "Atomicity, Consistency, Isolation,
+ Durability"; a set of properties that guarantee that
+ database transactions are processed reliably.
+ (via Wikipedia)
+
+ .. seealso::
+
+ :term:`atomicity`
+
+ :term:`consistency`
+
+ :term:`isolation`
+
+ :term:`durability`
+
+ http://en.wikipedia.org/wiki/ACID_Model
+
+ atomicity
+ Atomicity is one of the components of the :term:`ACID` model,
+ and requires that each transaction is "all or nothing":
+ if one part of the transaction fails, the entire transaction
+ fails, and the database state is left unchanged. An atomic
+ system must guarantee atomicity in each and every situation,
+ including power failures, errors, and crashes.
+ (via Wikipedia)
+
+ .. seealso::
+
+ :term:`ACID`
+
+ http://en.wikipedia.org/wiki/Atomicity_(database_systems)
+
+ consistency
+        Consistency is one of the components of the :term:`ACID` model,
+ and ensures that any transaction will
+ bring the database from one valid state to another. Any data
+ written to the database must be valid according to all defined
+ rules, including but not limited to :term:`constraints`, cascades,
+ triggers, and any combination thereof.
+ (via Wikipedia)
+
+ .. seealso::
+
+ :term:`ACID`
+
+ http://en.wikipedia.org/wiki/Consistency_(database_systems)
+
+ isolation
+ isolated
+ The isolation property of the :term:`ACID` model
+ ensures that the concurrent execution
+ of transactions results in a system state that would be
+ obtained if transactions were executed serially, i.e. one
+ after the other. Each transaction must execute in total
+ isolation i.e. if T1 and T2 execute concurrently then each
+ should remain independent of the other.
+ (via Wikipedia)
+
+ .. seealso::
+
+ :term:`ACID`
+
+ http://en.wikipedia.org/wiki/Isolation_(database_systems)
+
+ durability
+ Durability is a property of the :term:`ACID` model
+ which means that once a transaction has been committed,
+ it will remain so, even in the event of power loss, crashes,
+ or errors. In a relational database, for instance, once a
+ group of SQL statements execute, the results need to be stored
+ permanently (even if the database crashes immediately
+ thereafter).
+ (via Wikipedia)
+
+ .. seealso::
+
+ :term:`ACID`
+
+ http://en.wikipedia.org/wiki/Durability_(database_systems)
+
+ RETURNING
+ This is a non-SQL standard clause provided in various forms by
+ certain backends, which provides the service of returning a result
+ set upon execution of an INSERT, UPDATE or DELETE statement. Any set
+ of columns from the matched rows can be returned, as though they were
+ produced from a SELECT statement.
+
+ The RETURNING clause provides both a dramatic performance boost to
+ common update/select scenarios, including retrieval of inline- or
+ default- generated primary key values and defaults at the moment they
+ were created, as well as a way to get at server-generated
+ default values in an atomic way.
+
+ An example of RETURNING, idiomatic to Postgresql, looks like::
+
+ INSERT INTO user_account (name) VALUES ('new name') RETURNING id, timestamp
+
+ Above, the INSERT statement will provide upon execution a result set
+ which includes the values of the columns ``user_account.id`` and
+ ``user_account.timestamp``, which above should have been generated as default
+ values as they are not included otherwise (but note any series of columns
+ or SQL expressions can be placed into RETURNING, not just default-value columns).
+
+ The backends that currently support
+ RETURNING or a similar construct are Postgresql, SQL Server, Oracle,
+ and Firebird. The Postgresql and Firebird implementations are generally
+ full featured, whereas the implementations of SQL Server and Oracle
+ have caveats. On SQL Server, the clause is known as "OUTPUT INSERTED"
+ for INSERT and UPDATE statements and "OUTPUT DELETED" for DELETE statements;
+ the key caveat is that triggers are not supported in conjunction with this
+ keyword. On Oracle, it is known as "RETURNING...INTO", and requires that the
+        value be placed into an OUT parameter, meaning not only is the syntax awkward,
+ but it can also only be used for one row at a time.
+
+ SQLAlchemy's :meth:`.UpdateBase.returning` system provides a layer of abstraction
+ on top of the RETURNING systems of these backends to provide a consistent
+ interface for returning columns. The ORM also includes many optimizations
+ that make use of RETURNING when available.
+
+ one to many
+ A style of :func:`~sqlalchemy.orm.relationship` which links
+ the primary key of the parent mapper's table to the foreign
+ key of a related table. Each unique parent object can
+ then refer to zero or more unique related objects.
+
+ The related objects in turn will have an implicit or
+ explicit :term:`many to one` relationship to their parent
+ object.
+
+ An example one to many schema (which, note, is identical
+ to the :term:`many to one` schema):
+
+ .. sourcecode:: sql
+
+ CREATE TABLE department (
+ id INTEGER PRIMARY KEY,
+ name VARCHAR(30)
+ )
+
+ CREATE TABLE employee (
+ id INTEGER PRIMARY KEY,
+ name VARCHAR(30),
+ dep_id INTEGER REFERENCES department(id)
+ )
+
+ The relationship from ``department`` to ``employee`` is
+ one to many, since many employee records can be associated with a
+ single department. A SQLAlchemy mapping might look like::
+
+ class Department(Base):
+ __tablename__ = 'department'
+ id = Column(Integer, primary_key=True)
+ name = Column(String(30))
+ employees = relationship("Employee")
+
+ class Employee(Base):
+ __tablename__ = 'employee'
+ id = Column(Integer, primary_key=True)
+ name = Column(String(30))
+ dep_id = Column(Integer, ForeignKey('department.id'))
+
+ .. seealso::
+
+ :term:`relationship`
+
+ :term:`many to one`
+
+ :term:`backref`
+
+ many to one
+ A style of :func:`~sqlalchemy.orm.relationship` which links
+ a foreign key in the parent mapper's table to the primary
+ key of a related table. Each parent object can
+ then refer to exactly zero or one related object.
+
+ The related objects in turn will have an implicit or
+ explicit :term:`one to many` relationship to any number
+ of parent objects that refer to them.
+
+ An example many to one schema (which, note, is identical
+ to the :term:`one to many` schema):
+
+ .. sourcecode:: sql
+
+ CREATE TABLE department (
+ id INTEGER PRIMARY KEY,
+ name VARCHAR(30)
+ )
+
+ CREATE TABLE employee (
+ id INTEGER PRIMARY KEY,
+ name VARCHAR(30),
+ dep_id INTEGER REFERENCES department(id)
+ )
+
+
+ The relationship from ``employee`` to ``department`` is
+ many to one, since many employee records can be associated with a
+ single department. A SQLAlchemy mapping might look like::
+
+ class Department(Base):
+ __tablename__ = 'department'
+ id = Column(Integer, primary_key=True)
+ name = Column(String(30))
+
+ class Employee(Base):
+ __tablename__ = 'employee'
+ id = Column(Integer, primary_key=True)
+ name = Column(String(30))
+ dep_id = Column(Integer, ForeignKey('department.id'))
+ department = relationship("Department")
+
+ .. seealso::
+
+ :term:`relationship`
+
+ :term:`one to many`
+
+ :term:`backref`
+
+ backref
+ bidirectional relationship
+ An extension to the :term:`relationship` system whereby two
+ distinct :func:`~sqlalchemy.orm.relationship` objects can be
+ mutually associated with each other, such that they coordinate
+ in memory as changes occur to either side. The most common
+ way these two relationships are constructed is by using
+ the :func:`~sqlalchemy.orm.relationship` function explicitly
+ for one side and specifying the ``backref`` keyword to it so that
+ the other :func:`~sqlalchemy.orm.relationship` is created
+ automatically. We can illustrate this against the example we've
+ used in :term:`one to many` as follows::
+
+ class Department(Base):
+ __tablename__ = 'department'
+ id = Column(Integer, primary_key=True)
+ name = Column(String(30))
+ employees = relationship("Employee", backref="department")
+
+ class Employee(Base):
+ __tablename__ = 'employee'
+ id = Column(Integer, primary_key=True)
+ name = Column(String(30))
+ dep_id = Column(Integer, ForeignKey('department.id'))
+
+ A backref can be applied to any relationship, including one to many,
+ many to one, and :term:`many to many`.
+
+ .. seealso::
+
+ :term:`relationship`
+
+ :term:`one to many`
+
+ :term:`many to one`
+
+ :term:`many to many`
+
+ many to many
+ A style of :func:`sqlalchemy.orm.relationship` which links two tables together
+ via an intermediary table in the middle. Using this configuration,
+ any number of rows on the left side may refer to any number of
+ rows on the right, and vice versa.
+
+ A schema where employees can be associated with projects:
+
+ .. sourcecode:: sql
+
+ CREATE TABLE employee (
+ id INTEGER PRIMARY KEY,
+ name VARCHAR(30)
+ )
+
+ CREATE TABLE project (
+ id INTEGER PRIMARY KEY,
+ name VARCHAR(30)
+ )
+
+            CREATE TABLE employee_project (
+                employee_id INTEGER,
+                project_id INTEGER,
+                PRIMARY KEY (employee_id, project_id),
+                FOREIGN KEY (employee_id) REFERENCES employee(id),
+                FOREIGN KEY (project_id) REFERENCES project(id)
+            )
+
+ Above, the ``employee_project`` table is the many-to-many table,
+ which naturally forms a composite primary key consisting
+ of the primary key from each related table.
+
+ In SQLAlchemy, the :func:`sqlalchemy.orm.relationship` function
+ can represent this style of relationship in a mostly
+ transparent fashion, where the many-to-many table is
+ specified using plain table metadata::
+
+ class Employee(Base):
+ __tablename__ = 'employee'
+
+                id = Column(Integer, primary_key=True)
+ name = Column(String(30))
+
+ projects = relationship(
+ "Project",
+ secondary=Table('employee_project', Base.metadata,
+ Column("employee_id", Integer, ForeignKey('employee.id'),
+ primary_key=True),
+ Column("project_id", Integer, ForeignKey('project.id'),
+ primary_key=True)
+ ),
+ backref="employees"
+ )
+
+ class Project(Base):
+ __tablename__ = 'project'
+
+                id = Column(Integer, primary_key=True)
+ name = Column(String(30))
+
+ Above, the ``Employee.projects`` and back-referencing ``Project.employees``
+ collections are defined::
+
+ proj = Project(name="Client A")
+
+ emp1 = Employee(name="emp1")
+ emp2 = Employee(name="emp2")
+
+ proj.employees.extend([emp1, emp2])
+
+ .. seealso::
+
+ :term:`association relationship`
+
+ :term:`relationship`
+
+ :term:`one to many`
+
+ :term:`many to one`
+
+ relationship
+ relationships
+ A connecting unit between two mapped classes, corresponding
+ to some relationship between the two tables in the database.
+
+ The relationship is defined using the SQLAlchemy function
+ :func:`~sqlalchemy.orm.relationship`. Once created, SQLAlchemy
+ inspects the arguments and underlying mappings involved
+ in order to classify the relationship as one of three types:
+ :term:`one to many`, :term:`many to one`, or :term:`many to many`.
+ With this classification, the relationship construct
+ handles the task of persisting the appropriate linkages
+ in the database in response to in-memory object associations,
+ as well as the job of loading object references and collections
+ into memory based on the current linkages in the
+ database.
+
+ .. seealso::
+
+ :ref:`relationship_config_toplevel`
+
+ association relationship
+ A two-tiered :term:`relationship` which links two tables
+ together using an association table in the middle. The
+ association relationship differs from a :term:`many to many`
+ relationship in that the many-to-many table is mapped
+ by a full class, rather than invisibly handled by the
+ :func:`sqlalchemy.orm.relationship` construct as in the case
+ with many-to-many, so that additional attributes are
+ explicitly available.
+
+ For example, if we wanted to associate employees with
+ projects, also storing the specific role for that employee
+ with the project, the relational schema might look like:
+
+ .. sourcecode:: sql
+
+ CREATE TABLE employee (
+ id INTEGER PRIMARY KEY,
+ name VARCHAR(30)
+ )
+
+ CREATE TABLE project (
+ id INTEGER PRIMARY KEY,
+ name VARCHAR(30)
+ )
+
+            CREATE TABLE employee_project (
+                employee_id INTEGER,
+                project_id INTEGER,
+                role_name VARCHAR(30),
+                PRIMARY KEY (employee_id, project_id),
+                FOREIGN KEY (employee_id) REFERENCES employee(id),
+                FOREIGN KEY (project_id) REFERENCES project(id)
+            )
+
+ A SQLAlchemy declarative mapping for the above might look like::
+
+ class Employee(Base):
+ __tablename__ = 'employee'
+
+                id = Column(Integer, primary_key=True)
+ name = Column(String(30))
+
+
+ class Project(Base):
+ __tablename__ = 'project'
+
+                id = Column(Integer, primary_key=True)
+ name = Column(String(30))
+
+
+ class EmployeeProject(Base):
+ __tablename__ = 'employee_project'
+
+ employee_id = Column(Integer, ForeignKey('employee.id'), primary_key=True)
+ project_id = Column(Integer, ForeignKey('project.id'), primary_key=True)
+ role_name = Column(String(30))
+
+ project = relationship("Project", backref="project_employees")
+ employee = relationship("Employee", backref="employee_projects")
+
+
+ Employees can be added to a project given a role name::
+
+ proj = Project(name="Client A")
+
+ emp1 = Employee(name="emp1")
+ emp2 = Employee(name="emp2")
+
+            proj.project_employees.extend([
+                EmployeeProject(employee=emp1, role_name="tech lead"),
+                EmployeeProject(employee=emp2, role_name="account executive")
+            ])
+
+ .. seealso::
+
+ :term:`many to many`
+
+ constraint
+ constraints
+ constrained
+ Rules established within a relational database that ensure
+ the validity and consistency of data. Common forms
+ of constraint include :term:`primary key constraint`,
+ :term:`foreign key constraint`, and :term:`check constraint`.
+
+ candidate key
+
+ A :term:`relational algebra` term referring to an attribute or set
+ of attributes that form a uniquely identifying key for a
+ row. A row may have more than one candidate key, each of which
+ is suitable for use as the primary key of that row.
+ The primary key of a table is always a candidate key.
+
+ .. seealso::
+
+ :term:`primary key`
+
+ http://en.wikipedia.org/wiki/Candidate_key
+
+ primary key
+ primary key constraint
+
+ A :term:`constraint` that uniquely defines the characteristics
+ of each :term:`row`. The primary key has to consist of
+ characteristics that cannot be duplicated by any other row.
+ The primary key may consist of a single attribute or
+ multiple attributes in combination.
+ (via Wikipedia)
+
+ The primary key of a table is typically, though not always,
+ defined within the ``CREATE TABLE`` :term:`DDL`:
+
+ .. sourcecode:: sql
+
+ CREATE TABLE employee (
+ emp_id INTEGER,
+ emp_name VARCHAR(30),
+ dep_id INTEGER,
+ PRIMARY KEY (emp_id)
+ )
+
+ .. seealso::
+
+ http://en.wikipedia.org/wiki/Primary_Key
+
+ foreign key constraint
+ A referential constraint between two tables. A foreign key is a field or set of fields in a
+ relational table that matches a :term:`candidate key` of another table.
+ The foreign key can be used to cross-reference tables.
+ (via Wikipedia)
+
+ A foreign key constraint can be added to a table in standard
+ SQL using :term:`DDL` like the following:
+
+ .. sourcecode:: sql
+
+ ALTER TABLE employee ADD CONSTRAINT dep_id_fk
+ FOREIGN KEY (dep_id) REFERENCES department (dep_id)
+
+ .. seealso::
+
+ http://en.wikipedia.org/wiki/Foreign_key_constraint
+
+ check constraint
+
+ A check constraint is a
+ condition that defines valid data when adding or updating an
+ entry in a table of a relational database. A check constraint
+ is applied to each row in the table.
+
+ (via Wikipedia)
+
+ A check constraint can be added to a table in standard
+ SQL using :term:`DDL` like the following:
+
+ .. sourcecode:: sql
+
+ ALTER TABLE distributors ADD CONSTRAINT zipchk CHECK (char_length(zipcode) = 5);
+
+ .. seealso::
+
+ http://en.wikipedia.org/wiki/Check_constraint
+
+ unique constraint
+ unique key index
+ A unique key index can uniquely identify each row of data
+ values in a database table. A unique key index comprises a
+ single column or a set of columns in a single database table.
+ No two distinct rows or data records in a database table can
+ have the same data value (or combination of data values) in
+ those unique key index columns if NULL values are not used.
+ Depending on its design, a database table may have many unique
+ key indexes but at most one primary key index.
+
+ (via Wikipedia)
+
+ .. seealso::
+
+ http://en.wikipedia.org/wiki/Unique_key#Defining_unique_keys
+
+ transient
+ This describes one of the four major object states which
+ an object can have within a :term:`session`; a transient object
+ is a new object that doesn't have any database identity
+ and has not been associated with a session yet. When the
+ object is added to the session, it moves to the
+ :term:`pending` state.
+
+ .. seealso::
+
+ :ref:`session_object_states`
+
+ pending
+ This describes one of the four major object states which
+ an object can have within a :term:`session`; a pending object
+ is a new object that doesn't have any database identity,
+ but has been recently associated with a session. When
+ the session emits a flush and the row is inserted, the
+ object moves to the :term:`persistent` state.
+
+ .. seealso::
+
+ :ref:`session_object_states`
+
+ persistent
+ This describes one of the four major object states which
+ an object can have within a :term:`session`; a persistent object
+ is an object that has a database identity (i.e. a primary key)
+ and is currently associated with a session. Any object
+ that was previously :term:`pending` and has now been inserted
+ is in the persistent state, as is any object that's
+ been loaded by the session from the database. When a
+ persistent object is removed from a session, it is known
+ as :term:`detached`.
+
+ .. seealso::
+
+ :ref:`session_object_states`
+
+ detached
+ This describes one of the four major object states which
+ an object can have within a :term:`session`; a detached object
+ is an object that has a database identity (i.e. a primary key)
+ but is not associated with any session. An object that
+ was previously :term:`persistent` and was removed from its
+ session either because it was expunged, or the owning
+ session was closed, moves into the detached state.
+ The detached state is generally used when objects are being
+ moved between sessions or when being moved to/from an external
+ object cache.
+
+ .. seealso::
+
+ :ref:`session_object_states`
diff --git a/doc/build/index.rst b/doc/build/index.rst
index c8ccc430c..716a83d0e 100644
--- a/doc/build/index.rst
+++ b/doc/build/index.rst
@@ -1,3 +1,5 @@
+:orphan:
+
.. _index_toplevel:
========================
@@ -11,6 +13,7 @@ A high level view and getting set up.
:ref:`Overview <overview>` |
:ref:`Installation Guide <installation>` |
+:doc:`Frequently Asked Questions <faq>` |
:doc:`Migration from 0.8 <changelog/migration_09>` |
:doc:`Glossary <glossary>` |
:doc:`Changelog catalog <changelog/index>`
@@ -36,6 +39,7 @@ of Python objects, proceed first to the tutorial.
:doc:`Declarative Extension <orm/extensions/declarative>` |
:doc:`Association Proxy <orm/extensions/associationproxy>` |
:doc:`Hybrid Attributes <orm/extensions/hybrid>` |
+ :doc:`Automap <orm/extensions/automap>` (**new**) |
:doc:`Mutable Scalars <orm/extensions/mutable>` |
:doc:`Ordered List <orm/extensions/orderinglist>`
@@ -74,11 +78,11 @@ are documented here. In contrast to the ORM's domain-centric mode of usage, the
:doc:`Connection Pooling <core/pooling>`
* **Schema Definition:**
- :ref:`Tables and Columns <metadata_describing>` |
- :ref:`Database Introspection (Reflection) <metadata_reflection>` |
- :ref:`Insert/Update Defaults <metadata_defaults>` |
- :ref:`Constraints and Indexes <metadata_constraints>` |
- :ref:`Using Data Definition Language (DDL) <metadata_ddl>`
+ :ref:`Tables and Columns <metadata_describing_toplevel>` |
+ :ref:`Database Introspection (Reflection) <metadata_reflection_toplevel>` |
+ :ref:`Insert/Update Defaults <metadata_defaults_toplevel>` |
+ :ref:`Constraints and Indexes <metadata_constraints_toplevel>` |
+ :ref:`Using Data Definition Language (DDL) <metadata_ddl_toplevel>`
* **Datatypes:**
:ref:`Overview <types_toplevel>` |
diff --git a/doc/build/intro.rst b/doc/build/intro.rst
index c5e7f7425..588701ce2 100644
--- a/doc/build/intro.rst
+++ b/doc/build/intro.rst
@@ -74,13 +74,12 @@ Supported Platforms
SQLAlchemy has been tested against the following platforms:
-* cPython since version 2.5, through the 2.xx series
+* cPython since version 2.6, through the 2.xx series
* cPython version 3, throughout all 3.xx series
-* `Jython <http://www.jython.org/>`_ 2.5 or greater
-* `Pypy <http://pypy.org/>`_ 1.5 or greater
+* `Pypy <http://pypy.org/>`_ 2.1 or greater
-.. versionchanged:: 0.8
- Python 2.5 is now the minimum Python version supported.
+.. versionchanged:: 0.9
+ Python 2.6 is now the minimum Python version supported.
Supported Installation Methods
-------------------------------
@@ -91,13 +90,9 @@ SQLAlchemy supports installation using standard Python "distutils" or
* **Plain Python Distutils** - SQLAlchemy can be installed with a clean
Python install using the services provided via `Python Distutils <http://docs.python.org/distutils/>`_,
using the ``setup.py`` script. The C extensions as well as Python 3 builds are supported.
-* **Standard Setuptools** - When using `setuptools <http://pypi.python.org/pypi/setuptools/>`_,
+* **Setuptools or Distribute** - When using `setuptools <http://pypi.python.org/pypi/setuptools/>`_,
SQLAlchemy can be installed via ``setup.py`` or ``easy_install``, and the C
- extensions are supported. setuptools is not supported on Python 3 at the time
- of this writing.
-* **Distribute** - With `distribute <http://pypi.python.org/pypi/distribute/>`_,
- SQLAlchemy can be installed via ``setup.py`` or ``easy_install``, and the C
- extensions as well as Python 3 builds are supported.
+ extensions are supported.
* **pip** - `pip <http://pypi.python.org/pypi/pip/>`_ is an installer that
rides on top of ``setuptools`` or ``distribute``, replacing the usage
of ``easy_install``. It is often preferred for its simpler mode of usage.
@@ -117,6 +112,11 @@ Or with pip::
This command will download the latest version of SQLAlchemy from the `Python
Cheese Shop <http://pypi.python.org/pypi/SQLAlchemy>`_ and install it to your system.
+.. note::
+
+ Beta releases of SQLAlchemy may not be present on Pypi, and may instead
+ require a direct download first.
+
Installing using setup.py
----------------------------------
@@ -128,8 +128,12 @@ Installing the C Extensions
----------------------------------
SQLAlchemy includes C extensions which provide an extra speed boost for
-dealing with result sets. Currently, the extensions are only supported on the
-2.xx series of cPython, not Python 3 or Pypy.
+dealing with result sets. The extensions are supported on both the 2.xx
+and 3.xx series of cPython.
+
+.. versionchanged:: 0.9.0
+
+ The C extensions now compile on Python 3 as well as Python 2.
setup.py will automatically build the extensions if an appropriate platform is
detected. If the build of the C extensions fails, due to missing compiler or
@@ -155,11 +159,12 @@ Or with pip::
Installing on Python 3
----------------------------------
-SQLAlchemy ships as Python 2 code. For Python 3 usage, the ``setup.py`` script
-will invoke the Python ``2to3`` tool on the build, plugging in an extra
-"preprocessor" as well. The 2to3 step works with Python distutils
-(part of the standard Python install) and Distribute - it will **not**
-work with a non-Distribute setuptools installation.
+SQLAlchemy runs directly on Python 2 or Python 3, and can be installed in
+either environment without any adjustments or code conversion.
+
+.. versionchanged:: 0.9.0 Python 3 is now supported in place with no 2to3 step
+ required.
+
Installing a Database API
----------------------------------
@@ -172,7 +177,7 @@ the available DBAPIs for each database, including external links.
Checking the Installed SQLAlchemy Version
------------------------------------------
-This documentation covers SQLAlchemy version 0.8. If you're working on a
+This documentation covers SQLAlchemy version 0.9. If you're working on a
system that already has SQLAlchemy installed, check the version from your
Python prompt like this:
@@ -180,11 +185,11 @@ Python prompt like this:
>>> import sqlalchemy
>>> sqlalchemy.__version__ # doctest: +SKIP
- 0.8.0
+ 0.9.0
.. _migration:
-0.7 to 0.8 Migration
+0.8 to 0.9 Migration
=====================
-Notes on what's changed from 0.7 to 0.8 is available here at :doc:`changelog/migration_08`.
+Notes on what's changed from 0.8 to 0.9 are available here at :doc:`changelog/migration_09`.
diff --git a/doc/build/orm/deprecated.rst b/doc/build/orm/deprecated.rst
index 943059747..8d277011c 100644
--- a/doc/build/orm/deprecated.rst
+++ b/doc/build/orm/deprecated.rst
@@ -1,3 +1,5 @@
+:orphan:
+
.. _dep_interfaces_orm_toplevel:
Deprecated ORM Event Interfaces
diff --git a/doc/build/orm/events.rst b/doc/build/orm/events.rst
index 235861952..2be74bf57 100644
--- a/doc/build/orm/events.rst
+++ b/doc/build/orm/events.rst
@@ -39,6 +39,8 @@ Session Events
Instrumentation Events
-----------------------
+.. automodule:: sqlalchemy.orm.instrumentation
+
.. autoclass:: sqlalchemy.orm.events.InstrumentationEvents
:members:
diff --git a/doc/build/orm/examples.rst b/doc/build/orm/examples.rst
index e0c87dadf..99ca4bb8d 100644
--- a/doc/build/orm/examples.rst
+++ b/doc/build/orm/examples.rst
@@ -1,140 +1,134 @@
.. _examples_toplevel:
-Examples
-========
+============
+ORM Examples
+============
The SQLAlchemy distribution includes a variety of code examples illustrating
a select set of patterns, some typical and some not so typical. All are
runnable and can be found in the ``/examples`` directory of the
-distribution. Each example contains a README in its ``__init__.py`` file,
-each of which are listed below.
+distribution. Descriptions and source code for all can be found here.
Additional SQLAlchemy examples, some user contributed, are available on the
wiki at `<http://www.sqlalchemy.org/trac/wiki/UsageRecipes>`_.
+
+Mapping Recipes
+===============
+
.. _examples_adjacencylist:
Adjacency List
--------------
-Location: /examples/adjacency_list/
-
-.. automodule:: adjacency_list
+.. automodule:: examples.adjacency_list
.. _examples_associations:
Associations
------------
-Location: /examples/association/
+.. automodule:: examples.association
-.. automodule:: association
+Directed Graphs
+---------------
+.. automodule:: examples.graphs
-.. _examples_instrumentation:
+Dynamic Relations as Dictionaries
+------------------------------------
-Attribute Instrumentation
--------------------------
+.. automodule:: examples.dynamic_dict
-Location: /examples/custom_attributes/
+.. _examples_generic_associations:
-.. automodule:: custom_attributes
+Generic Associations
+------------------------
-.. _examples_caching:
+.. automodule:: examples.generic_associations
-Dogpile Caching
----------------
+Large Collections
+------------------------
-Location: /examples/dogpile_caching/
+.. automodule:: examples.large_collection
-.. automodule:: dogpile_caching
+Nested Sets
+------------
-Directed Graphs
----------------
+.. automodule:: examples.nested_sets
-Location: /examples/graphs/
+.. _examples_relationships:
-.. automodule:: graphs
+Relationship Join Conditions
+----------------------------
-Dynamic Relations as Dictionaries
-----------------------------------
+.. automodule:: examples.join_conditions
-Location: /examples/dynamic_dict/
+.. _examples_xmlpersistence:
-.. automodule:: dynamic_dict
+XML Persistence
+------------------------
-.. _examples_generic_associations:
+.. automodule:: examples.elementtree
-Generic Associations
---------------------
+Versioning Objects
+------------------------
-Location: /examples/generic_associations
+Versioning with a History Table
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-.. automodule:: generic_associations
+.. automodule:: examples.versioned_history
-.. _examples_sharding:
+Versioning using Temporal Rows
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Horizontal Sharding
--------------------
+.. automodule:: examples.versioned_rows
-Location: /examples/sharding
+Vertical Attribute Mapping
+------------------------------------
-.. automodule:: sharding
+.. automodule:: examples.vertical
-Inheritance Mappings
---------------------
-Location: /examples/inheritance/
+Inheritance Mapping Recipes
+============================
-.. automodule:: inheritance
+Basic Inheritance Mappings
+----------------------------------
-Large Collections
------------------
+.. automodule:: examples.inheritance
-Location: /examples/large_collection/
+Special APIs
+============
-.. automodule:: large_collection
+.. _examples_instrumentation:
-Nested Sets
------------
+Attribute Instrumentation
+------------------------------------
-Location: /examples/nested_sets/
+.. automodule:: examples.custom_attributes
-.. automodule:: nested_sets
+.. _examples_sharding:
-Polymorphic Associations
+Horizontal Sharding
------------------------
-See :ref:`examples_generic_associations` for a modern version of polymorphic associations.
-
-.. _examples_postgis:
-
-PostGIS Integration
--------------------
-
-Location: /examples/postgis
-
-.. automodule:: postgis
-
-Versioned Objects
------------------
-
-Location: /examples/versioning
+.. automodule:: examples.sharding
-.. automodule:: versioning
+Extending the ORM
+=================
-Vertical Attribute Mapping
---------------------------
+.. _examples_caching:
-Location: /examples/vertical
+Dogpile Caching
+------------------------
-.. automodule:: vertical
+.. automodule:: examples.dogpile_caching
-.. _examples_xmlpersistence:
+.. _examples_postgis:
-XML Persistence
----------------
+PostGIS Integration
+------------------------
-Location: /examples/elementtree/
+.. automodule:: examples.postgis
-.. automodule:: elementtree
diff --git a/doc/build/orm/exceptions.rst b/doc/build/orm/exceptions.rst
index 1dde4248f..f95b26eed 100644
--- a/doc/build/orm/exceptions.rst
+++ b/doc/build/orm/exceptions.rst
@@ -2,5 +2,4 @@ ORM Exceptions
==============
.. automodule:: sqlalchemy.orm.exc
- :show-inheritance:
:members: \ No newline at end of file
diff --git a/doc/build/orm/extensions/associationproxy.rst b/doc/build/orm/extensions/associationproxy.rst
index 90bb29ebf..9b25c4a68 100644
--- a/doc/build/orm/extensions/associationproxy.rst
+++ b/doc/build/orm/extensions/associationproxy.rst
@@ -15,6 +15,7 @@ the construction of sophisticated collections and dictionary
views of virtually any geometry, persisted to the database using
standard, transparently configured relational patterns.
+
Simplifying Scalar Collections
------------------------------
diff --git a/doc/build/orm/extensions/automap.rst b/doc/build/orm/extensions/automap.rst
new file mode 100644
index 000000000..d1d200609
--- /dev/null
+++ b/doc/build/orm/extensions/automap.rst
@@ -0,0 +1,22 @@
+.. _automap_toplevel:
+
+Automap
+=======
+
+.. automodule:: sqlalchemy.ext.automap
+
+API Reference
+-------------
+
+.. autofunction:: automap_base
+
+.. autoclass:: AutomapBase
+ :members:
+
+.. autofunction:: classname_for_table
+
+.. autofunction:: name_for_scalar_relationship
+
+.. autofunction:: name_for_collection_relationship
+
+.. autofunction:: generate_relationship
diff --git a/doc/build/orm/extensions/declarative.rst b/doc/build/orm/extensions/declarative.rst
index 35895e8df..636bb451b 100644
--- a/doc/build/orm/extensions/declarative.rst
+++ b/doc/build/orm/extensions/declarative.rst
@@ -10,6 +10,8 @@ API Reference
.. autofunction:: declarative_base
+.. autofunction:: as_declarative
+
.. autoclass:: declared_attr
.. autofunction:: sqlalchemy.ext.declarative.api._declarative_constructor
@@ -27,3 +29,4 @@ API Reference
.. autoclass:: ConcreteBase
.. autoclass:: DeferredReflection
+ :members:
diff --git a/doc/build/orm/extensions/hybrid.rst b/doc/build/orm/extensions/hybrid.rst
index 3ee76fd9b..16cdafebc 100644
--- a/doc/build/orm/extensions/hybrid.rst
+++ b/doc/build/orm/extensions/hybrid.rst
@@ -15,7 +15,7 @@ API Reference
:members:
.. autoclass:: Comparator
- :show-inheritance:
+
.. autodata:: HYBRID_METHOD
diff --git a/doc/build/orm/extensions/instrumentation.rst b/doc/build/orm/extensions/instrumentation.rst
index 94946b1ae..16084e319 100644
--- a/doc/build/orm/extensions/instrumentation.rst
+++ b/doc/build/orm/extensions/instrumentation.rst
@@ -10,6 +10,8 @@ API Reference
.. autodata:: INSTRUMENTATION_MANAGER
+.. autoclass:: sqlalchemy.orm.instrumentation.InstrumentationFactory
+
.. autoclass:: InstrumentationManager
:members:
:undoc-members:
@@ -17,7 +19,6 @@ API Reference
.. autodata:: instrumentation_finders
.. autoclass:: ExtendedInstrumentationRegistry
- :show-inheritance:
:members:
diff --git a/doc/build/orm/extensions/mutable.rst b/doc/build/orm/extensions/mutable.rst
index ba3e10542..14875cd3c 100644
--- a/doc/build/orm/extensions/mutable.rst
+++ b/doc/build/orm/extensions/mutable.rst
@@ -12,15 +12,14 @@ API Reference
:members: _parents, coerce
.. autoclass:: Mutable
- :show-inheritance:
:members:
+ :inherited-members:
+ :private-members:
.. autoclass:: MutableComposite
- :show-inheritance:
:members:
.. autoclass:: MutableDict
- :show-inheritance:
:members:
diff --git a/doc/build/orm/inheritance.rst b/doc/build/orm/inheritance.rst
index a82fcf675..e6c1e378b 100644
--- a/doc/build/orm/inheritance.rst
+++ b/doc/build/orm/inheritance.rst
@@ -478,8 +478,11 @@ Below we load ``Company`` rows while eagerly loading related ``Engineer``
objects, querying the ``employee`` and ``engineer`` tables simultaneously::
session.query(Company).\
- options(subqueryload_all(Company.employees.of_type(Engineer),
- Engineer.machines))
+ options(
+ subqueryload(Company.employees.of_type(Engineer)).
+ subqueryload("machines")
+ )
.. versionadded:: 0.8
:func:`.joinedload` and :func:`.subqueryload` support
diff --git a/doc/build/orm/internals.rst b/doc/build/orm/internals.rst
index 38efdb08a..250bc777d 100644
--- a/doc/build/orm/internals.rst
+++ b/doc/build/orm/internals.rst
@@ -10,62 +10,75 @@ sections, are listed here.
.. autoclass:: sqlalchemy.orm.state.AttributeState
:members:
- :show-inheritance:
+
:inherited-members:
.. autoclass:: sqlalchemy.orm.instrumentation.ClassManager
:members:
- :show-inheritance:
+
:inherited-members:
.. autoclass:: sqlalchemy.orm.properties.ColumnProperty
:members:
- :show-inheritance:
+
:inherited-members:
.. autoclass:: sqlalchemy.orm.descriptor_props.CompositeProperty
:members:
- :show-inheritance:
+
+
+.. autoclass:: sqlalchemy.orm.attributes.Event
+ :members:
+
.. autoclass:: sqlalchemy.orm.interfaces._InspectionAttr
:members:
- :show-inheritance:
+
.. autoclass:: sqlalchemy.orm.state.InstanceState
:members:
- :show-inheritance:
+
.. autoclass:: sqlalchemy.orm.attributes.InstrumentedAttribute
:members: __get__, __set__, __delete__
- :show-inheritance:
+
:undoc-members:
+.. autodata:: sqlalchemy.orm.interfaces.MANYTOONE
+
+.. autodata:: sqlalchemy.orm.interfaces.MANYTOMANY
+
.. autoclass:: sqlalchemy.orm.interfaces.MapperProperty
:members:
- :show-inheritance:
.. autodata:: sqlalchemy.orm.interfaces.NOT_EXTENSION
+
+.. autodata:: sqlalchemy.orm.interfaces.ONETOMANY
+
.. autoclass:: sqlalchemy.orm.interfaces.PropComparator
:members:
- :show-inheritance:
+
:inherited-members:
.. autoclass:: sqlalchemy.orm.properties.RelationshipProperty
:members:
- :show-inheritance:
+
:inherited-members:
.. autoclass:: sqlalchemy.orm.descriptor_props.SynonymProperty
:members:
- :show-inheritance:
+
:inherited-members:
.. autoclass:: sqlalchemy.orm.query.QueryContext
:members:
- :show-inheritance:
+
.. autoclass:: sqlalchemy.orm.attributes.QueryableAttribute
:members:
- :show-inheritance:
:inherited-members:
+
+.. autoclass:: sqlalchemy.orm.session.UOWTransaction
+ :members:
+
diff --git a/doc/build/orm/loading.rst b/doc/build/orm/loading.rst
index e84179558..7be25de4a 100644
--- a/doc/build/orm/loading.rst
+++ b/doc/build/orm/loading.rst
@@ -1,3 +1,5 @@
+.. _loading_toplevel:
+
.. currentmodule:: sqlalchemy.orm
Relationship Loading Techniques
@@ -82,24 +84,25 @@ The default **loader strategy** for any :func:`~sqlalchemy.orm.relationship`
is configured by the ``lazy`` keyword argument, which defaults to ``select`` - this indicates
a "select" statement .
Below we set it as ``joined`` so that the ``children`` relationship is eager
-loading, using a join:
-
-.. sourcecode:: python+sql
+loaded using a JOIN::
# load the 'children' collection using LEFT OUTER JOIN
- mapper(Parent, parent_table, properties={
- 'children': relationship(Child, lazy='joined')
- })
+ class Parent(Base):
+ __tablename__ = 'parent'
+
+ id = Column(Integer, primary_key=True)
+ children = relationship("Child", lazy='joined')
We can also set it to eagerly load using a second query for all collections,
-using ``subquery``:
+using ``subquery``::
-.. sourcecode:: python+sql
+ # load the 'children' collection using a second query which
+ # JOINS to a subquery of the original
+ class Parent(Base):
+ __tablename__ = 'parent'
- # load the 'children' attribute using a join to a subquery
- mapper(Parent, parent_table, properties={
- 'children': relationship(Child, lazy='subquery')
- })
+ id = Column(Integer, primary_key=True)
+ children = relationship("Child", lazy='subquery')
When querying, all three choices of loader strategy are available on a
per-query basis, using the :func:`~sqlalchemy.orm.joinedload`,
@@ -117,42 +120,37 @@ query options:
# set children to load eagerly with a second statement
session.query(Parent).options(subqueryload('children')).all()
-To reference a relationship that is deeper than one level, separate the names by periods:
-
-.. sourcecode:: python+sql
-
- session.query(Parent).options(joinedload('foo.bar.bat')).all()
-
-When using dot-separated names with :func:`~sqlalchemy.orm.joinedload` or
-:func:`~sqlalchemy.orm.subqueryload`, the option applies **only** to the actual
-attribute named, and **not** its ancestors. For example, suppose a mapping
-from ``A`` to ``B`` to ``C``, where the relationships, named ``atob`` and
-``btoc``, are both lazy-loading. A statement like the following:
-
-.. sourcecode:: python+sql
-
- session.query(A).options(joinedload('atob.btoc')).all()
-
-will load only ``A`` objects to start. When the ``atob`` attribute on each
-``A`` is accessed, the returned ``B`` objects will *eagerly* load their ``C``
-objects.
-
-Therefore, to modify the eager load to load both ``atob`` as well as ``btoc``,
-place joinedloads for both:
-
-.. sourcecode:: python+sql
-
- session.query(A).options(joinedload('atob'), joinedload('atob.btoc')).all()
-
-or more succinctly just use :func:`~sqlalchemy.orm.joinedload_all` or
-:func:`~sqlalchemy.orm.subqueryload_all`:
-
-.. sourcecode:: python+sql
-
- session.query(A).options(joinedload_all('atob.btoc')).all()
-
-There are two other loader strategies available, **dynamic loading** and **no
-loading**; these are described in :ref:`largecollections`.
+Loading Along Paths
+-------------------
+
+To reference a relationship that is deeper than one level, method chaining
+may be used. The object returned by all loader options is an instance of
+the :class:`.Load` class, which provides a so-called "generative" interface::
+
+ session.query(Parent).options(
+ joinedload('foo').
+ joinedload('bar').
+ joinedload('bat')
+ ).all()
+
+Using method chaining, the loader style of each link in the path is explicitly
+stated. To navigate along a path without changing the existing loader style
+of a particular attribute, the :func:`.defaultload` method/function may be used::
+
+ session.query(A).options(
+ defaultload("atob").joinedload("btoc")
+ ).all()
+
+.. versionchanged:: 0.9.0
+ The previous approach of specifying dot-separated paths within loader
+ options has been superseded by the less ambiguous approach of the
+ :class:`.Load` object and related methods. With this system, the user
+ specifies the style of loading for each link along the chain explicitly,
+ rather than guessing between options like ``joinedload()`` vs. ``joinedload_all()``.
+ The :func:`.orm.defaultload` is provided to allow path navigation without
+ modification of existing loader options. The dot-separated path system
+ as well as the ``_all()`` functions will remain available for backwards-
+ compatibility indefinitely.
Default Loading Strategies
--------------------------
@@ -175,8 +173,9 @@ of all :func:`.relationship` constructs in use for that query,
except for those which use the ``'dynamic'`` style of loading.
If some relationships specify
``lazy='joined'`` or ``lazy='subquery'``, for example,
-using ``default_strategy(lazy='select')`` will unilaterally
-cause all those relationships to use ``'select'`` loading.
+using ``lazyload('*')`` will unilaterally
+cause all those relationships to use ``'select'`` loading, e.g. emit a
+SELECT statement when each attribute is accessed.
The option does not supersede loader options stated in the
query, such as :func:`.eagerload`,
@@ -191,6 +190,22 @@ for the ``widget`` relationship::
If multiple ``'*'`` options are passed, the last one overrides
those previously passed.
+Per-Entity Default Loading Strategies
+-------------------------------------
+
+.. versionadded:: 0.9.0
+ Per-entity default loader strategies.
+
+A variant of the default loader strategy is the ability to set the strategy
+on a per-entity basis. For example, if querying for ``User`` and ``Address``,
+we can instruct all relationships on ``Address`` only to use lazy loading
+by first applying the :class:`.Load` object, then specifying the ``*`` as a
+chained option::
+
+ session.query(User, Address).options(Load(Address).lazyload('*'))
+
+Above, all relationships on ``Address`` will be set to a lazy load.
+
.. _zen_of_eager_loading:
The Zen of Eager Loading
@@ -402,31 +417,27 @@ For this SQLAlchemy supplies the :func:`~sqlalchemy.orm.contains_eager()`
option. This option is used in the same manner as the
:func:`~sqlalchemy.orm.joinedload()` option except it is assumed that the
:class:`~sqlalchemy.orm.query.Query` will specify the appropriate joins
-explicitly. Below it's used with a ``from_statement`` load::
+explicitly. Below, we specify a join between ``User`` and ``Address``
+and additionally establish this as the basis for eager loading of ``User.addresses``::
- # mapping is the users->addresses mapping
- mapper(User, users_table, properties={
- 'addresses': relationship(Address, addresses_table)
- })
+ class User(Base):
+ __tablename__ = 'user'
+ id = Column(Integer, primary_key=True)
+ addresses = relationship("Address")
- # define a query on USERS with an outer join to ADDRESSES
- statement = users_table.outerjoin(addresses_table).select().apply_labels()
+ class Address(Base):
+ __tablename__ = 'address'
- # construct a Query object which expects the "addresses" results
- query = session.query(User).options(contains_eager('addresses'))
-
- # get results normally
- r = query.from_statement(statement)
+ # ...
-It works just as well with an inline :meth:`.Query.join` or
-:meth:`.Query.outerjoin`::
+ q = session.query(User).join(User.addresses).\
+ options(contains_eager(User.addresses))
- session.query(User).outerjoin(User.addresses).options(contains_eager(User.addresses)).all()
If the "eager" portion of the statement is "aliased", the ``alias`` keyword
argument to :func:`~sqlalchemy.orm.contains_eager` may be used to indicate it.
-This is a string alias name or reference to an actual
-:class:`~sqlalchemy.sql.expression.Alias` (or other selectable) object:
+This is sent as a reference to an :func:`.aliased` or :class:`.Alias`
+construct:
.. sourcecode:: python+sql
@@ -444,10 +455,23 @@ This is a string alias name or reference to an actual
adalias.user_id AS adalias_user_id, adalias.email_address AS adalias_email_address, (...other columns...)
FROM users LEFT OUTER JOIN email_addresses AS email_addresses_1 ON users.user_id = email_addresses_1.user_id
-The ``alias`` argument is used only as a source of columns to match up to the
-result set. You can use it to match up the result to arbitrary label
-names in a string SQL statement, by passing a :func:`.select` which links those
-labels to the mapped :class:`.Table`::
+The path given as the argument to :func:`.contains_eager` needs
+to be a full path from the starting entity. For example if we were loading
+``Users->orders->Order->items->Item``, the string version would look like::
+
+ query(User).options(contains_eager('orders').contains_eager('items'))
+
+Or using the class-bound descriptor::
+
+ query(User).options(contains_eager(User.orders).contains_eager(Order.items))
+
+Advanced Usage with Arbitrary Statements
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The ``alias`` argument can be more creatively used, in that it can be made
+to represent any set of arbitrary names to match up into a statement.
+Below it is linked to a :func:`.select` which links a set of column objects
+to a string SQL statement::
# label the columns of the addresses table
eager_columns = select([
@@ -463,24 +487,17 @@ labels to the mapped :class:`.Table`::
"from users left outer join addresses on users.user_id=addresses.user_id").\
options(contains_eager(User.addresses, alias=eager_columns))
-The path given as the argument to :func:`.contains_eager` needs
-to be a full path from the starting entity. For example if we were loading
-``Users->orders->Order->items->Item``, the string version would look like::
- query(User).options(contains_eager('orders', 'items'))
-
-Or using the class-bound descriptor::
-
- query(User).options(contains_eager(User.orders, Order.items))
-
-Relation Loader API
---------------------
+Relationship Loader API
+------------------------
.. autofunction:: contains_alias
.. autofunction:: contains_eager
+.. autofunction:: defaultload
+
.. autofunction:: eagerload
.. autofunction:: eagerload_all
diff --git a/doc/build/orm/mapper_config.rst b/doc/build/orm/mapper_config.rst
index 2560c6f41..17bd31a6f 100644
--- a/doc/build/orm/mapper_config.rst
+++ b/doc/build/orm/mapper_config.rst
@@ -88,13 +88,19 @@ named according to the name of the column itself (specifically, the ``key``
attribute of :class:`.Column`). This behavior can be
modified in several ways.
+.. _mapper_column_distinct_names:
+
Naming Columns Distinctly from Attribute Names
----------------------------------------------
A mapping by default shares the same name for a
-:class:`.Column` as that of the mapped attribute.
-The name assigned to the :class:`.Column` can be different,
-as we illustrate here in a Declarative mapping::
+:class:`.Column` as that of the mapped attribute - specifically
+it matches the :attr:`.Column.key` attribute on :class:`.Column`, which
+by default is the same as the :attr:`.Column.name`.
+
+The name assigned to the Python attribute which maps to
+:class:`.Column` can be different from either :attr:`.Column.name` or :attr:`.Column.key`
+just by assigning it that way, as we illustrate here in a Declarative mapping::
class User(Base):
__tablename__ = 'user'
@@ -120,14 +126,50 @@ with the desired key::
'name': user_table.c.user_name,
})
+In the next section we'll examine the usage of ``.key`` more closely.
+
+.. _mapper_automated_reflection_schemes:
+
+Automating Column Naming Schemes from Reflected Tables
+------------------------------------------------------
+
+In the previous section :ref:`mapper_column_distinct_names`, we showed how
+a :class:`.Column` explicitly mapped to a class can have a different attribute
+name than the column. But what if we aren't listing out :class:`.Column`
+objects explicitly, and instead are automating the production of :class:`.Table`
+objects using reflection (e.g. as described in :ref:`metadata_reflection_toplevel`)?
+In this case we can make use of the :meth:`.DDLEvents.column_reflect` event
+to intercept the production of :class:`.Column` objects and provide them
+with the :attr:`.Column.key` of our choice::
+
+ @event.listens_for(Table, "column_reflect")
+ def column_reflect(inspector, table, column_info):
+ # set column.key = "attr_<lower_case_name>"
+ column_info['key'] = "attr_%s" % column_info['name'].lower()
+
+With the above event, the reflection of :class:`.Column` objects will be intercepted
+with our event that adds a new ".key" element, such as in a mapping as below::
+
+ class MyClass(Base):
+ __table__ = Table("some_table", Base.metadata,
+ autoload=True, autoload_with=some_engine)
+
+If we want to qualify our event to only react for the specific :class:`.MetaData`
+object above, we can check for it in our event::
+
+ @event.listens_for(Table, "column_reflect")
+ def column_reflect(inspector, table, column_info):
+ if table.metadata is Base.metadata:
+ # set column.key = "attr_<lower_case_name>"
+ column_info['key'] = "attr_%s" % column_info['name'].lower()
+
.. _column_prefix:
Naming All Columns with a Prefix
--------------------------------
-A way to automate the assignment of a prefix to
-the mapped attribute names relative to the column name
-is to use ``column_prefix``::
+A quick approach to prefix column names, typically when mapping
+to an existing :class:`.Table` object, is to use ``column_prefix``::
class User(Base):
__table__ = user_table
@@ -136,9 +178,10 @@ is to use ``column_prefix``::
The above will place attribute names such as ``_user_id``, ``_user_name``,
``_password`` etc. on the mapped ``User`` class.
-The classical version of the above::
+This approach is uncommon in modern usage. For dealing with reflected
+tables, a more flexible approach is the one described in
+:ref:`mapper_automated_reflection_schemes`.
- mapper(User, user_table, column_prefix='_')
Using column_property for column level options
-----------------------------------------------
@@ -308,23 +351,75 @@ separately when it is accessed::
photo3 = deferred(Column(Binary), group='photos')
You can defer or undefer columns at the :class:`~sqlalchemy.orm.query.Query`
-level using the :func:`.orm.defer` and :func:`.orm.undefer` query options::
+level using options, including :func:`.orm.defer` and :func:`.orm.undefer`::
from sqlalchemy.orm import defer, undefer
query = session.query(Book)
- query.options(defer('summary')).all()
- query.options(undefer('excerpt')).all()
+ query = query.options(defer('summary'))
+ query = query.options(undefer('excerpt'))
+ query.all()
-And an entire "deferred group", i.e. which uses the ``group`` keyword argument
-to :func:`.orm.deferred`, can be undeferred using
-:func:`.orm.undefer_group`, sending in the group name::
+:func:`.orm.deferred` attributes which are marked with a "group" can be undeferred
+using :func:`.orm.undefer_group`, sending in the group name::
from sqlalchemy.orm import undefer_group
query = session.query(Book)
query.options(undefer_group('photos')).all()
+Load Only Columns
+------------------
+
+An arbitrary set of columns can be selected as "load only" columns, which will
+be loaded while deferring all other columns on a given entity, using :func:`.orm.load_only`::
+
+ from sqlalchemy.orm import load_only
+
+ session.query(Book).options(load_only("summary", "excerpt"))
+
+.. versionadded:: 0.9.0
+
+Deferred Loading with Multiple Entities
+---------------------------------------
+
+To specify column deferral options within a :class:`.Query` that loads multiple types
+of entity, the :class:`.Load` object can specify which parent entity to start with::
+
+ from sqlalchemy.orm import Load
+
+ query = session.query(Book, Author).join(Book.author)
+ query = query.options(
+ Load(Book).load_only("summary", "excerpt"),
+ Load(Author).defer("bio")
+ )
+
+To specify column deferral options along the path of various relationships,
+the options support chaining, where the loading style of each relationship
+is specified first, then is chained to the deferral options. Such as, to load
+``Book`` instances, then joined-eager-load the ``Author``, then apply deferral
+options to the ``Author`` entity::
+
+ from sqlalchemy.orm import joinedload
+
+ query = session.query(Book)
+ query = query.options(
+ joinedload(Book.author).load_only("summary", "excerpt"),
+ )
+
+In the case where the loading style of parent relationships should be left
+unchanged, use :func:`.orm.defaultload`::
+
+ from sqlalchemy.orm import defaultload
+
+ query = session.query(Book)
+ query = query.options(
+ defaultload(Book.author).load_only("summary", "excerpt"),
+ )
+
+.. versionadded:: 0.9.0 support for :class:`.Load` and other options which
+ allow for better targeting of deferral options.
+
Column Deferral API
-------------------
@@ -332,6 +427,8 @@ Column Deferral API
.. autofunction:: defer
+.. autofunction:: load_only
+
.. autofunction:: undefer
.. autofunction:: undefer_group
@@ -570,7 +667,7 @@ issued when the ORM is populating the object::
assert '@' in address
return address
-Validators also receive collection events, when items are added to a
+Validators also receive collection append events, when items are added to a
collection::
from sqlalchemy.orm import validates
@@ -585,6 +682,51 @@ collection::
assert '@' in address.email
return address
+
+The validation function by default does not get emitted for collection
+remove events, as the typical expectation is that a value being discarded
+doesn't require validation. However, :func:`.validates` supports reception
+of these events by specifying ``include_removes=True`` to the decorator. When
+this flag is set, the validation function must receive an additional boolean
+argument which if ``True`` indicates that the operation is a removal::
+
+ from sqlalchemy.orm import validates
+
+ class User(Base):
+ # ...
+
+ addresses = relationship("Address")
+
+ @validates('addresses', include_removes=True)
+ def validate_address(self, key, address, is_remove):
+ if is_remove:
+ raise ValueError(
+ "not allowed to remove items from the collection")
+ else:
+ assert '@' in address.email
+ return address
+
+The case where mutually dependent validators are linked via a backref
+can also be tailored, using the ``include_backrefs=False`` option; this option,
+when set to ``False``, prevents a validation function from emitting if the
+event occurs as a result of a backref::
+
+ from sqlalchemy.orm import validates
+
+ class User(Base):
+ # ...
+
+ addresses = relationship("Address", backref='user')
+
+ @validates('addresses', include_backrefs=False)
+ def validate_address(self, key, address):
+ assert '@' in address.email
+ return address
+
+Above, if we were to assign to ``Address.user`` as in ``some_address.user = some_user``,
+the ``validate_address()`` function would *not* be emitted, even though an append
+occurs to ``some_user.addresses`` - the event is caused by a backref.
+
Note that the :func:`~.validates` decorator is a convenience function built on
top of attribute events. An application that requires more control over
configuration of attribute change behavior can make use of this system,
@@ -592,13 +734,13 @@ described at :class:`~.AttributeEvents`.
.. autofunction:: validates
-.. _synonyms:
+.. _mapper_hybrids:
Using Descriptors and Hybrids
-----------------------------
A more comprehensive way to produce modified behavior for an attribute is to
-use descriptors. These are commonly used in Python using the ``property()``
+use :term:`descriptors`. These are commonly used in Python using the ``property()``
function. The standard SQLAlchemy technique for descriptors is to create a
plain descriptor, and to have it read/write from a mapped attribute with a
different name. Below we illustrate this using Python 2.6-style properties::
@@ -722,14 +864,88 @@ attribute, a SQL function is rendered which produces the same effect:
Read more about Hybrids at :ref:`hybrids_toplevel`.
+.. _synonyms:
+
Synonyms
--------
-Synonyms are a mapper-level construct that applies expression behavior to a descriptor
-based attribute.
+Synonyms are a mapper-level construct that allow any attribute on a class
+to "mirror" another attribute that is mapped.
-.. versionchanged:: 0.7
- The functionality of synonym is superceded as of 0.7 by hybrid attributes.
+In the most basic sense, the synonym is an easy way to make a certain
+attribute available by an additional name::
+
+ class MyClass(Base):
+ __tablename__ = 'my_table'
+
+ id = Column(Integer, primary_key=True)
+ job_status = Column(String(50))
+
+ status = synonym("job_status")
+
+The above class ``MyClass`` has two attributes, ``.job_status`` and
+``.status`` that will behave as one attribute, both at the expression
+level::
+
+ >>> print MyClass.job_status == 'some_status'
+ my_table.job_status = :job_status_1
+
+ >>> print MyClass.status == 'some_status'
+ my_table.job_status = :job_status_1
+
+and at the instance level::
+
+ >>> m1 = MyClass(status='x')
+ >>> m1.status, m1.job_status
+ ('x', 'x')
+
+ >>> m1.job_status = 'y'
+ >>> m1.status, m1.job_status
+ ('y', 'y')
+
+The :func:`.synonym` can be used for any kind of mapped attribute that
+subclasses :class:`.MapperProperty`, including mapped columns and relationships,
+as well as synonyms themselves.
+
+Beyond a simple mirror, :func:`.synonym` can also be made to reference
+a user-defined :term:`descriptor`. We can supply our
+``status`` synonym with a ``@property``::
+
+ class MyClass(Base):
+ __tablename__ = 'my_table'
+
+ id = Column(Integer, primary_key=True)
+ status = Column(String(50))
+
+ @property
+ def job_status(self):
+ return "Status: " + self.status
+
+ job_status = synonym("status", descriptor=job_status)
+
+When using Declarative, the above pattern can be expressed more succinctly
+using the :func:`.synonym_for` decorator::
+
+ from sqlalchemy.ext.declarative import synonym_for
+
+ class MyClass(Base):
+ __tablename__ = 'my_table'
+
+ id = Column(Integer, primary_key=True)
+ status = Column(String(50))
+
+ @synonym_for("status")
+ @property
+ def job_status(self):
+ return "Status: " + self.status
+
+While the :func:`.synonym` is useful for simple mirroring, the use case
+of augmenting attribute behavior with descriptors is better handled in modern
+usage using the :ref:`hybrid attribute <mapper_hybrids>` feature, which
+is more oriented towards Python descriptors. Technically, a :func:`.synonym`
+can do everything that a :class:`.hybrid_property` can do, as it also supports
+injection of custom SQL capabilities, but the hybrid is more straightforward
+to use in more complex situations.
.. autofunction:: synonym
@@ -770,6 +986,10 @@ class you provide.
in-place mutation is no longer automatic; see the section below on
enabling mutability to support tracking of in-place changes.
+.. versionchanged:: 0.9
+ Composites will return their object-form, rather than as individual columns,
+ when used in a column-oriented :class:`.Query` construct. See :ref:`migration_2824`.
+
A simple example represents pairs of columns as a ``Point`` object.
``Point`` represents such a pair as ``.x`` and ``.y``::
@@ -909,6 +1129,54 @@ the same expression that the base "greater than" does::
end = composite(Point, x2, y2,
comparator_factory=PointComparator)
+.. _bundles:
+
+Column Bundles
+===============
+
+The :class:`.Bundle` may be used to query for groups of columns under one
+namespace.
+
+.. versionadded:: 0.9.0
+
+The bundle allows columns to be grouped together::
+
+ from sqlalchemy.orm import Bundle
+
+ bn = Bundle('mybundle', MyClass.data1, MyClass.data2)
+ for row in session.query(bn).filter(bn.c.data1 == 'd1'):
+ print row.mybundle.data1, row.mybundle.data2
+
+The bundle can be subclassed to provide custom behaviors when results
+are fetched. The method :meth:`.Bundle.create_row_processor` is given
+the :class:`.Query` and a set of "row processor" functions at query execution
+time; these processor functions when given a result row will return the
+individual attribute value, which can then be adapted into any kind of
+return data structure. Below illustrates replacing the usual :class:`.KeyedTuple`
+return structure with a straight Python dictionary::
+
+ from sqlalchemy.orm import Bundle
+
+ class DictBundle(Bundle):
+ def create_row_processor(self, query, procs, labels):
+ """Override create_row_processor to return values as dictionaries"""
+ def proc(row, result):
+ return dict(
+ zip(labels, (proc(row, result) for proc in procs))
+ )
+ return proc
+
+A result from the above bundle will return dictionary values::
+
+ bn = DictBundle('mybundle', MyClass.data1, MyClass.data2)
+ for row in session.query(bn).filter(bn.c.data1 == 'd1'):
+ print row.mybundle['data1'], row.mybundle['data2']
+
+The :class:`.Bundle` construct is also integrated into the behavior
+of :func:`.composite`, where it is used to return composite attributes as objects
+when queried as individual attributes.
+
+
.. _maptojoin:
Mapping a Class against Multiple Tables
@@ -1055,6 +1323,9 @@ for each target table. SQLAlchemy refers to this as the "entity name"
pattern, which is described as a recipe at `Entity Name
<http://www.sqlalchemy.org/trac/wiki/UsageRecipes/EntityName>`_.
+
+.. _mapping_constructors:
+
Constructors and Object Initialization
=======================================
@@ -1110,6 +1381,251 @@ of these events.
.. autofunction:: reconstructor
+
+.. _mapper_version_counter:
+
+Configuring a Version Counter
+=============================
+
+The :class:`.Mapper` supports management of a :term:`version id column`, which
+is a single table column that increments or otherwise updates its value
+each time an ``UPDATE`` to the mapped table occurs. This value is checked each
+time the ORM emits an ``UPDATE`` or ``DELETE`` against the row to ensure that
+the value held in memory matches the database value.
+
+The purpose of this feature is to detect when two concurrent transactions
+are modifying the same row at roughly the same time, or alternatively to provide
+a guard against the usage of a "stale" row in a system that might be re-using
+data from a previous transaction without refreshing (e.g. if one sets ``expire_on_commit=False``
+with a :class:`.Session`, it is possible to re-use the data from a previous
+transaction).
+
+.. topic:: Concurrent transaction updates
+
+ When detecting concurrent updates within transactions, it is typically the
+ case that the database's transaction isolation level is below the level of
+ :term:`repeatable read`; otherwise, the transaction will not be exposed
+ to a new row value created by a concurrent update which conflicts with
+ the locally updated value. In this case, the SQLAlchemy versioning
+ feature will typically not be useful for in-transaction conflict detection,
+ though it still can be used for cross-transaction staleness detection.
+
+ The database that enforces repeatable reads will typically either have locked the
+ target row against a concurrent update, or is employing some form
+ of multi version concurrency control such that it will emit an error
+ when the transaction is committed. SQLAlchemy's version_id_col is an alternative
+ which allows version tracking to occur for specific tables within a transaction
+ that otherwise might not have this isolation level set.
+
+ .. seealso::
+
+ `Repeatable Read Isolation Level <http://www.postgresql.org/docs/9.1/static/transaction-iso.html#XACT-REPEATABLE-READ>`_ - Postgresql's implementation of repeatable read, including a description of the error condition.
+
+Simple Version Counting
+-----------------------
+
+The most straightforward way to track versions is to add an integer column
+to the mapped table, then establish it as the ``version_id_col`` within the
+mapper options::
+
+ class User(Base):
+ __tablename__ = 'user'
+
+ id = Column(Integer, primary_key=True)
+ version_id = Column(Integer, nullable=False)
+ name = Column(String(50), nullable=False)
+
+ __mapper_args__ = {
+ "version_id_col": version_id
+ }
+
+Above, the ``User`` mapping tracks integer versions using the column
+``version_id``. When an object of type ``User`` is first flushed, the
+``version_id`` column will be given a value of "1". Then, an UPDATE
+of the table later on will always be emitted in a manner similar to the
+following::
+
+ UPDATE user SET version_id=:version_id, name=:name
+ WHERE user.id = :user_id AND user.version_id = :user_version_id
+ {"name": "new name", "version_id": 2, "user_id": 1, "user_version_id": 1}
+
+The above UPDATE statement is updating the row that not only matches
+``user.id = 1``, it also requires that ``user.version_id = 1``, where "1"
+is the last version identifier this object is known to have used.
+If a transaction elsewhere has modified the row independently, this version id
+will no longer match, and the UPDATE statement will report that no rows matched;
+this is the condition that SQLAlchemy tests, that exactly one row matched our
+UPDATE (or DELETE) statement. If zero rows match, that indicates our version
+of the data is stale, and a :exc:`.StaleDataError` is raised.
+
+.. _custom_version_counter:
+
+Custom Version Counters / Types
+-------------------------------
+
+Other kinds of values or counters can be used for versioning. Common types include
+dates and GUIDs. When using an alternate type or counter scheme, SQLAlchemy
+provides a hook for this scheme using the ``version_id_generator`` argument,
+which accepts a version generation callable. This callable is passed the value of the current
+known version, and is expected to return the subsequent version.
+
+For example, if we wanted to track the versioning of our ``User`` class
+using a randomly generated GUID, we could do this (note that some backends
+support a native GUID type, but we illustrate here using a simple string)::
+
+ import uuid
+
+ class User(Base):
+ __tablename__ = 'user'
+
+ id = Column(Integer, primary_key=True)
+ version_uuid = Column(String(32))
+ name = Column(String(50), nullable=False)
+
+ __mapper_args__ = {
+ 'version_id_col':version_uuid,
+ 'version_id_generator':lambda version: uuid.uuid4().hex
+ }
+
+The persistence engine will call upon ``uuid.uuid4()`` each time a
+``User`` object is subject to an INSERT or an UPDATE. In this case, our
+version generation function can disregard the incoming value of ``version``,
+as the ``uuid4()`` function
+generates identifiers without any prerequisite value. If we were using
+a sequential versioning scheme such as numeric or a special character system,
+we could make use of the given ``version`` in order to help determine the
+subsequent value.
+
+.. seealso::
+
+ :ref:`custom_guid_type`
+
+.. _server_side_version_counter:
+
+Server Side Version Counters
+----------------------------
+
+The ``version_id_generator`` can also be configured to rely upon a value
+that is generated by the database. In this case, the database would need
+some means of generating new identifiers when a row is subject to an INSERT
+as well as with an UPDATE. For the UPDATE case, typically an update trigger
+is needed, unless the database in question supports some other native
+version identifier. The Postgresql database in particular supports a system
+column called `xmin <http://www.postgresql.org/docs/9.1/static/ddl-system-columns.html>`_
+which provides UPDATE versioning. We can make use
+of the Postgresql ``xmin`` column to version our ``User``
+class as follows::
+
+ class User(Base):
+ __tablename__ = 'user'
+
+ id = Column(Integer, primary_key=True)
+ name = Column(String(50), nullable=False)
+ xmin = Column("xmin", Integer, system=True)
+
+ __mapper_args__ = {
+ 'version_id_col': xmin,
+ 'version_id_generator': False
+ }
+
+With the above mapping, the ORM will rely upon the ``xmin`` column for
+automatically providing the new value of the version id counter.
+
+.. topic:: creating tables that refer to system columns
+
+ In the above scenario, as ``xmin`` is a system column provided by Postgresql,
+ we use the ``system=True`` argument to mark it as a system-provided
+ column, omitted from the ``CREATE TABLE`` statement.
+
+
+The ORM typically does not actively fetch the values of database-generated
+values when it emits an INSERT or UPDATE, instead leaving these columns as
+"expired" and to be fetched when they are next accessed, unless the ``eager_defaults``
+:func:`.mapper` flag is set. However, when a
+server side version column is used, the ORM needs to actively fetch the newly
+generated value. This is so that the version counter is set up *before*
+any concurrent transaction may update it again. This fetching is also
+best done simultaneously within the INSERT or UPDATE statement using :term:`RETURNING`,
+otherwise if emitting a SELECT statement afterwards, there is still a potential
+race condition where the version counter may change before it can be fetched.
+
+When the target database supports RETURNING, an INSERT statement for our ``User`` class will look
+like this::
+
+ INSERT INTO "user" (name) VALUES (%(name)s) RETURNING "user".id, "user".xmin
+ {'name': 'ed'}
+
+Where above, the ORM can acquire any newly generated primary key values along
+with server-generated version identifiers in one statement. When the backend
+does not support RETURNING, an additional SELECT must be emitted for **every**
+INSERT and UPDATE, which is much less efficient, and also introduces the possibility of
+missed version counters::
+
+ INSERT INTO "user" (name) VALUES (%(name)s)
+ {'name': 'ed'}
+
+    SELECT "user".version_id AS user_version_id FROM "user" WHERE
+ "user".id = :param_1
+ {"param_1": 1}
+
+It is *strongly recommended* that server side version counters only be used
+when absolutely necessary and only on backends that support :term:`RETURNING`,
+e.g. Postgresql, Oracle, SQL Server (though SQL Server has
+`major caveats <http://blogs.msdn.com/b/sqlprogrammability/archive/2008/07/11/update-with-output-clause-triggers-and-sqlmoreresults.aspx>`_ when triggers are used), Firebird.
+
+.. versionadded:: 0.9.0
+
+ Support for server side version identifier tracking.
+
+Programmatic or Conditional Version Counters
+---------------------------------------------
+
+When ``version_id_generator`` is set to False, we can also programmatically
+(and conditionally) set the version identifier on our object in the same way
+we assign any other mapped attribute. Such as if we used our UUID example, but
+set ``version_id_generator`` to ``False``, we can set the version identifier
+at our choosing::
+
+ import uuid
+
+ class User(Base):
+ __tablename__ = 'user'
+
+ id = Column(Integer, primary_key=True)
+ version_uuid = Column(String(32))
+ name = Column(String(50), nullable=False)
+
+ __mapper_args__ = {
+ 'version_id_col':version_uuid,
+ 'version_id_generator': False
+ }
+
+ u1 = User(name='u1', version_uuid=uuid.uuid4())
+
+ session.add(u1)
+
+ session.commit()
+
+ u1.name = 'u2'
+ u1.version_uuid = uuid.uuid4()
+
+ session.commit()
+
+We can update our ``User`` object without incrementing the version counter
+as well; the value of the counter will remain unchanged, and the UPDATE
+statement will still check against the previous value. This may be useful
+for schemes where only certain classes of UPDATE are sensitive to concurrency
+issues::
+
+ # will leave version_uuid unchanged
+ u1.name = 'u3'
+ session.commit()
+
+.. versionadded:: 0.9.0
+
+ Support for programmatic and conditional version identifier tracking.
+
+
Class Mapping API
=================
diff --git a/doc/build/orm/query.rst b/doc/build/orm/query.rst
index 73aa5c555..5e31d710f 100644
--- a/doc/build/orm/query.rst
+++ b/doc/build/orm/query.rst
@@ -13,7 +13,7 @@ For an in-depth introduction to querying with the SQLAlchemy ORM, please see the
The Query Object
----------------
-:class:`~.Query` is produced in terms of a given :class:`~.Session`, using the :func:`~.Query.query` function::
+:class:`~.Query` is produced in terms of a given :class:`~.Session`, using the :meth:`~.Session.query` method::
q = session.query(SomeMappedClass)
@@ -31,9 +31,15 @@ ORM-Specific Query Constructs
.. autoclass:: sqlalchemy.orm.util.AliasedInsp
+.. autoclass:: sqlalchemy.orm.query.Bundle
+ :members:
+
.. autoclass:: sqlalchemy.util.KeyedTuple
:members: keys, _fields, _asdict
+.. autoclass:: sqlalchemy.orm.strategy_options.Load
+ :members:
+
.. autofunction:: join
.. autofunction:: outerjoin
diff --git a/doc/build/orm/relationships.rst b/doc/build/orm/relationships.rst
index e98ec657c..67a41c808 100644
--- a/doc/build/orm/relationships.rst
+++ b/doc/build/orm/relationships.rst
@@ -967,7 +967,7 @@ load those ``Address`` objects which specify a city of "Boston"::
__tablename__ = 'user'
id = Column(Integer, primary_key=True)
name = Column(String)
- addresses = relationship("Address",
+ boston_addresses = relationship("Address",
primaryjoin="and_(User.id==Address.user_id, "
"Address.city=='Boston')")
diff --git a/doc/build/orm/session.rst b/doc/build/orm/session.rst
index 3e35f02cb..c979586f2 100644
--- a/doc/build/orm/session.rst
+++ b/doc/build/orm/session.rst
@@ -198,196 +198,319 @@ at the same time).
.. _session_faq:
-.. _session_faq_whentocreate:
-
Session Frequently Asked Questions
-----------------------------------
-* When do I make a :class:`.sessionmaker` ?
-
- Just one time, somewhere in your application's global scope. It should be
- looked upon as part of your application's configuration. If your
- application has three .py files in a package, you could, for example,
- place the :class:`.sessionmaker` line in your ``__init__.py`` file; from
- that point on your other modules say "from mypackage import Session". That
- way, everyone else just uses :class:`.Session()`,
- and the configuration of that session is controlled by that central point.
-
- If your application starts up, does imports, but does not know what
- database it's going to be connecting to, you can bind the
- :class:`.Session` at the "class" level to the
- engine later on, using :meth:`.sessionmaker.configure`.
-
- In the examples in this section, we will frequently show the
- :class:`.sessionmaker` being created right above the line where we actually
- invoke :class:`.Session`. But that's just for
- example's sake! In reality, the :class:`.sessionmaker` would be somewhere
- at the module level. The calls to instantiate :class:`.Session`
- would then be placed at the point in the application where database
- conversations begin.
-
-* When do I construct a :class:`.Session`, when do I commit it, and when do I close it ?
-
- A :class:`.Session` is typically constructed at the beginning of a logical
- operation where database access is potentially anticipated.
-
- The :class:`.Session`, whenever it is used to talk to the database,
- begins a database transaction as soon as it starts communicating.
- Assuming the ``autocommit`` flag is left at its recommended default
- of ``False``, this transaction remains in progress until the :class:`.Session`
- is rolled back, committed, or closed. The :class:`.Session` will
- begin a new transaction if it is used again, subsequent to the previous
- transaction ending; from this it follows that the :class:`.Session`
- is capable of having a lifespan across many transactions, though only
- one at a time. We refer to these two concepts as **transaction scope**
- and **session scope**.
-
- The implication here is that the SQLAlchemy ORM is encouraging the
- developer to establish these two scopes in his or her application,
- including not only when the scopes begin and end, but also the
- expanse of those scopes, for example should a single
- :class:`.Session` instance be local to the execution flow within a
- function or method, should it be a global object used by the
- entire application, or somewhere in between these two.
-
- The burden placed on the developer to determine this scope is one
- area where the SQLAlchemy ORM necessarily has a strong opinion
- about how the database should be used. The unit-of-work pattern
- is specifically one of accumulating changes over time and flushing
- them periodically, keeping in-memory state in sync with what's
- known to be present in a local transaction. This pattern is only
- effective when meaningful transaction scopes are in place.
-
- It's usually not very hard to determine the best points at which
- to begin and end the scope of a :class:`.Session`, though the wide
- variety of application architectures possible can introduce
- challenging situations.
-
- A common choice is to tear down the :class:`.Session` at the same
- time the transaction ends, meaning the transaction and session scopes
- are the same. This is a great choice to start out with as it
- removes the need to consider session scope as separate from transaction
- scope.
-
- While there's no one-size-fits-all recommendation for how transaction
- scope should be determined, there are common patterns. Especially
- if one is writing a web application, the choice is pretty much established.
-
- A web application is the easiest case because such an appication is already
- constructed around a single, consistent scope - this is the **request**,
- which represents an incoming request from a browser, the processing
- of that request to formulate a response, and finally the delivery of that
- response back to the client. Integrating web applications with the
- :class:`.Session` is then the straightforward task of linking the
- scope of the :class:`.Session` to that of the request. The :class:`.Session`
- can be established as the request begins, or using a **lazy initialization**
- pattern which establishes one as soon as it is needed. The request
- then proceeds, with some system in place where application logic can access
- the current :class:`.Session` in a manner associated with how the actual
- request object is accessed. As the request ends, the :class:`.Session`
- is torn down as well, usually through the usage of event hooks provided
- by the web framework. The transaction used by the :class:`.Session`
- may also be committed at this point, or alternatively the application may
- opt for an explicit commit pattern, only committing for those requests
- where one is warranted, but still always tearing down the :class:`.Session`
- unconditionally at the end.
-
- Most web frameworks include infrastructure to establish a single
- :class:`.Session`, associated with the request, which is correctly
- constructed and torn down corresponding
- torn down at the end of a request. Such infrastructure pieces
- include products such as `Flask-SQLAlchemy <http://packages.python.org/Flask-SQLAlchemy/>`_,
- for usage in conjunction with the Flask web framework,
- and `Zope-SQLAlchemy <http://pypi.python.org/pypi/zope.sqlalchemy>`_,
- for usage in conjunction with the Pyramid and Zope frameworks.
- SQLAlchemy strongly recommends that these products be used as
- available.
-
- In those situations where integration libraries are not available,
- SQLAlchemy includes its own "helper" class known as
- :class:`.scoped_session`. A tutorial on the usage of this object
- is at :ref:`unitofwork_contextual`. It provides both a quick way
- to associate a :class:`.Session` with the current thread, as well as
- patterns to associate :class:`.Session` objects with other kinds of
- scopes.
-
- As mentioned before, for non-web applications there is no one clear
- pattern, as applications themselves don't have just one pattern
- of architecture. The best strategy is to attempt to demarcate
- "operations", points at which a particular thread begins to perform
- a series of operations for some period of time, which can be committed
- at the end. Some examples:
-
- * A background daemon which spawns off child forks
- would want to create a :class:`.Session` local to each child
- process work with that :class:`.Session` through the life of the "job"
- that the fork is handling, then tear it down when the job is completed.
-
- * For a command-line script, the application would create a single, global
- :class:`.Session` that is established when the program begins to do its
- work, and commits it right as the program is completing its task.
-
- * For a GUI interface-driven application, the scope of the :class:`.Session`
- may best be within the scope of a user-generated event, such as a button
- push. Or, the scope may correspond to explicit user interaction, such as
- the user "opening" a series of records, then "saving" them.
-
-* Is the Session a cache ?
-
- Yeee...no. It's somewhat used as a cache, in that it implements the
- identity map pattern, and stores objects keyed to their primary key.
- However, it doesn't do any kind of query caching. This means, if you say
- ``session.query(Foo).filter_by(name='bar')``, even if ``Foo(name='bar')``
- is right there, in the identity map, the session has no idea about that.
- It has to issue SQL to the database, get the rows back, and then when it
- sees the primary key in the row, *then* it can look in the local identity
- map and see that the object is already there. It's only when you say
- ``query.get({some primary key})`` that the
- :class:`~sqlalchemy.orm.session.Session` doesn't have to issue a query.
-
- Additionally, the Session stores object instances using a weak reference
- by default. This also defeats the purpose of using the Session as a cache.
-
- The :class:`.Session` is not designed to be a
- global object from which everyone consults as a "registry" of objects.
- That's more the job of a **second level cache**. SQLAlchemy provides
- a pattern for implementing second level caching using `dogpile.cache <http://dogpilecache.readthedocs.org/>`_,
- via the :ref:`examples_caching` example.
-
-* How can I get the :class:`~sqlalchemy.orm.session.Session` for a certain object ?
-
- Use the :meth:`~.Session.object_session` classmethod
- available on :class:`~sqlalchemy.orm.session.Session`::
-
- session = Session.object_session(someobject)
-
-* Is the session thread-safe?
-
- The :class:`.Session` is very much intended to be used in a
- **non-concurrent** fashion, which usually means in only one thread at a
- time.
-
- The :class:`.Session` should be used in such a way that one
- instance exists for a single series of operations within a single
- transaction. One expedient way to get this effect is by associating
- a :class:`.Session` with the current thread (see :ref:`unitofwork_contextual`
- for background). Another is to use a pattern
- where the :class:`.Session` is passed between functions and is otherwise
- not shared with other threads.
-
- The bigger point is that you should not *want* to use the session
- with multiple concurrent threads. That would be like having everyone at a
- restaurant all eat from the same plate. The session is a local "workspace"
- that you use for a specific set of tasks; you don't want to, or need to,
- share that session with other threads who are doing some other task.
-
- If there are in fact multiple threads participating
- in the same task, then you may consider sharing the session between
- those threads, though this would be an extremely unusual scenario.
- In this case it would be necessary
- to implement a proper locking scheme so that the :class:`.Session` is still not
- exposed to concurrent access.
+When do I make a :class:`.sessionmaker`?
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Just one time, somewhere in your application's global scope. It should be
+looked upon as part of your application's configuration. If your
+application has three .py files in a package, you could, for example,
+place the :class:`.sessionmaker` line in your ``__init__.py`` file; from
+that point on your other modules say "from mypackage import Session". That
+way, everyone else just uses :class:`.Session()`,
+and the configuration of that session is controlled by that central point.
+
+If your application starts up, does imports, but does not know what
+database it's going to be connecting to, you can bind the
+:class:`.Session` at the "class" level to the
+engine later on, using :meth:`.sessionmaker.configure`.
+
+In the examples in this section, we will frequently show the
+:class:`.sessionmaker` being created right above the line where we actually
+invoke :class:`.Session`. But that's just for
+example's sake! In reality, the :class:`.sessionmaker` would be somewhere
+at the module level. The calls to instantiate :class:`.Session`
+would then be placed at the point in the application where database
+conversations begin.
+
+.. _session_faq_whentocreate:
+
+When do I construct a :class:`.Session`, when do I commit it, and when do I close it?
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. topic:: tl;dr;
+
+ As a general rule, keep the lifecycle of the session **separate and
+ external** from functions and objects that access and/or manipulate
+ database data.
+
+A :class:`.Session` is typically constructed at the beginning of a logical
+operation where database access is potentially anticipated.
+
+The :class:`.Session`, whenever it is used to talk to the database,
+begins a database transaction as soon as it starts communicating.
+Assuming the ``autocommit`` flag is left at its recommended default
+of ``False``, this transaction remains in progress until the :class:`.Session`
+is rolled back, committed, or closed. The :class:`.Session` will
+begin a new transaction if it is used again, subsequent to the previous
+transaction ending; from this it follows that the :class:`.Session`
+is capable of having a lifespan across many transactions, though only
+one at a time. We refer to these two concepts as **transaction scope**
+and **session scope**.
+
+The implication here is that the SQLAlchemy ORM is encouraging the
+developer to establish these two scopes in their application,
+including not only when the scopes begin and end, but also the
+expanse of those scopes, for example should a single
+:class:`.Session` instance be local to the execution flow within a
+function or method, should it be a global object used by the
+entire application, or somewhere in between these two.
+
+The burden placed on the developer to determine this scope is one
+area where the SQLAlchemy ORM necessarily has a strong opinion
+about how the database should be used. The :term:`unit of work` pattern
+is specifically one of accumulating changes over time and flushing
+them periodically, keeping in-memory state in sync with what's
+known to be present in a local transaction. This pattern is only
+effective when meaningful transaction scopes are in place.
+
+It's usually not very hard to determine the best points at which
+to begin and end the scope of a :class:`.Session`, though the wide
+variety of application architectures possible can introduce
+challenging situations.
+
+A common choice is to tear down the :class:`.Session` at the same
+time the transaction ends, meaning the transaction and session scopes
+are the same. This is a great choice to start out with as it
+removes the need to consider session scope as separate from transaction
+scope.
+
+While there's no one-size-fits-all recommendation for how transaction
+scope should be determined, there are common patterns. Especially
+if one is writing a web application, the choice is pretty much established.
+
+A web application is the easiest case because such an application is already
+constructed around a single, consistent scope - this is the **request**,
+which represents an incoming request from a browser, the processing
+of that request to formulate a response, and finally the delivery of that
+response back to the client. Integrating web applications with the
+:class:`.Session` is then the straightforward task of linking the
+scope of the :class:`.Session` to that of the request. The :class:`.Session`
+can be established as the request begins, or using a :term:`lazy initialization`
+pattern which establishes one as soon as it is needed. The request
+then proceeds, with some system in place where application logic can access
+the current :class:`.Session` in a manner associated with how the actual
+request object is accessed. As the request ends, the :class:`.Session`
+is torn down as well, usually through the usage of event hooks provided
+by the web framework. The transaction used by the :class:`.Session`
+may also be committed at this point, or alternatively the application may
+opt for an explicit commit pattern, only committing for those requests
+where one is warranted, but still always tearing down the :class:`.Session`
+unconditionally at the end.
+
+Most web frameworks include infrastructure to establish a single
+:class:`.Session`, associated with the request, which is correctly
+constructed and torn down at the end
+of a request. Such infrastructure pieces
+include products such as `Flask-SQLAlchemy <http://packages.python.org/Flask-SQLAlchemy/>`_,
+for usage in conjunction with the Flask web framework,
+and `Zope-SQLAlchemy <http://pypi.python.org/pypi/zope.sqlalchemy>`_,
+for usage in conjunction with the Pyramid and Zope frameworks.
+SQLAlchemy strongly recommends that these products be used as
+available.
+
+In those situations where integration libraries are not available,
+SQLAlchemy includes its own "helper" class known as
+:class:`.scoped_session`. A tutorial on the usage of this object
+is at :ref:`unitofwork_contextual`. It provides both a quick way
+to associate a :class:`.Session` with the current thread, as well as
+patterns to associate :class:`.Session` objects with other kinds of
+scopes.
+
+As mentioned before, for non-web applications there is no one clear
+pattern, as applications themselves don't have just one pattern
+of architecture. The best strategy is to attempt to demarcate
+"operations", points at which a particular thread begins to perform
+a series of operations for some period of time, which can be committed
+at the end. Some examples:
+
+* A background daemon which spawns off child forks
+ would want to create a :class:`.Session` local to each child
+ process, work with that :class:`.Session` through the life of the "job"
+ that the fork is handling, then tear it down when the job is completed.
+
+* For a command-line script, the application would create a single, global
+ :class:`.Session` that is established when the program begins to do its
+ work, and commits it right as the program is completing its task.
+
+* For a GUI interface-driven application, the scope of the :class:`.Session`
+ may best be within the scope of a user-generated event, such as a button
+ push. Or, the scope may correspond to explicit user interaction, such as
+ the user "opening" a series of records, then "saving" them.
+
+As a general rule, the application should manage the lifecycle of the
+session *externally* to functions that deal with specific data. This is a
+fundamental separation of concerns which keeps data-specific operations
+agnostic of the context in which they access and manipulate that data.
+
+E.g. **don't do this**::
+
+ ### this is the **wrong way to do it** ###
+
+ class ThingOne(object):
+ def go(self):
+ session = Session()
+ try:
+ session.query(FooBar).update({"x": 5})
+ session.commit()
+ except:
+ session.rollback()
+ raise
+
+ class ThingTwo(object):
+ def go(self):
+ session = Session()
+ try:
+ session.query(Widget).update({"q": 18})
+ session.commit()
+ except:
+ session.rollback()
+ raise
+
+ def run_my_program():
+ ThingOne().go()
+ ThingTwo().go()
+
+Keep the lifecycle of the session (and usually the transaction)
+**separate and external**::
+
+ ### this is a **better** (but not the only) way to do it ###
+
+ class ThingOne(object):
+ def go(self, session):
+ session.query(FooBar).update({"x": 5})
+
+ class ThingTwo(object):
+ def go(self, session):
+ session.query(Widget).update({"q": 18})
+
+ def run_my_program():
+ session = Session()
+ try:
+ ThingOne().go(session)
+ ThingTwo().go(session)
+
+ session.commit()
+ except:
+ session.rollback()
+ raise
+ finally:
+ session.close()
+
+The advanced developer will try to keep the details of session, transaction
+and exception management as far as possible from the details of the program
+doing its work. For example, we can further separate concerns using a `context manager <http://docs.python.org/3/library/contextlib.html#contextlib.contextmanager>`_::
+
+ ### another way (but again *not the only way*) to do it ###
+
+ from contextlib import contextmanager
+
+ @contextmanager
+ def session_scope():
+ """Provide a transactional scope around a series of operations."""
+ session = Session()
+ try:
+ yield session
+ session.commit()
+ except:
+ session.rollback()
+ raise
+ finally:
+ session.close()
+
+
+ def run_my_program():
+ with session_scope() as session:
+ ThingOne().go(session)
+ ThingTwo().go(session)
+
+
+Is the Session a cache?
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Yeee...no. It's somewhat used as a cache, in that it implements the
+:term:`identity map` pattern, and stores objects keyed to their primary key.
+However, it doesn't do any kind of query caching. This means, if you say
+``session.query(Foo).filter_by(name='bar')``, even if ``Foo(name='bar')``
+is right there, in the identity map, the session has no idea about that.
+It has to issue SQL to the database, get the rows back, and then when it
+sees the primary key in the row, *then* it can look in the local identity
+map and see that the object is already there. It's only when you say
+``query.get({some primary key})`` that the
+:class:`~sqlalchemy.orm.session.Session` doesn't have to issue a query.
+
+Additionally, the Session stores object instances using a weak reference
+by default. This also defeats the purpose of using the Session as a cache.
+
+The :class:`.Session` is not designed to be a
+global object from which everyone consults as a "registry" of objects.
+That's more the job of a **second level cache**. SQLAlchemy provides
+a pattern for implementing second level caching using `dogpile.cache <http://dogpilecache.readthedocs.org/>`_,
+via the :ref:`examples_caching` example.
+
+How can I get the :class:`~sqlalchemy.orm.session.Session` for a certain object?
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Use the :meth:`~.Session.object_session` classmethod
+available on :class:`~sqlalchemy.orm.session.Session`::
+
+ session = Session.object_session(someobject)
+
+The newer :ref:`core_inspection_toplevel` system can also be used::
+
+ from sqlalchemy import inspect
+ session = inspect(someobject).session
+
+.. _session_faq_threadsafe:
+
+Is the session thread-safe?
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The :class:`.Session` is very much intended to be used in a
+**non-concurrent** fashion, which usually means in only one thread at a
+time.
+
+The :class:`.Session` should be used in such a way that one
+instance exists for a single series of operations within a single
+transaction. One expedient way to get this effect is by associating
+a :class:`.Session` with the current thread (see :ref:`unitofwork_contextual`
+for background). Another is to use a pattern
+where the :class:`.Session` is passed between functions and is otherwise
+not shared with other threads.
+
+The bigger point is that you should not *want* to use the session
+with multiple concurrent threads. That would be like having everyone at a
+restaurant all eat from the same plate. The session is a local "workspace"
+that you use for a specific set of tasks; you don't want to, or need to,
+share that session with other threads who are doing some other task.
+
+Making sure the :class:`.Session` is only used in a single concurrent thread at a time
+is called a "share nothing" approach to concurrency. But actually, not
+sharing the :class:`.Session` implies a more significant pattern; it
+means not just the :class:`.Session` object itself, but
+also **all objects that are associated with that Session**, must be kept within
+the scope of a single concurrent thread. The set of mapped
+objects associated with a :class:`.Session` are essentially proxies for data
+within database rows accessed over a database connection, and so just like
+the :class:`.Session` itself, the whole
+set of objects is really just a large-scale proxy for a database connection
+(or connections). Ultimately, it's mostly the DBAPI connection itself that
+we're keeping away from concurrent access; but since the :class:`.Session`
+and all the objects associated with it are all proxies for that DBAPI connection,
+the entire graph is essentially not safe for concurrent access.
+
+If there are in fact multiple threads participating
+in the same task, then you may consider sharing the session and its objects between
+those threads; however, in this extremely unusual scenario the application would
+need to ensure that a proper locking scheme is implemented so that there isn't
+*concurrent* access to the :class:`.Session` or its state. A more common approach
+to this situation is to maintain a single :class:`.Session` per concurrent thread,
+but to instead *copy* objects from one :class:`.Session` to another, often
+using the :meth:`.Session.merge` method to copy the state of an object into
+a new object local to a different :class:`.Session`.
Querying
--------
@@ -679,6 +802,8 @@ into the Session's list of objects to be marked as deleted::
# commit (or flush)
session.commit()
+.. _session_deleting_from_collections:
+
Deleting from Collections
~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -1472,6 +1597,8 @@ flush/commit operation, the ``value`` attribute on ``someobject`` above is
expired, so that when next accessed the newly generated value will be loaded
from the database.
+.. _session_sql_expressions:
+
Using SQL Expressions with Sessions
====================================
@@ -1521,8 +1648,8 @@ proper context for the desired engine::
.. _session_external_transaction:
-Joining a Session into an External Transaction
-===============================================
+Joining a Session into an External Transaction (such as for test suites)
+========================================================================
If a :class:`.Connection` is being used which is already in a transactional
state (i.e. has a :class:`.Transaction` established), a :class:`.Session` can
@@ -1559,11 +1686,12 @@ entire database interaction is rolled back::
self.session.commit()
def tearDown(self):
+ self.session.close()
+
# rollback - everything that happened with the
# Session above (including calls to commit())
# is rolled back.
self.trans.rollback()
- self.session.close()
# return connection to the Engine
self.connection.close()
@@ -1575,6 +1703,42 @@ nested begin/commit-or-rollback pairs where only the outermost begin/commit
pair actually commits the transaction, or if the outermost block rolls back,
everything is rolled back.
+.. topic:: Supporting Tests with Rollbacks
+
+ The above recipe works well for any kind of database enabled test, except
+ for a test that needs to actually invoke :meth:`.Session.rollback` within
+ the scope of the test itself. The above recipe can be expanded, such
+ that the :class:`.Session` always runs all operations within the scope
+ of a SAVEPOINT, which is established at the start of each transaction,
+ so that tests can also rollback the "transaction" as well while still
+ remaining in the scope of a larger "transaction" that's never committed,
+ using two extra events::
+
+ from sqlalchemy import event
+
+ class SomeTest(TestCase):
+ def setUp(self):
+ # connect to the database
+ self.connection = engine.connect()
+
+ # begin a non-ORM transaction
+            self.trans = self.connection.begin()
+
+ # bind an individual Session to the connection
+ self.session = Session(bind=self.connection)
+
+ # start the session in a SAVEPOINT...
+ self.session.begin_nested()
+
+ # then each time that SAVEPOINT ends, reopen it
+ @event.listens_for(self.session, "after_transaction_end")
+ def restart_savepoint(session, transaction):
+ if transaction.nested and not transaction._parent.nested:
+ session.begin_nested()
+
+
+ # ... the tearDown() method stays the same
+
.. _unitofwork_contextual:
Contextual/Thread-local Sessions
@@ -1636,7 +1800,7 @@ we call upon the registry a second time, we get back the **same** :class:`.Sessi
This pattern allows disparate sections of the application to call upon a global
:class:`.scoped_session`, so that all those areas may share the same session
without the need to pass it explicitly. The :class:`.Session` we've established
-in our registry will remain, until we explicitly tell our regsitry to dispose of it,
+in our registry will remain, until we explicitly tell our registry to dispose of it,
by calling :meth:`.scoped_session.remove`::
>>> Session.remove()
@@ -1928,12 +2092,10 @@ Session and sessionmaker()
.. autoclass:: sessionmaker
:members:
- :show-inheritance:
:inherited-members:
.. autoclass:: sqlalchemy.orm.session.Session
:members:
- :show-inheritance:
:inherited-members:
.. autoclass:: sqlalchemy.orm.session.SessionTransaction
diff --git a/doc/build/orm/tutorial.rst b/doc/build/orm/tutorial.rst
index 9686d7c85..aa9a51178 100644
--- a/doc/build/orm/tutorial.rst
+++ b/doc/build/orm/tutorial.rst
@@ -8,8 +8,7 @@ The SQLAlchemy Object Relational Mapper presents a method of associating
user-defined Python classes with database tables, and instances of those
classes (objects) with rows in their corresponding tables. It includes a
system that transparently synchronizes all changes in state between objects
-and their related rows, called a `unit of work
-<http://martinfowler.com/eaaCatalog/unitOfWork.html>`_, as well as a system
+and their related rows, called a :term:`unit of work`, as well as a system
for expressing database queries in terms of the user defined classes and their
defined relationships between each other.
@@ -23,8 +22,7 @@ example of applied usage of the Expression Language.
While there is overlap among the usage patterns of the ORM and the Expression
Language, the similarities are more superficial than they may at first appear.
One approaches the structure and content of data from the perspective of a
-user-defined `domain model
-<http://en.wikipedia.org/wiki/Domain_model>`_ which is transparently
+user-defined :term:`domain model` which is transparently
persisted and refreshed from its underlying storage model. The other
approaches it from the perspective of literal schema and SQL expression
representations which are explicitly composed into messages consumed
@@ -42,11 +40,11 @@ following text represents the expected return value.
Version Check
=============
-A quick check to verify that we are on at least **version 0.8** of SQLAlchemy::
+A quick check to verify that we are on at least **version 0.9** of SQLAlchemy::
>>> import sqlalchemy
>>> sqlalchemy.__version__ # doctest:+SKIP
- 0.8.0
+ 0.9.0
Connecting
==========
@@ -65,26 +63,21 @@ the SQL behind a popup window so it doesn't get in our way; just click the
"SQL" links to see what's being generated.
The return value of :func:`.create_engine` is an instance of :class:`.Engine`, and it represents
-the core interface to the database, adapted through a **dialect** that handles the details
-of the database and DBAPI in use. In this case the SQLite dialect will interpret instructions
+the core interface to the database, adapted through a :term:`dialect` that handles the details
+of the database and :term:`DBAPI` in use. In this case the SQLite dialect will interpret instructions
to the Python built-in ``sqlite3`` module.
-The :class:`.Engine` has not actually tried to connect to the database yet; that happens
-only the first time it is asked to perform a task against the database. We can illustrate
-this by asking it to perform a simple SELECT statement:
+.. sidebar:: Lazy Connecting
-.. sourcecode:: python+sql
+ The :class:`.Engine`, when first returned by :func:`.create_engine`,
+ has not actually tried to connect to the database yet; that happens
+ only the first time it is asked to perform a task against the database.
- {sql}>>> engine.execute("select 1").scalar()
- select 1
- ()
- {stop}1
-
-As the :meth:`.Engine.execute` method is called, the :class:`.Engine` establishes a connection to the
-SQLite database, which is then used to emit the SQL. The connection is then returned to an internal
-connection pool where it will be reused on subsequent statement executions. While we illustrate direct usage of the
-:class:`.Engine` here, this isn't typically necessary when using the ORM, where the :class:`.Engine`,
-once created, is used behind the scenes by the ORM as we'll see shortly.
+The first time a method like :meth:`.Engine.execute` or :meth:`.Engine.connect`
+is called, the :class:`.Engine` establishes a real :term:`DBAPI` connection to the
+database, which is then used to emit the SQL. When using the ORM, we typically
+don't use the :class:`.Engine` directly once created; instead, it's used
+behind the scenes by the ORM as we'll see shortly.
Declare a Mapping
=================
@@ -111,11 +104,9 @@ function, as follows::
Now that we have a "base", we can define any number of mapped classes in terms
of it. We will start with just a single table called ``users``, which will store
records for the end-users using our application.
-A new class called ``User`` will be the class to which we map this table. The
-imports we'll need to accomplish this include objects that represent the components
-of our table, including the :class:`.Column` class which represents a database column,
-as well as the :class:`.Integer` and :class:`.String` classes that
-represent basic datatypes used in columns::
+A new class called ``User`` will be the class to which we map this table. Within
+the class, we define details about the table to which we'll be mapping, primarily
+the table name, and names and datatypes of columns::
>>> from sqlalchemy import Column, Integer, String
>>> class User(Base):
@@ -126,71 +117,84 @@ represent basic datatypes used in columns::
... fullname = Column(String)
... password = Column(String)
...
- ... def __init__(self, name, fullname, password):
- ... self.name = name
- ... self.fullname = fullname
- ... self.password = password
- ...
... def __repr__(self):
- ... return "<User('%s','%s', '%s')>" % (self.name, self.fullname, self.password)
-
-The above ``User`` class establishes details about the table being mapped, including the name of the table denoted
-by the ``__tablename__`` attribute, a set of columns ``id``, ``name``, ``fullname`` and ``password``,
-where the ``id`` column will also be the primary key of the table. While its certainly possible
-that some database tables don't have primary key columns (as is also the case with views, which can
-also be mapped), the ORM in order to actually map to a particular table needs there
-to be at least one column denoted as a primary key column; multiple-column, i.e. composite, primary keys
-are of course entirely feasible as well.
-
-We define a constructor via ``__init__()`` and also a ``__repr__()`` method - both are optional. The
-class of course can have any number of other methods and attributes as required by the application,
-as it's basically just a plain Python class. Inheriting from ``Base`` is also only a requirement
-of the declarative configurational system, which itself is optional and relatively open ended; at its
-core, the SQLAlchemy ORM only requires that a class be a so-called "new style class", that is, it inherits
-from ``object`` in Python 2, in order to be mapped. All classes in Python 3 are "new style" classes.
-
-.. topic:: The Non Opinionated Philosophy
-
- In our ``User`` mapping example, it was required that we identify the name of the table
- in use, as well as the names and characteristics of all columns which we care about,
- including which column or columns
- represent the primary key, as well as some basic information about the types in use.
- SQLAlchemy never makes assumptions about these decisions - the developer must
- always be explicit about specific conventions in use. However, that doesn't mean the
- task can't be automated. While this tutorial will keep things explicit, developers are
- encouraged to make use of helper functions as well as "Declarative Mixins" to
- automate their tasks in large scale applications. The section :ref:`declarative_mixins`
- introduces many of these techniques.
+ ... return "<User(name='%s', fullname='%s', password='%s')>" % (
+ ... self.name, self.fullname, self.password)
+
+.. sidebar:: Tip
+
+    The ``User`` class defines a ``__repr__()`` method,
+    but note that it is **optional**; we only implement it in
+    this tutorial so that our examples show nicely
+    formatted ``User`` objects.
+
+A class using Declarative at a minimum
+needs a ``__tablename__`` attribute, and at least one
+:class:`.Column` which is part of a primary key [#]_. SQLAlchemy never makes any
+assumptions by itself about the table to which
+a class refers, and has no built-in conventions for names,
+datatypes, or constraints.  But this doesn't mean
+boilerplate is required; instead, you're encouraged to create your
+own automated conventions using helper functions and mixin classes, which
+is described in detail at :ref:`declarative_mixins`.
+
+When our class is constructed, Declarative replaces all the :class:`.Column`
+objects with special Python accessors known as :term:`descriptors`; this is a
+process known as :term:`instrumentation`. The "instrumented" mapped class
+will provide us with the means to refer to our table in a SQL context as well
+as to persist and load the values of columns from the database.
+
+Outside of what the mapping process does to our class, the class remains
+otherwise mostly a normal Python class, to which we can define any
+number of ordinary attributes and methods needed by our application.
+
+.. [#] For information on why a primary key is required, see
+ :ref:`faq_mapper_primary_key`.
+
+
+Create a Schema
+===============
With our ``User`` class constructed via the Declarative system, we have defined information about
-our table, known as **table metadata**, as well as a user-defined class which is linked to this
-table, known as a **mapped class**. Declarative has provided for us a shorthand system for what in SQLAlchemy is
-called a "Classical Mapping", which specifies these two units separately and is discussed
-in :ref:`classical_mapping`. The table
-is actually represented by a datastructure known as :class:`.Table`, and the mapping represented
-by a :class:`.Mapper` object generated by a function called :func:`.mapper`. Declarative performs both of
-these steps for us, making available the
-:class:`.Table` it has created via the ``__table__`` attribute::
+our table, known as :term:`table metadata`. The object used by SQLAlchemy to represent
+this information for a specific table is called the :class:`.Table` object, and here Declarative has made
+one for us. We can see this object by inspecting the ``__table__`` attribute::
>>> User.__table__ # doctest: +NORMALIZE_WHITESPACE
- Table('users', MetaData(None),
+ Table('users', MetaData(bind=None),
Column('id', Integer(), table=<users>, primary_key=True, nullable=False),
Column('name', String(), table=<users>),
Column('fullname', String(), table=<users>),
Column('password', String(), table=<users>), schema=None)
-and while rarely needed, making available the :class:`.Mapper` object via the ``__mapper__`` attribute::
-
- >>> User.__mapper__ # doctest: +ELLIPSIS
- <Mapper at 0x...; User>
-
-The Declarative base class also contains a catalog of all the :class:`.Table` objects
-that have been defined called :class:`.MetaData`, available via the ``.metadata``
-attribute. In this example, we are defining
-new tables that have yet to be created in our SQLite database, so one helpful feature
-the :class:`.MetaData` object offers is the ability to issue CREATE TABLE statements
-to the database for all tables that don't yet exist. We illustrate this
-by calling the :meth:`.MetaData.create_all` method, passing in our :class:`.Engine`
+.. sidebar:: Classical Mappings
+
+ The Declarative system, though highly recommended,
+ is not required in order to use SQLAlchemy's ORM.
+ Outside of Declarative, any
+ plain Python class can be mapped to any :class:`.Table`
+ using the :func:`.mapper` function directly; this
+ less common usage is described at :ref:`classical_mapping`.
+
+When we declared our class, Declarative used a Python metaclass in order to
+perform additional activities once the class declaration was complete; within
+this phase, it then created a :class:`.Table` object according to our
+specifications, and associated it with the class by constructing
+a :class:`.Mapper` object. This object is a behind-the-scenes object we normally
+don't need to deal with directly (though it can provide plenty of information
+about our mapping when we need it).
+
+The :class:`.Table` object is a member of a larger collection
+known as :class:`.MetaData`. When using Declarative,
+this object is available using the ``.metadata``
+attribute of our declarative base class.
+
+The :class:`.MetaData`
+is a :term:`registry` which includes the ability to emit a limited set
+of schema generation commands to the database. As our SQLite database
+does not actually have a ``users`` table present, we can use :class:`.MetaData`
+to issue CREATE TABLE statements to the database for all tables that don't yet exist.
+Below, we call the :meth:`.MetaData.create_all` method, passing in our :class:`.Engine`
as a source of database connectivity. We will see that special commands are
first emitted to check for the presence of the ``users`` table, and following that
the actual ``CREATE TABLE`` statement:
@@ -242,13 +246,9 @@ the actual ``CREATE TABLE`` statement:
fullname = Column(String(50))
password = Column(String(12))
- def __init__(self, name, fullname, password):
- self.name = name
- self.fullname = fullname
- self.password = password
-
def __repr__(self):
- return "<User('%s','%s', '%s')>" % (self.name, self.fullname, self.password)
+ return "<User(name='%s', fullname='%s', password='%s')>" % (
+ self.name, self.fullname, self.password)
We include this more verbose table definition separately
to highlight the difference between a minimal construct geared primarily
@@ -261,7 +261,7 @@ Create an Instance of the Mapped Class
With mappings complete, let's now create and inspect a ``User`` object::
- >>> ed_user = User('ed', 'Ed Jones', 'edspassword')
+ >>> ed_user = User(name='ed', fullname='Ed Jones', password='edspassword')
>>> ed_user.name
'ed'
>>> ed_user.password
@@ -269,41 +269,23 @@ With mappings complete, let's now create and inspect a ``User`` object::
>>> str(ed_user.id)
'None'
-The ``id`` attribute, which while not defined by our ``__init__()`` method,
-exists with a value of ``None`` on our ``User`` instance due to the ``id``
-column we declared in our mapping. By
-default, the ORM creates class attributes for all columns present
-in the table being mapped. These class attributes exist as
-:term:`descriptors`, and
-define **instrumentation** for the mapped class. The
-functionality of this instrumentation includes the ability to fire on change
-events, track modifications, and to automatically load new data from the database when
-needed.
-
-Since we have not yet told SQLAlchemy to persist ``Ed Jones`` within the
-database, its id is ``None``. When we persist the object later, this attribute
-will be populated with a newly generated value.
-
-.. topic:: The default ``__init__()`` method
-
- Note that in our ``User`` example we supplied an ``__init__()`` method,
- which receives ``name``, ``fullname`` and ``password`` as positional arguments.
- The Declarative system supplies for us a default constructor if one is
- not already present, which accepts keyword arguments of the same name
- as that of the mapped attributes. Below we define ``User`` without
- specifying a constructor::
-
- class User(Base):
- __tablename__ = 'users'
- id = Column(Integer, primary_key=True)
- name = Column(String)
- fullname = Column(String)
- password = Column(String)
-
- Our ``User`` class above will make usage of the default constructor, and provide
- ``id``, ``name``, ``fullname``, and ``password`` as keyword arguments::
-
- u1 = User(name='ed', fullname='Ed Jones', password='foobar')
+
+.. sidebar:: the ``__init__()`` method
+
+ Our ``User`` class, as defined using the Declarative system, has
+ been provided with a constructor (e.g. ``__init__()`` method) which automatically
+ accepts keyword names that match the columns we've mapped. We are free
+ to define any explicit ``__init__()`` method we prefer on our class, which
+ will override the default method provided by Declarative.
+
+Even though we didn't specify it in the constructor, the ``id`` attribute
+still produces a value of ``None`` when we access it (as opposed to Python's
+usual behavior of raising ``AttributeError`` for an undefined attribute).
+SQLAlchemy's :term:`instrumentation` normally produces this default value for
+column-mapped attributes when first accessed. For those attributes where
+we've actually assigned a value, the instrumentation system is tracking
+those assignments for use within an eventual INSERT statement to be emitted to the
+database.
Creating a Session
==================
@@ -330,10 +312,21 @@ connect it to the :class:`~sqlalchemy.orm.session.Session` using
>>> Session.configure(bind=engine) # once engine is available
+.. sidebar:: Session Lifecycle Patterns
+
+ The question of when to make a :class:`.Session` depends a lot on what
+ kind of application is being built. Keep in mind,
+ the :class:`.Session` is just a workspace for your objects,
+ local to a particular database connection - if you think of
+ an application thread as a guest at a dinner party, the :class:`.Session`
+ is the guest's plate and the objects it holds are the food
+ (and the database...the kitchen?)! More on this topic
+ available at :ref:`session_faq_whentocreate`.
+
This custom-made :class:`~sqlalchemy.orm.session.Session` class will create
new :class:`~sqlalchemy.orm.session.Session` objects which are bound to our
database. Other transactional characteristics may be defined when calling
-:func:`~.sessionmaker` as well; these are described in a later
+:class:`~.sessionmaker` as well; these are described in a later
chapter. Then, whenever you need to have a conversation with the database, you
instantiate a :class:`~sqlalchemy.orm.session.Session`::
@@ -345,24 +338,13 @@ used, it retrieves a connection from a pool of connections maintained by the
:class:`.Engine`, and holds onto it until we commit all changes and/or close the
session object.
-.. topic:: Session Creational Patterns
-
- The business of acquiring a :class:`.Session` has a good deal of variety based
- on the variety of types of applications and frameworks out there.
- Keep in mind the :class:`.Session` is just a workspace for your objects,
- local to a particular database connection - if you think of
- an application thread as a guest at a dinner party, the :class:`.Session`
- is the guest's plate and the objects it holds are the food
- (and the database...the kitchen?)! Hints on
- how :class:`.Session` is integrated into an application are at
- :ref:`session_faq`.
Adding New Objects
==================
To persist our ``User`` object, we :meth:`~.Session.add` it to our :class:`~sqlalchemy.orm.session.Session`::
- >>> ed_user = User('ed', 'Ed Jones', 'edspassword')
+ >>> ed_user = User(name='ed', fullname='Ed Jones', password='edspassword')
>>> session.add(ed_user)
At this point, we say that the instance is **pending**; no SQL has yet been issued
@@ -393,7 +375,7 @@ added:
LIMIT ? OFFSET ?
('ed', 1, 0)
{stop}>>> our_user
- <User('ed','Ed Jones', 'edspassword')>
+ <User(name='ed', fullname='Ed Jones', password='edspassword')>
In fact, the :class:`~sqlalchemy.orm.session.Session` has identified that the
row returned is the **same** row as one already represented within its
@@ -403,7 +385,7 @@ that which we just added::
>>> ed_user is our_user
True
-The ORM concept at work here is known as an `identity map <http://martinfowler.com/eaaCatalog/identityMap.html>`_
+The ORM concept at work here is known as an :term:`identity map`
and ensures that
all operations upon a particular row within a
:class:`~sqlalchemy.orm.session.Session` operate upon the same set of data.
@@ -420,11 +402,11 @@ We can add more ``User`` objects at once using
.. sourcecode:: python+sql
>>> session.add_all([
- ... User('wendy', 'Wendy Williams', 'foobar'),
- ... User('mary', 'Mary Contrary', 'xxg527'),
- ... User('fred', 'Fred Flinstone', 'blah')])
+ ... User(name='wendy', fullname='Wendy Williams', password='foobar'),
+ ... User(name='mary', fullname='Mary Contrary', password='xxg527'),
+ ... User(name='fred', fullname='Fred Flinstone', password='blah')])
-Also, Ed has already decided his password isn't too secure, so lets change it:
+Also, we've decided the password for Ed isn't too secure, so let's change it:
.. sourcecode:: python+sql
@@ -436,16 +418,16 @@ for example, that ``Ed Jones`` has been modified:
.. sourcecode:: python+sql
>>> session.dirty
- IdentitySet([<User('ed','Ed Jones', 'f8s7ccs')>])
+ IdentitySet([<User(name='ed', fullname='Ed Jones', password='f8s7ccs')>])
and that three new ``User`` objects are pending:
.. sourcecode:: python+sql
>>> session.new # doctest: +SKIP
- IdentitySet([<User('wendy','Wendy Williams', 'foobar')>,
- <User('mary','Mary Contrary', 'xxg527')>,
- <User('fred','Fred Flinstone', 'blah')>])
+ IdentitySet([<User(name='wendy', fullname='Wendy Williams', password='foobar')>,
+ <User(name='mary', fullname='Mary Contrary', password='xxg527')>,
+ <User(name='fred', fullname='Fred Flinstone', password='blah')>])
We tell the :class:`~sqlalchemy.orm.session.Session` that we'd like to issue
all remaining changes to the database and commit the transaction, which has
@@ -517,7 +499,7 @@ and we'll add another erroneous user, ``fake_user``:
.. sourcecode:: python+sql
- >>> fake_user = User('fakeuser', 'Invalid', '12345')
+ >>> fake_user = User(name='fakeuser', fullname='Invalid', password='12345')
>>> session.add(fake_user)
Querying the session, we can see that they're flushed into the current transaction:
@@ -536,7 +518,7 @@ Querying the session, we can see that they're flushed into the current transacti
FROM users
WHERE users.name IN (?, ?)
('Edwardo', 'fakeuser')
- {stop}[<User('Edwardo','Ed Jones', 'f8s7ccs')>, <User('fakeuser','Invalid', '12345')>]
+    {stop}[<User(name='Edwardo', fullname='Ed Jones', password='f8s7ccs')>, <User(name='fakeuser', fullname='Invalid', password='12345')>]
Rolling back, we can see that ``ed_user``'s name is back to ``ed``, and
``fake_user`` has been kicked out of the session:
@@ -572,7 +554,7 @@ issuing a SELECT illustrates the changes made to the database:
FROM users
WHERE users.name IN (?, ?)
('ed', 'fakeuser')
- {stop}[<User('ed','Ed Jones', 'f8s7ccs')>]
+ {stop}[<User(name='ed', fullname='Ed Jones', password='f8s7ccs')>]
.. _ormtutorial_querying:
@@ -638,13 +620,13 @@ class:
users.password AS users_password
FROM users
()
- {stop}<User('ed','Ed Jones', 'f8s7ccs')> ed
- <User('wendy','Wendy Williams', 'foobar')> wendy
- <User('mary','Mary Contrary', 'xxg527')> mary
- <User('fred','Fred Flinstone', 'blah')> fred
+ {stop}<User(name='ed', fullname='Ed Jones', password='f8s7ccs')> ed
+ <User(name='wendy', fullname='Wendy Williams', password='foobar')> wendy
+ <User(name='mary', fullname='Mary Contrary', password='xxg527')> mary
+ <User(name='fred', fullname='Fred Flinstone', password='blah')> fred
You can control the names of individual column expressions using the
-:meth:`~.CompareMixin.label` construct, which is available from
+:meth:`~.ColumnElement.label` construct, which is available from
any :class:`.ColumnElement`-derived object, as well as any class attribute which
is mapped to one (such as ``User.name``):
@@ -662,7 +644,7 @@ is mapped to one (such as ``User.name``):
The name given to a full entity such as ``User``, assuming that multiple
entities are present in the call to :meth:`~.Session.query`, can be controlled using
-:class:`~.orm.aliased` :
+:func:`~.sqlalchemy.orm.aliased` :
.. sourcecode:: python+sql
@@ -677,10 +659,10 @@ entities are present in the call to :meth:`~.Session.query`, can be controlled u
user_alias.password AS user_alias_password
FROM users AS user_alias
(){stop}
- <User('ed','Ed Jones', 'f8s7ccs')>
- <User('wendy','Wendy Williams', 'foobar')>
- <User('mary','Mary Contrary', 'xxg527')>
- <User('fred','Fred Flinstone', 'blah')>
+ <User(name='ed', fullname='Ed Jones', password='f8s7ccs')>
+ <User(name='wendy', fullname='Wendy Williams', password='foobar')>
+ <User(name='mary', fullname='Mary Contrary', password='xxg527')>
+ <User(name='fred', fullname='Fred Flinstone', password='blah')>
Basic operations with :class:`~sqlalchemy.orm.query.Query` include issuing
LIMIT and OFFSET, most conveniently using Python array slices and typically in
@@ -697,8 +679,8 @@ conjunction with ORDER BY:
FROM users ORDER BY users.id
LIMIT ? OFFSET ?
(2, 1){stop}
- <User('wendy','Wendy Williams', 'foobar')>
- <User('mary','Mary Contrary', 'xxg527')>
+ <User(name='wendy', fullname='Wendy Williams', password='foobar')>
+ <User(name='mary', fullname='Mary Contrary', password='xxg527')>
and filtering results, which is accomplished either with
:func:`~sqlalchemy.orm.query.Query.filter_by`, which uses keyword arguments:
@@ -747,13 +729,13 @@ users named "ed" with a full name of "Ed Jones", you can call
FROM users
WHERE users.name = ? AND users.fullname = ?
('ed', 'Ed Jones')
- {stop}<User('ed','Ed Jones', 'f8s7ccs')>
-
+ {stop}<User(name='ed', fullname='Ed Jones', password='f8s7ccs')>
Common Filter Operators
-----------------------
-Here's a rundown of some of the most common operators used in :func:`~sqlalchemy.orm.query.Query.filter`:
+Here's a rundown of some of the most common operators used in
+:func:`~sqlalchemy.orm.query.Query.filter`:
* equals::
@@ -772,8 +754,9 @@ Here's a rundown of some of the most common operators used in :func:`~sqlalchemy
query.filter(User.name.in_(['ed', 'wendy', 'jack']))
# works with query objects too:
-
- query.filter(User.name.in_(session.query(User.name).filter(User.name.like('%ed%'))))
+ query.filter(User.name.in_(
+ session.query(User.name).filter(User.name.like('%ed%'))
+ ))
* NOT IN::
@@ -781,24 +764,28 @@ Here's a rundown of some of the most common operators used in :func:`~sqlalchemy
* IS NULL::
- filter(User.name == None)
+ query.filter(User.name == None)
* IS NOT NULL::
- filter(User.name != None)
+ query.filter(User.name != None)
* AND::
+ # use and_()
from sqlalchemy import and_
- filter(and_(User.name == 'ed', User.fullname == 'Ed Jones'))
+ query.filter(and_(User.name == 'ed', User.fullname == 'Ed Jones'))
- # or call filter()/filter_by() multiple times
- filter(User.name == 'ed').filter(User.fullname == 'Ed Jones')
+ # or send multiple expressions to .filter()
+ query.filter(User.name == 'ed', User.fullname == 'Ed Jones')
+
+ # or chain multiple filter()/filter_by() calls
+ query.filter(User.name == 'ed').filter(User.fullname == 'Ed Jones')
* OR::
from sqlalchemy import or_
- filter(or_(User.name == 'ed', User.name == 'wendy'))
+ query.filter(or_(User.name == 'ed', User.name == 'wendy'))
* match::
@@ -809,76 +796,101 @@ Here's a rundown of some of the most common operators used in :func:`~sqlalchemy
Returning Lists and Scalars
---------------------------
-The :meth:`~sqlalchemy.orm.query.Query.all()`,
-:meth:`~sqlalchemy.orm.query.Query.one()`, and
-:meth:`~sqlalchemy.orm.query.Query.first()` methods of
-:class:`~sqlalchemy.orm.query.Query` immediately issue SQL and return a
-non-iterator value. :meth:`~sqlalchemy.orm.query.Query.all()` returns a list:
-
-.. sourcecode:: python+sql
-
- >>> query = session.query(User).filter(User.name.like('%ed')).order_by(User.id)
- {sql}>>> query.all() #doctest: +NORMALIZE_WHITESPACE
- SELECT users.id AS users_id,
- users.name AS users_name,
- users.fullname AS users_fullname,
- users.password AS users_password
- FROM users
- WHERE users.name LIKE ? ORDER BY users.id
- ('%ed',)
- {stop}[<User('ed','Ed Jones', 'f8s7ccs')>, <User('fred','Fred Flinstone', 'blah')>]
-
-:meth:`~sqlalchemy.orm.query.Query.first()` applies a limit of one and returns
-the first result as a scalar:
-
-.. sourcecode:: python+sql
-
- {sql}>>> query.first() #doctest: +NORMALIZE_WHITESPACE
- SELECT users.id AS users_id,
- users.name AS users_name,
- users.fullname AS users_fullname,
- users.password AS users_password
- FROM users
- WHERE users.name LIKE ? ORDER BY users.id
- LIMIT ? OFFSET ?
- ('%ed', 1, 0)
- {stop}<User('ed','Ed Jones', 'f8s7ccs')>
-
-:meth:`~sqlalchemy.orm.query.Query.one()`, fully fetches all rows, and if not
-exactly one object identity or composite row is present in the result, raises
-an error:
-
-.. sourcecode:: python+sql
-
- {sql}>>> from sqlalchemy.orm.exc import MultipleResultsFound
- >>> try: #doctest: +NORMALIZE_WHITESPACE
- ... user = query.one()
- ... except MultipleResultsFound, e:
- ... print e
- SELECT users.id AS users_id,
- users.name AS users_name,
- users.fullname AS users_fullname,
- users.password AS users_password
- FROM users
- WHERE users.name LIKE ? ORDER BY users.id
- ('%ed',)
- {stop}Multiple rows were found for one()
-
-.. sourcecode:: python+sql
-
- {sql}>>> from sqlalchemy.orm.exc import NoResultFound
- >>> try: #doctest: +NORMALIZE_WHITESPACE
- ... user = query.filter(User.id == 99).one()
- ... except NoResultFound, e:
- ... print e
- SELECT users.id AS users_id,
- users.name AS users_name,
- users.fullname AS users_fullname,
- users.password AS users_password
- FROM users
- WHERE users.name LIKE ? AND users.id = ? ORDER BY users.id
- ('%ed', 99)
- {stop}No row was found for one()
+A number of methods on :class:`.Query`
+immediately issue SQL and return a value containing loaded
+database results. Here's a brief tour:
+
+* :meth:`~.Query.all()` returns a list:
+
+ .. sourcecode:: python+sql
+
+ >>> query = session.query(User).filter(User.name.like('%ed')).order_by(User.id)
+ {sql}>>> query.all() #doctest: +NORMALIZE_WHITESPACE
+ SELECT users.id AS users_id,
+ users.name AS users_name,
+ users.fullname AS users_fullname,
+ users.password AS users_password
+ FROM users
+ WHERE users.name LIKE ? ORDER BY users.id
+ ('%ed',)
+ {stop}[<User(name='ed', fullname='Ed Jones', password='f8s7ccs')>,
+ <User(name='fred', fullname='Fred Flinstone', password='blah')>]
+
+* :meth:`~.Query.first()` applies a limit of one and returns
+ the first result as a scalar:
+
+ .. sourcecode:: python+sql
+
+ {sql}>>> query.first() #doctest: +NORMALIZE_WHITESPACE
+ SELECT users.id AS users_id,
+ users.name AS users_name,
+ users.fullname AS users_fullname,
+ users.password AS users_password
+ FROM users
+ WHERE users.name LIKE ? ORDER BY users.id
+ LIMIT ? OFFSET ?
+ ('%ed', 1, 0)
+ {stop}<User(name='ed', fullname='Ed Jones', password='f8s7ccs')>
+
+* :meth:`~.Query.one()` fully fetches all rows, and if not
+ exactly one object identity or composite row is present in the result, raises
+ an error. With multiple rows found:
+
+ .. sourcecode:: python+sql
+
+ {sql}>>> from sqlalchemy.orm.exc import MultipleResultsFound
+ >>> try: #doctest: +NORMALIZE_WHITESPACE
+ ... user = query.one()
+ ... except MultipleResultsFound, e:
+ ... print e
+ SELECT users.id AS users_id,
+ users.name AS users_name,
+ users.fullname AS users_fullname,
+ users.password AS users_password
+ FROM users
+ WHERE users.name LIKE ? ORDER BY users.id
+ ('%ed',)
+ {stop}Multiple rows were found for one()
+
+ With no rows found:
+
+ .. sourcecode:: python+sql
+
+ {sql}>>> from sqlalchemy.orm.exc import NoResultFound
+ >>> try: #doctest: +NORMALIZE_WHITESPACE
+ ... user = query.filter(User.id == 99).one()
+ ... except NoResultFound, e:
+ ... print e
+ SELECT users.id AS users_id,
+ users.name AS users_name,
+ users.fullname AS users_fullname,
+ users.password AS users_password
+ FROM users
+ WHERE users.name LIKE ? AND users.id = ? ORDER BY users.id
+ ('%ed', 99)
+ {stop}No row was found for one()
+
+ The :meth:`~.Query.one` method is great for systems that expect to handle
+  "no items found" versus "multiple items found" differently, such as a RESTful
+ web service, which may want to raise a "404 not found" when no results are found,
+ but raise an application error when multiple results are found.
+
+* :meth:`~.Query.scalar` invokes the :meth:`~.Query.one` method, and upon
+ success returns the first column of the row:
+
+ .. sourcecode:: python+sql
+
+ >>> query = session.query(User.id).filter(User.name.like('%ed')).\
+ ... order_by(User.id)
+ {sql}>>> query.scalar() #doctest: +NORMALIZE_WHITESPACE
+ SELECT users.id AS users_id
+ FROM users
+ WHERE users.name LIKE ? ORDER BY users.id
+ LIMIT ? OFFSET ?
+ ('%ed', 1, 0)
+ {stop}7
+
+.. _orm_tutorial_literal_sql:
Using Literal SQL
-----------------
@@ -922,7 +934,7 @@ method:
FROM users
WHERE id<? and name=? ORDER BY users.id
(224, 'fred')
- {stop}<User('fred','Fred Flinstone', 'blah')>
+ {stop}<User(name='fred', fullname='Fred Flinstone', password='blah')>
To use an entirely string-based statement, using
:meth:`~sqlalchemy.orm.query.Query.from_statement()`; just ensure that the
@@ -936,7 +948,7 @@ mapper (below illustrated using an asterisk):
... params(name='ed').all()
SELECT * FROM users where name=?
('ed',)
- {stop}[<User('ed','Ed Jones', 'f8s7ccs')>]
+ {stop}[<User(name='ed', fullname='Ed Jones', password='f8s7ccs')>]
You can use :meth:`~sqlalchemy.orm.query.Query.from_statement()` to go
completely "raw", using string names to identify desired columns:
@@ -1056,6 +1068,16 @@ counting called :meth:`~sqlalchemy.orm.query.Query.count()`:
('%ed',)
{stop}2
+.. sidebar:: Counting on ``count()``
+
+ :meth:`.Query.count` used to be a very complicated method
+ when it would try to guess whether or not a subquery was needed
+ around the
+ existing query, and in some exotic cases it wouldn't do the right thing.
+ Now that it uses a simple subquery every time, it's only two lines long
+ and always returns the right answer. Use ``func.count()`` if a
+ particular statement absolutely cannot tolerate the subquery being present.
+
The :meth:`~.Query.count()` method is used to determine
how many rows the SQL statement would return. Looking
at the generated SQL above, SQLAlchemy always places whatever it is we are
@@ -1123,15 +1145,12 @@ declarative, we define this table along with its mapped class, ``Address``:
...
... user = relationship("User", backref=backref('addresses', order_by=id))
...
- ... def __init__(self, email_address):
- ... self.email_address = email_address
- ...
... def __repr__(self):
- ... return "<Address('%s')>" % self.email_address
+ ... return "<Address(email_address='%s')>" % self.email_address
The above class introduces the :class:`.ForeignKey` construct, which is a
directive applied to :class:`.Column` that indicates that values in this
-column should be **constrained** to be values present in the named remote
+column should be :term:`constrained` to be values present in the named remote
column. This is a core feature of relational databases, and is the "glue" that
transforms an otherwise unconnected collection of tables to have rich
overlapping relationships. The :class:`.ForeignKey` above expresses that
@@ -1143,17 +1162,17 @@ tells the ORM that the ``Address`` class itself should be linked
to the ``User`` class, using the attribute ``Address.user``.
:func:`.relationship` uses the foreign key
relationships between the two tables to determine the nature of
-this linkage, determining that ``Address.user`` will be **many-to-one**.
+this linkage, determining that ``Address.user`` will be :term:`many to one`.
A subdirective of :func:`.relationship` called :func:`.backref` is
placed inside of :func:`.relationship`, providing details about
the relationship as expressed in reverse, that of a collection of ``Address``
objects on ``User`` referenced by ``User.addresses``. The reverse
-side of a many-to-one relationship is always **one-to-many**.
+side of a many-to-one relationship is always :term:`one to many`.
A full catalog of available :func:`.relationship` configurations
is at :ref:`relationship_patterns`.
The two complementing relationships ``Address.user`` and ``User.addresses``
-are referred to as a **bidirectional relationship**, and is a key
+are referred to as a :term:`bidirectional relationship`, which is a key
feature of the SQLAlchemy ORM. The section :ref:`relationships_backref`
discusses the "backref" feature in detail.
@@ -1218,7 +1237,7 @@ default, the collection is a Python list.
.. sourcecode:: python+sql
- >>> jack = User('jack', 'Jack Bean', 'gjffdd')
+ >>> jack = User(name='jack', fullname='Jack Bean', password='gjffdd')
>>> jack.addresses
[]
@@ -1239,14 +1258,15 @@ using any SQL:
.. sourcecode:: python+sql
>>> jack.addresses[1]
- <Address('j25@yahoo.com')>
+ <Address(email_address='j25@yahoo.com')>
>>> jack.addresses[1].user
- <User('jack','Jack Bean', 'gjffdd')>
+ <User(name='jack', fullname='Jack Bean', password='gjffdd')>
-Let's add and commit ``Jack Bean`` to the database. ``jack`` as well as the
-two ``Address`` members in his ``addresses`` collection are both added to the
-session at once, using a process known as **cascading**:
+Let's add and commit ``Jack Bean`` to the database. ``jack`` as well
+as the two ``Address`` members in the corresponding ``addresses``
+collection are both added to the session at once, using a process
+known as **cascading**:
.. sourcecode:: python+sql
@@ -1276,7 +1296,7 @@ Querying for Jack, we get just Jack back. No SQL is yet issued for Jack's addre
('jack',)
{stop}>>> jack
- <User('jack','Jack Bean', 'gjffdd')>
+ <User(name='jack', fullname='Jack Bean', password='gjffdd')>
Let's look at the ``addresses`` collection. Watch the SQL:
@@ -1290,10 +1310,10 @@ Let's look at the ``addresses`` collection. Watch the SQL:
FROM addresses
WHERE ? = addresses.user_id ORDER BY addresses.id
(5,)
- {stop}[<Address('jack@google.com')>, <Address('j25@yahoo.com')>]
+ {stop}[<Address(email_address='jack@google.com')>, <Address(email_address='j25@yahoo.com')>]
When we accessed the ``addresses`` collection, SQL was suddenly issued. This
-is an example of a **lazy loading relationship**. The ``addresses`` collection
+is an example of a :term:`lazy loading` relationship. The ``addresses`` collection
is now loaded and behaves just like an ordinary list. We'll cover ways
to optimize the loading of this collection in a bit.
@@ -1318,7 +1338,8 @@ Below we load the ``User`` and ``Address`` entities at once using this method:
... filter(User.id==Address.user_id).\
... filter(Address.email_address=='jack@google.com').\
... all(): # doctest: +NORMALIZE_WHITESPACE
- ... print u, a
+ ... print u
+ ... print a
SELECT users.id AS users_id,
users.name AS users_name,
users.fullname AS users_fullname,
@@ -1330,10 +1351,11 @@ Below we load the ``User`` and ``Address`` entities at once using this method:
WHERE users.id = addresses.user_id
AND addresses.email_address = ?
('jack@google.com',)
- {stop}<User('jack','Jack Bean', 'gjffdd')> <Address('jack@google.com')>
+ {stop}<User(name='jack', fullname='Jack Bean', password='gjffdd')>
+ <Address(email_address='jack@google.com')>
-The actual SQL JOIN syntax, on the other hand, is most easily achieved using the :meth:`.Query.join`
-method:
+The actual SQL JOIN syntax, on the other hand, is most easily achieved
+using the :meth:`.Query.join` method:
.. sourcecode:: python+sql
@@ -1347,7 +1369,7 @@ method:
FROM users JOIN addresses ON users.id = addresses.user_id
WHERE addresses.email_address = ?
('jack@google.com',)
- {stop}[<User('jack','Jack Bean', 'gjffdd')>]
+ {stop}[<User(name='jack', fullname='Jack Bean', password='gjffdd')>]
:meth:`.Query.join` knows how to join between ``User``
and ``Address`` because there's only one foreign key between them. If there
@@ -1457,11 +1479,11 @@ accessible through an attribute called ``c``:
ON users.id = anon_1.user_id
ORDER BY users.id
('*',)
- {stop}<User('ed','Ed Jones', 'f8s7ccs')> None
- <User('wendy','Wendy Williams', 'foobar')> None
- <User('mary','Mary Contrary', 'xxg527')> None
- <User('fred','Fred Flinstone', 'blah')> None
- <User('jack','Jack Bean', 'gjffdd')> 2
+ {stop}<User(name='ed', fullname='Ed Jones', password='f8s7ccs')> None
+ <User(name='wendy', fullname='Wendy Williams', password='foobar')> None
+ <User(name='mary', fullname='Mary Contrary', password='xxg527')> None
+ <User(name='fred', fullname='Fred Flinstone', password='blah')> None
+ <User(name='jack', fullname='Jack Bean', password='gjffdd')> 2
Selecting Entities from Subqueries
----------------------------------
@@ -1478,7 +1500,8 @@ to associate an "alias" of a mapped class to a subquery:
>>> adalias = aliased(Address, stmt)
>>> for user, address in session.query(User, adalias).\
... join(adalias, User.addresses): # doctest: +NORMALIZE_WHITESPACE
- ... print user, address
+ ... print user
+ ... print address
SELECT users.id AS users_id,
users.name AS users_name,
users.fullname AS users_fullname,
@@ -1494,7 +1517,8 @@ to associate an "alias" of a mapped class to a subquery:
WHERE addresses.email_address != ?) AS anon_1
ON users.id = anon_1.user_id
('j25@yahoo.com',)
- {stop}<User('jack','Jack Bean', 'gjffdd')> <Address('jack@google.com')>
+ {stop}<User(name='jack', fullname='Jack Bean', password='gjffdd')>
+ <Address(email_address='jack@google.com')>
Using EXISTS
------------
@@ -1611,13 +1635,13 @@ and behavior:
Eager Loading
=============
-Recall earlier that we illustrated a **lazy loading** operation, when
+Recall earlier that we illustrated a :term:`lazy loading` operation, when
we accessed the ``User.addresses`` collection of a ``User`` and SQL
was emitted. If you want to reduce the number of queries (dramatically, in many cases),
-we can apply an **eager load** to the query operation. SQLAlchemy
+we can apply an :term:`eager load` to the query operation. SQLAlchemy
offers three types of eager loading, two of which are automatic, and a third
which involves custom criterion. All three are usually invoked via functions known
-as **query options** which give additional instructions to the :class:`.Query` on how
+as :term:`query options` which give additional instructions to the :class:`.Query` on how
we would like various attributes to be loaded, via the :meth:`.Query.options` method.
Subquery Load
@@ -1655,10 +1679,10 @@ very easy to use:
ORDER BY anon_1.users_id, addresses.id
('jack',)
{stop}>>> jack
- <User('jack','Jack Bean', 'gjffdd')>
+ <User(name='jack', fullname='Jack Bean', password='gjffdd')>
>>> jack.addresses
- [<Address('jack@google.com')>, <Address('j25@yahoo.com')>]
+ [<Address(email_address='jack@google.com')>, <Address(email_address='j25@yahoo.com')>]
Joined Load
-------------
@@ -1691,10 +1715,10 @@ will emit the extra join regardless:
('jack',)
{stop}>>> jack
- <User('jack','Jack Bean', 'gjffdd')>
+ <User(name='jack', fullname='Jack Bean', password='gjffdd')>
>>> jack.addresses
- [<Address('jack@google.com')>, <Address('j25@yahoo.com')>]
+ [<Address(email_address='jack@google.com')>, <Address(email_address='j25@yahoo.com')>]
Note that even though the OUTER JOIN resulted in two rows, we still only got
one instance of ``User`` back. This is because :class:`.Query` applies a "uniquing"
@@ -1752,10 +1776,10 @@ attribute:
('jack',)
{stop}>>> jacks_addresses
- [<Address('jack@google.com')>, <Address('j25@yahoo.com')>]
+ [<Address(email_address='jack@google.com')>, <Address(email_address='j25@yahoo.com')>]
>>> jacks_addresses[0].user
- <User('jack','Jack Bean', 'gjffdd')>
+ <User(name='jack', fullname='Jack Bean', password='gjffdd')>
For more information on eager loading, including how to configure various forms
of loading by default, see the section :doc:`/orm/loading`.
@@ -1835,13 +1859,15 @@ including the cascade configuration (we'll leave the constructor out too)::
... fullname = Column(String)
... password = Column(String)
...
- ... addresses = relationship("Address", backref='user', cascade="all, delete, delete-orphan")
+ ... addresses = relationship("Address", backref='user',
+ ... cascade="all, delete, delete-orphan")
...
... def __repr__(self):
- ... return "<User('%s','%s', '%s')>" % (self.name, self.fullname, self.password)
+ ... return "<User(name='%s', fullname='%s', password='%s')>" % (
+ ... self.name, self.fullname, self.password)
-Then we recreate ``Address``, noting that in this case we've created the ``Address.user`` relationship
-via the ``User`` class already::
+Then we recreate ``Address``, noting that in this case we've created
+the ``Address.user`` relationship via the ``User`` class already::
>>> class Address(Base):
... __tablename__ = 'addresses'
@@ -1850,11 +1876,12 @@ via the ``User`` class already::
... user_id = Column(Integer, ForeignKey('users.id'))
...
... def __repr__(self):
- ... return "<Address('%s')>" % self.email_address
+ ... return "<Address(email_address='%s')>" % self.email_address
-Now when we load Jack (below using :meth:`~.Query.get`, which loads by primary key),
-removing an address from his ``addresses`` collection will result in that
-``Address`` being deleted:
+Now when we load the user ``jack`` (below using :meth:`~.Query.get`,
+which loads by primary key), removing an address from the
+corresponding ``addresses`` collection will result in that ``Address``
+being deleted:
.. sourcecode:: python+sql
@@ -1895,7 +1922,8 @@ removing an address from his ``addresses`` collection will result in that
('jack@google.com', 'j25@yahoo.com')
{stop}1
-Deleting Jack will delete both Jack and his remaining ``Address``:
+Deleting Jack will delete both Jack and the remaining ``Address`` associated
+with the user:
.. sourcecode:: python+sql
@@ -1991,6 +2019,11 @@ via the ``post_keywords`` table::
... def __init__(self, keyword):
... self.keyword = keyword
+.. note::
+
+ The above class declarations illustrate explicit ``__init__()`` methods.
+ Remember, when using Declarative, it's optional!
+
Above, the many-to-many relationship is ``BlogPost.keywords``. The defining
feature of a many-to-many relationship is the ``secondary`` keyword argument
which references a :class:`~sqlalchemy.schema.Table` object representing the
@@ -2112,10 +2145,10 @@ keyword string 'firstpost'":
AND keywords.id = post_keywords.keyword_id
AND keywords.keyword = ?)
('firstpost',)
- {stop}[BlogPost("Wendy's Blog Post", 'This is a test', <User('wendy','Wendy Williams', 'foobar')>)]
+ {stop}[BlogPost("Wendy's Blog Post", 'This is a test', <User(name='wendy', fullname='Wendy Williams', password='foobar')>)]
-If we want to look up just Wendy's posts, we can tell the query to narrow down
-to her as a parent:
+If we want to look up posts owned by the user ``wendy``, we can tell
+the query to narrow down to that ``User`` object as a parent:
.. sourcecode:: python+sql
@@ -2134,7 +2167,7 @@ to her as a parent:
AND keywords.id = post_keywords.keyword_id
AND keywords.keyword = ?))
(2, 'firstpost')
- {stop}[BlogPost("Wendy's Blog Post", 'This is a test', <User('wendy','Wendy Williams', 'foobar')>)]
+ {stop}[BlogPost("Wendy's Blog Post", 'This is a test', <User(name='wendy', fullname='Wendy Williams', password='foobar')>)]
Or we can use Wendy's own ``posts`` relationship, which is a "dynamic"
relationship, to query straight from there:
@@ -2155,7 +2188,7 @@ relationship, to query straight from there:
AND keywords.id = post_keywords.keyword_id
AND keywords.keyword = ?))
(2, 'firstpost')
- {stop}[BlogPost("Wendy's Blog Post", 'This is a test', <User('wendy','Wendy Williams', 'foobar')>)]
+ {stop}[BlogPost("Wendy's Blog Post", 'This is a test', <User(name='wendy', fullname='Wendy Williams', password='foobar')>)]
Further Reference
==================
diff --git a/doc/build/requirements.txt b/doc/build/requirements.txt
index 2951cdd49..34f031b0b 100644
--- a/doc/build/requirements.txt
+++ b/doc/build/requirements.txt
@@ -1 +1,3 @@
mako
+changelog>=0.3.4
+sphinx-paramlinks>=0.2.2
diff --git a/doc/build/static/docs.css b/doc/build/static/docs.css
index 62e102c50..bd08f3b20 100644
--- a/doc/build/static/docs.css
+++ b/doc/build/static/docs.css
@@ -151,7 +151,6 @@ a.headerlink:hover {
#docs-body-container {
background-color:#EFEFEF;
border: solid 1px #CCC;
-
}
#docs-body,
@@ -166,6 +165,10 @@ a.headerlink:hover {
}
+#docs-body {
+ min-height: 700px;
+}
+
#docs-sidebar > ul {
font-size:.85em;
}
@@ -319,10 +322,24 @@ th.field-name {
text-align:right;
}
+div.section {
+ clear:right;
+}
div.note, div.warning, p.deprecated, div.topic, div.admonition {
background-color:#EEFFEF;
}
+.footnote {
+ font-size: .95em;
+}
+
+div.faq {
+ background-color: #EFEFEF;
+}
+
+div.faq ul {
+ list-style: square outside none;
+}
div.admonition, div.topic, .deprecated, .versionadded, .versionchanged {
border:1px solid #CCCCCC;
@@ -332,6 +349,21 @@ div.admonition, div.topic, .deprecated, .versionadded, .versionchanged {
box-shadow: 2px 2px 3px #DFDFDF;
}
+
+div.sidebar {
+ background-color: #FFFFEE;
+ border: 1px solid #DDDDBB;
+ float: right;
+ margin: 10px 0 10px 1em;
+ padding: 7px 7px 0;
+ width: 40%;
+ font-size:.9em;
+}
+
+p.sidebar-title {
+ font-weight: bold;
+}
+
/* grrr sphinx changing your document structures, removing classes.... */
.versionadded .versionmodified,
@@ -339,7 +371,7 @@ div.admonition, div.topic, .deprecated, .versionadded, .versionchanged {
.deprecated .versionmodified,
.versionadded > p:first-child > span:first-child,
.versionchanged > p:first-child > span:first-child,
-.deprecated > p:first-child > span:first-child
+.deprecated > p:first-child > span:first-child
{
background-color: #ECF0F3;
color: #990000;
diff --git a/doc/build/templates/genindex.mako b/doc/build/templates/genindex.mako
index 0a25b6724..9ea6795bc 100644
--- a/doc/build/templates/genindex.mako
+++ b/doc/build/templates/genindex.mako
@@ -1,4 +1,4 @@
-<%inherit file="${context['layout']}"/>
+<%inherit file="layout.mako"/>
<%block name="show_title" filter="util.striptags">
${_('Index')}
diff --git a/doc/build/templates/layout.mako b/doc/build/templates/layout.mako
index ba3e81c01..a879fa481 100644
--- a/doc/build/templates/layout.mako
+++ b/doc/build/templates/layout.mako
@@ -2,13 +2,19 @@
<%!
local_script_files = []
+
+ default_css_files = [
+ '_static/pygments.css',
+ '_static/docs.css',
+ ]
%>
+
<%doc>
Structural elements are all prefixed with "docs-"
- to prevent conflicts when the structure is integrated into the
+ to prevent conflicts when the structure is integrated into the
main site.
-
+
docs-container ->
docs-header ->
docs-search
@@ -31,16 +37,19 @@ withsidebar = bool(toc) and current_page_name != 'index'
<%block name="head_title">
% if current_page_name != 'index':
- ${capture(self.show_title) | util.striptags} &mdash;
+ ${capture(self.show_title) | util.striptags} &mdash;
% endif
${docstitle|h}
</%block>
<div id="docs-container">
+
<%block name="headers">
- <link rel="stylesheet" href="${pathto('_static/pygments.css', 1)}" type="text/css" />
- <link rel="stylesheet" href="${pathto('_static/docs.css', 1)}" type="text/css" />
+
+ ${parent.headers()}
+
+ <!-- begin layout.mako headers -->
<script type="text/javascript">
var DOCUMENTATION_OPTIONS = {
@@ -50,9 +59,13 @@ withsidebar = bool(toc) and current_page_name != 'index'
FILE_SUFFIX: '${file_suffix}'
};
</script>
+
+ <!-- begin iterate through sphinx environment script_files -->
% for scriptfile in script_files + self.attr.local_script_files:
<script type="text/javascript" src="${pathto(scriptfile, 1)}"></script>
% endfor
+ <!-- end iterate through sphinx environment script_files -->
+
<script type="text/javascript" src="${pathto('_static/init.js', 1)}"></script>
% if hasdoc('about'):
<link rel="author" title="${_('About these documents')}" href="${pathto('about')}" />
@@ -72,6 +85,8 @@ withsidebar = bool(toc) and current_page_name != 'index'
% if prevtopic:
<link rel="prev" title="${prevtopic['title']|util.striptags}" href="${prevtopic['link']|h}" />
% endif
+ <!-- end layout.mako headers -->
+
</%block>
<div id="docs-header">
@@ -129,7 +144,7 @@ withsidebar = bool(toc) and current_page_name != 'index'
% endfor
% endif
% if current_page_name != 'index':
- » ${self.show_title()}
+ » ${self.show_title()}
% endif
<h2>
diff --git a/doc/build/templates/page.mako b/doc/build/templates/page.mako
index 61cf9a05e..e0f98cf64 100644
--- a/doc/build/templates/page.mako
+++ b/doc/build/templates/page.mako
@@ -1,2 +1,2 @@
-<%inherit file="${context['layout']}"/>
+<%inherit file="layout.mako"/>
${body| util.strip_toplevel_anchors} \ No newline at end of file
diff --git a/doc/build/templates/rtd_layout.mako b/doc/build/templates/rtd_layout.mako
deleted file mode 100644
index a3083bd7b..000000000
--- a/doc/build/templates/rtd_layout.mako
+++ /dev/null
@@ -1,164 +0,0 @@
-<%inherit file="/layout.mako"/>
-
-<%
- newscript = []
- # strip out script files that RTD wants to provide
- for script in script_files:
- for token in ("jquery.js", "underscore.js", "doctools.js"):
- if token in script:
- break
- else:
- newscript.append(script)
- script_files[:] = newscript
-%>
-
-<%block name="headers">
-<!-- RTD <head> -->
-<script type="text/javascript" src="http://ajax.googleapis.com/ajax/libs/jquery/1.4/jquery.min.js"></script>
-<script type="text/javascript" src="${MEDIA_URL}javascript/underscore.js"></script>
-<script type="text/javascript" src="${MEDIA_URL}javascript/doctools.js"></script>
-<script type="text/javascript" src="${MEDIA_URL}javascript/searchtools.js"></script>
- <script type="text/javascript">
- var doc_version = "${current_version}";
- var doc_slug = "${slug}";
- var static_root = "${pathto('_static', 1)}"
- </script>
-<!-- end RTD <head> -->
- ${parent.headers()}
-</%block>
-
-${next.body()}
-
-<%block name="footer">
-${parent.footer()}
- <!-- End original user content -->
-## Keep this here, so that the RTD logo doesn't stomp on the bottom of the theme.
-<br>
-<br>
-<br>
-
-<style type="text/css">
- .badge {
- position: fixed;
- display: block;
- bottom: 5px;
- height: 40px;
- text-indent: -9999em;
- border-radius: 3px;
- -moz-border-radius: 3px;
- -webkit-border-radius: 3px;
- box-shadow: 0 1px 0 rgba(0, 0, 0, 0.2), 0 1px 0 rgba(255, 255, 255, 0.2) inset;
- -moz-box-shadow: 0 1px 0 rgba(0, 0, 0, 0.2), 0 1px 0 rgba(255, 255, 255, 0.2) inset;
- -webkit-box-shadow: 0 1px 0 rgba(0, 0, 0, 0.2), 0 1px 0 rgba(255, 255, 255, 0.2) inset;
- }
- #version_menu {
- position: fixed;
- display: none;
- bottom: 11px;
- right: 166px;
- list-style-type: none;
- margin: 0;
- }
- .footer_popout:hover #version_menu {
- display: block;
- }
- #version_menu li {
- display: block;
- float: right;
- }
- #version_menu li a {
- display: block;
- padding: 6px 10px 4px 10px;
- margin: 7px 7px 0 0;
- font-weight: bold;
- font-size: 14px;
- height: 20px;
- line-height: 17px;
- text-decoration: none;
- color: #fff;
- background: #8ca1af url(http://media.readthedocs.org/images/gradient-light.png) bottom left repeat-x;
- border-radius: 3px;
- -moz-border-radius: 3px;
- -webkit-border-radius: 3px;
- box-shadow: 0 1px 1px #465158;
- -moz-box-shadow: 0 1px 1px #465158;
- -webkit-box-shadow: 0 1px 1px #465158;
- text-shadow: 0 1px 1px rgba(0, 0, 0, 0.5);
- }
- #version_menu li a:hover {
- text-decoration: none;
- background-color: #697983;
- box-shadow: 0 1px 0px #465158;
- -moz-box-shadow: 0 1px 0px #465158;
- -webkit-box-shadow: 0 1px 0px #465158;
- }
- .badge.rtd {
- background: #257597 url(http://media.readthedocs.org/images/badge-rtd.png) top left no-repeat;
- border: 1px solid #282E32;
- width: 160px;
- right: 5px;
- }
- .badge.revsys { background: #465158 url(http://media.readthedocs.org/images/badge-revsys.png) top left no-repeat;
- border: 1px solid #1C5871;
- width: 290px;
- right: 173px;
- }
- .badge.revsys-inline-sponsored {
- position: inherit;
- margin-left: auto;
- margin-right: 175px;
- margin-bottom: 5px;
- background: #465158 url(http://media.readthedocs.org/images/badge-revsys.png) top left no-repeat;
- border: 1px solid #1C5871;
- width: 290px;
- right: 173px;
- }
- .badge.revsys-inline {
- position: inherit;
- margin-left: auto;
- margin-right: 175px;
- margin-bottom: 5px;
- background: #465158 url(http://media.readthedocs.org/images/badge-revsys-sm.png) top left no-repeat;
- border: 1px solid #1C5871;
- width: 205px;
- right: 173px;
- }
-
-</style>
-<div class="rtd_doc_footer">
- <div class="footer_popout">
- <a href="http://readthedocs.org/projects/${slug}/?fromdocs=${slug}" class="badge rtd">Brought to you by Read the Docs</a>
- <ul id="version_menu">
- ## rtd fills this in client side
- </ul>
- </div>
-</div>
-<!-- RTD Analytics Code -->
-<script type="text/javascript">
- var _gaq = _gaq || [];
- _gaq.push(['_setAccount', 'UA-17997319-1']);
- _gaq.push(['_trackPageview']);
-
- (function() {
- var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
- ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
- var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
- })();
-</script>
-
-% if analytics_code:
-<!-- User Analytics Code -->
-<script type="text/javascript">
- var _gaq = _gaq || [];
- _gaq.push(['_setAccount', '${analytics_code}']);
- _gaq.push(['_trackPageview']);
-
- (function() {
- var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
- ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
- var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
- })();
-</script>
-% endif
-
-</%block>
diff --git a/doc/build/templates/search.mako b/doc/build/templates/search.mako
index 68e3fb991..83a3fcd84 100644
--- a/doc/build/templates/search.mako
+++ b/doc/build/templates/search.mako
@@ -1,4 +1,4 @@
-<%inherit file="${context['layout']}"/>
+<%inherit file="layout.mako"/>
<%!
local_script_files = ['_static/searchtools.js']
diff --git a/doc/build/templates/static_base.mako b/doc/build/templates/static_base.mako
index 40bf1c68f..9eb5ec046 100644
--- a/doc/build/templates/static_base.mako
+++ b/doc/build/templates/static_base.mako
@@ -9,6 +9,15 @@
<%block name="head_title">
</%block>
</title>
+
+ <%block name="css">
+ <!-- begin iterate through SQLA + sphinx environment css_files -->
+ % for cssfile in self.attr.default_css_files + css_files:
+ <link rel="stylesheet" href="${pathto(cssfile, 1)}" type="text/css" />
+ % endfor
+ <!-- end iterate through SQLA + sphinx environment css_files -->
+ </%block>
+
<%block name="headers"/>
</head>
<body>
diff --git a/doc/build/testdocs.py b/doc/build/testdocs.py
index 9d84808e5..815aa8669 100644
--- a/doc/build/testdocs.py
+++ b/doc/build/testdocs.py
@@ -60,8 +60,7 @@ def replace_file(s, newfile):
raise ValueError("Couldn't find suitable create_engine call to replace '%s' in it" % oldfile)
return s
-#for filename in 'orm/tutorial','core/tutorial',:
-for filename in 'core/tutorial',:
+for filename in 'orm/tutorial','core/tutorial',:
filename = '%s.rst' % filename
s = open(filename).read()
#s = replace_file(s, ':memory:')