author      Mike Bayer <mike_mp@zzzcomputing.com>  2010-11-06 11:49:45 -0400
committer   Mike Bayer <mike_mp@zzzcomputing.com>  2010-11-06 11:49:45 -0400
commit      4e2c0f10cd164511b9c6377b72a8c0527e4eb716 (patch)
tree        830319060dc68ec7de0eaa04eaf8ba8e7948d535 /lib
parent      d9dc05adb689bc4eab2227a96af0d874696cc63d (diff)
parent      30bc42403754110df1fdec3037c7700cc4f26b70 (diff)
download    sqlalchemy-4e2c0f10cd164511b9c6377b72a8c0527e4eb716.tar.gz
- merge tip
Diffstat (limited to 'lib')
-rw-r--r--  lib/sqlalchemy/__init__.py                            3
-rw-r--r--  lib/sqlalchemy/dialects/mssql/base.py                79
-rw-r--r--  lib/sqlalchemy/dialects/mssql/information_schema.py   2
-rw-r--r--  lib/sqlalchemy/dialects/mssql/pymssql.py              6
-rw-r--r--  lib/sqlalchemy/dialects/mysql/base.py                 4
-rw-r--r--  lib/sqlalchemy/dialects/mysql/zxjdbc.py               2
-rw-r--r--  lib/sqlalchemy/dialects/oracle/base.py                8
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/base.py           70
-rw-r--r--  lib/sqlalchemy/engine/__init__.py                     7
-rw-r--r--  lib/sqlalchemy/engine/default.py                      1
-rwxr-xr-x  lib/sqlalchemy/ext/declarative.py                    89
-rw-r--r--  lib/sqlalchemy/orm/__init__.py                       50
-rw-r--r--  lib/sqlalchemy/orm/dynamic.py                         2
-rw-r--r--  lib/sqlalchemy/orm/interfaces.py                     34
-rw-r--r--  lib/sqlalchemy/orm/mapper.py                         23
-rw-r--r--  lib/sqlalchemy/orm/properties.py                      2
-rw-r--r--  lib/sqlalchemy/orm/query.py                          65
-rw-r--r--  lib/sqlalchemy/orm/session.py                        66
-rw-r--r--  lib/sqlalchemy/orm/state.py                          10
-rw-r--r--  lib/sqlalchemy/orm/strategies.py                     62
-rw-r--r--  lib/sqlalchemy/schema.py                             12
-rw-r--r--  lib/sqlalchemy/sql/__init__.py                        1
-rw-r--r--  lib/sqlalchemy/sql/compiler.py                        8
-rw-r--r--  lib/sqlalchemy/sql/expression.py                     52
-rw-r--r--  lib/sqlalchemy/test/__init__.py                       3
-rw-r--r--  lib/sqlalchemy/test/engines.py                        2
-rw-r--r--  lib/sqlalchemy/test/profiling.py                      2
-rw-r--r--  lib/sqlalchemy/test/testing.py                        7
-rw-r--r--  lib/sqlalchemy/types.py                              66
-rw-r--r--  lib/sqlalchemy/util.py                               14
-rw-r--r--  lib/sqlalchemy_nose/__init__.py                       0
-rw-r--r--  lib/sqlalchemy_nose/config.py (renamed from lib/sqlalchemy/test/config.py)        0
-rw-r--r--  lib/sqlalchemy_nose/noseplugin.py (renamed from lib/sqlalchemy/test/noseplugin.py)  10
33 files changed, 556 insertions(+), 206 deletions(-)
diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py
index cb4e8e10b..5eea53ac6 100644
--- a/lib/sqlalchemy/__init__.py
+++ b/lib/sqlalchemy/__init__.py
@@ -43,6 +43,7 @@ from sqlalchemy.sql import (
subquery,
text,
tuple_,
+ type_coerce,
union,
union_all,
update,
@@ -114,6 +115,6 @@ from sqlalchemy.engine import create_engine, engine_from_config
__all__ = sorted(name for name, obj in locals().items()
if not (name.startswith('_') or inspect.ismodule(obj)))
-__version__ = '0.6.5'
+__version__ = '0.6.6'
del inspect, sys
diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py
index 95a5bf4c4..5c3b72647 100644
--- a/lib/sqlalchemy/dialects/mssql/base.py
+++ b/lib/sqlalchemy/dialects/mssql/base.py
@@ -114,6 +114,8 @@ Known Issues
------------
* No support for more than one ``IDENTITY`` column per table
+* reflection of indexes does not work with versions older than
+ SQL Server 2005
"""
import datetime, decimal, inspect, operator, sys, re
@@ -755,20 +757,20 @@ class MSSQLCompiler(compiler.SQLCompiler):
return None
def visit_table(self, table, mssql_aliased=False, **kwargs):
- if mssql_aliased:
+ if mssql_aliased is table:
return super(MSSQLCompiler, self).visit_table(table, **kwargs)
# alias schema-qualified tables
alias = self._schema_aliased_table(table)
if alias is not None:
- return self.process(alias, mssql_aliased=True, **kwargs)
+ return self.process(alias, mssql_aliased=table, **kwargs)
else:
return super(MSSQLCompiler, self).visit_table(table, **kwargs)
def visit_alias(self, alias, **kwargs):
# translate for schema-qualified table aliases
self.tablealiases[alias.original] = alias
- kwargs['mssql_aliased'] = True
+ kwargs['mssql_aliased'] = alias.original
return super(MSSQLCompiler, self).visit_alias(alias, **kwargs)
def visit_extract(self, extract, **kw):
@@ -1124,26 +1126,55 @@ class MSDialect(default.DefaultDialect):
view_names = [r[0] for r in connection.execute(s)]
return view_names
- # The cursor reports it is closed after executing the sp.
@reflection.cache
def get_indexes(self, connection, tablename, schema=None, **kw):
+ # using system catalogs, don't support index reflection
+ # below MS 2005
+ if self.server_version_info < MS_2005_VERSION:
+ return []
+
current_schema = schema or self.default_schema_name
- col_finder = re.compile("(\w+)")
full_tname = "%s.%s" % (current_schema, tablename)
- indexes = []
- s = sql.text("exec sp_helpindex '%s'" % full_tname)
- rp = connection.execute(s)
- if rp.closed:
- # did not work for this setup.
- return []
+
+ rp = connection.execute(
+ sql.text("select ind.index_id, ind.is_unique, ind.name "
+ "from sys.indexes as ind join sys.tables as tab on "
+ "ind.object_id=tab.object_id "
+ "join sys.schemas as sch on sch.schema_id=tab.schema_id "
+ "where tab.name = :tabname "
+ "and sch.name=:schname "
+ "and ind.is_primary_key=0",
+ bindparams=[
+ sql.bindparam('tabname', tablename, sqltypes.Unicode),
+ sql.bindparam('schname', current_schema, sqltypes.Unicode)
+ ]
+ )
+ )
+ indexes = {}
for row in rp:
- if 'primary key' not in row['index_description']:
- indexes.append({
- 'name' : row['index_name'],
- 'column_names' : col_finder.findall(row['index_keys']),
- 'unique': 'unique' in row['index_description']
- })
- return indexes
+ indexes[row['index_id']] = {
+ 'name':row['name'],
+ 'unique':row['is_unique'] == 1,
+ 'column_names':[]
+ }
+ rp = connection.execute(
+ sql.text("select ind_col.index_id, col.name from sys.columns as col "
+ "join sys.index_columns as ind_col on "
+ "ind_col.column_id=col.column_id "
+ "join sys.tables as tab on tab.object_id=col.object_id "
+ "join sys.schemas as sch on sch.schema_id=tab.schema_id "
+ "where tab.name=:tabname "
+ "and sch.name=:schname",
+ bindparams=[
+ sql.bindparam('tabname', tablename, sqltypes.Unicode),
+ sql.bindparam('schname', current_schema, sqltypes.Unicode)
+ ]),
+ )
+ for row in rp:
+ if row['index_id'] in indexes:
+ indexes[row['index_id']]['column_names'].append(row['name'])
+
+ return indexes.values()
@reflection.cache
def get_view_definition(self, connection, viewname, schema=None, **kw):
@@ -1207,13 +1238,13 @@ class MSDialect(default.DefaultDialect):
"Did not recognize type '%s' of column '%s'" %
(type, name))
coltype = sqltypes.NULLTYPE
+ else:
+ if issubclass(coltype, sqltypes.Numeric) and \
+ coltype is not MSReal:
+ kwargs['scale'] = numericscale
+ kwargs['precision'] = numericprec
- if issubclass(coltype, sqltypes.Numeric) and \
- coltype is not MSReal:
- kwargs['scale'] = numericscale
- kwargs['precision'] = numericprec
-
- coltype = coltype(**kwargs)
+ coltype = coltype(**kwargs)
cdict = {
'name' : name,
'type' : coltype,
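The rewritten ``get_indexes()`` above is reached through the standard reflection API. A minimal sketch of how its results might be inspected, assuming an SQL Server 2005+ database behind the placeholder URL and a (hypothetical) table named ``mytable``::

    from sqlalchemy import create_engine
    from sqlalchemy.engine import reflection

    engine = create_engine("mssql+pyodbc://scott:tiger@mydsn")
    insp = reflection.Inspector.from_engine(engine)

    # each entry carries 'name', 'unique' and 'column_names', built from the
    # sys.indexes / sys.index_columns queries above; on SQL Server 2000 the
    # method now returns an empty list instead of calling sp_helpindex.
    for idx in insp.get_indexes('mytable', schema='dbo'):
        print("%s unique=%s cols=%s" % (idx['name'], idx['unique'], idx['column_names']))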
diff --git a/lib/sqlalchemy/dialects/mssql/information_schema.py b/lib/sqlalchemy/dialects/mssql/information_schema.py
index cd1606dbf..4dd6436cd 100644
--- a/lib/sqlalchemy/dialects/mssql/information_schema.py
+++ b/lib/sqlalchemy/dialects/mssql/information_schema.py
@@ -1,3 +1,5 @@
+# TODO: should be using the sys. catalog with SQL Server, not information schema
+
from sqlalchemy import Table, MetaData, Column, ForeignKey
from sqlalchemy.types import String, Unicode, Integer, TypeDecorator
diff --git a/lib/sqlalchemy/dialects/mssql/pymssql.py b/lib/sqlalchemy/dialects/mssql/pymssql.py
index b6728c6b0..c5f471942 100644
--- a/lib/sqlalchemy/dialects/mssql/pymssql.py
+++ b/lib/sqlalchemy/dialects/mssql/pymssql.py
@@ -85,7 +85,9 @@ class MSDialect_pymssql(MSDialect):
def create_connect_args(self, url):
opts = url.translate_connect_args(username='user')
opts.update(url.query)
- opts.pop('port', None)
+ port = opts.pop('port', None)
+ if port and 'host' in opts:
+ opts['host'] = "%s:%s" % (opts['host'], port)
return [[], opts]
def is_disconnect(self, e):
@@ -99,4 +101,4 @@ class MSDialect_pymssql(MSDialect):
else:
return False
-dialect = MSDialect_pymssql
\ No newline at end of file
+dialect = MSDialect_pymssql
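With the change above, the port portion of a pymssql URL is no longer dropped; it is folded into the host string handed to the DBAPI. A sketch with placeholder host, port and credentials::

    from sqlalchemy import create_engine

    # pymssql now receives host="192.168.1.10:1433" rather than losing the port
    engine = create_engine("mssql+pymssql://scott:tiger@192.168.1.10:1433/test")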
diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py
index a2d3748f3..660d201d1 100644
--- a/lib/sqlalchemy/dialects/mysql/base.py
+++ b/lib/sqlalchemy/dialects/mysql/base.py
@@ -2371,8 +2371,8 @@ class MySQLTableDefinitionParser(object):
r'(?: +COLLATE +(?P<collate>[\w_]+))?'
r'(?: +(?P<notnull>NOT NULL))?'
r'(?: +DEFAULT +(?P<default>'
- r'(?:NULL|\x27(?:\x27\x27|[^\x27])*\x27|\w+)'
- r'(?:ON UPDATE \w+)?'
+ r'(?:NULL|\x27(?:\x27\x27|[^\x27])*\x27|\w+'
+ r'(?: +ON UPDATE \w+)?)'
r'))?'
r'(?: +(?P<autoincr>AUTO_INCREMENT))?'
r'(?: +COMMENT +(P<comment>(?:\x27\x27|[^\x27])+))?'
diff --git a/lib/sqlalchemy/dialects/mysql/zxjdbc.py b/lib/sqlalchemy/dialects/mysql/zxjdbc.py
index 06d3e6616..0c0c39b67 100644
--- a/lib/sqlalchemy/dialects/mysql/zxjdbc.py
+++ b/lib/sqlalchemy/dialects/mysql/zxjdbc.py
@@ -92,7 +92,7 @@ class MySQLDialect_zxjdbc(ZxJDBCConnector, MySQLDialect):
def _extract_error_code(self, exception):
# e.g.: DBAPIError: (Error) Table 'test.u2' doesn't exist
# [SQLCode: 1146], [SQLState: 42S02] 'DESCRIBE `u2`' ()
- m = re.compile(r"\[SQLCode\: (\d+)\]").search(str(exception.orig.args))
+ m = re.compile(r"\[SQLCode\: (\d+)\]").search(str(exception.args))
c = m.group(1)
if c:
return int(c)
diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py
index 0aa348953..256972696 100644
--- a/lib/sqlalchemy/dialects/oracle/base.py
+++ b/lib/sqlalchemy/dialects/oracle/base.py
@@ -640,9 +640,11 @@ class OracleDialect(default.DefaultDialect):
def initialize(self, connection):
super(OracleDialect, self).initialize(connection)
- self.implicit_returning = self.server_version_info > (10, ) and \
- self.__dict__.get('implicit_returning', True)
-
+ self.implicit_returning = self.__dict__.get(
+ 'implicit_returning',
+ self.server_version_info > (10, )
+ )
+
if self._is_oracle_8:
self.colspecs = self.colspecs.copy()
self.colspecs.pop(sqltypes.Interval)
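The reordered ``initialize()`` means an ``implicit_returning`` flag passed explicitly to ``create_engine()`` now takes precedence over the server-version default (RETURNING otherwise enabled on Oracle 10g and above). A sketch, assuming an Oracle database behind the placeholder URL::

    from sqlalchemy import create_engine

    # explicitly disable RETURNING even on a 10g+ server; previously the
    # version check could override the explicit setting.
    engine = create_engine("oracle://scott:tiger@localhost:1521/xe",
                           implicit_returning=False)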
diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py
index 89769b8c0..0d103cb0d 100644
--- a/lib/sqlalchemy/dialects/postgresql/base.py
+++ b/lib/sqlalchemy/dialects/postgresql/base.py
@@ -171,7 +171,7 @@ class ARRAY(sqltypes.MutableType, sqltypes.Concatenable, sqltypes.TypeEngine):
"""
__visit_name__ = 'ARRAY'
- def __init__(self, item_type, mutable=True):
+ def __init__(self, item_type, mutable=True, as_tuple=False):
"""Construct an ARRAY.
E.g.::
@@ -186,9 +186,14 @@ class ARRAY(sqltypes.MutableType, sqltypes.Concatenable, sqltypes.TypeEngine):
``ARRAY(ARRAY(Integer))`` or such. The type mapping figures out on
the fly
- :param mutable: Defaults to True: specify whether lists passed to this
+ :param mutable=True: Specify whether lists passed to this
class should be considered mutable. If so, generic copy operations
(typically used by the ORM) will shallow-copy values.
+
+ :param as_tuple=False: Specify whether return results should be converted
+ to tuples from lists. DBAPIs such as psycopg2 return lists by default.
+ When tuples are returned, the results are hashable. This flag can only
+ be set to ``True`` when ``mutable`` is set to ``False``. (new in 0.6.5)
"""
if isinstance(item_type, ARRAY):
@@ -198,7 +203,12 @@ class ARRAY(sqltypes.MutableType, sqltypes.Concatenable, sqltypes.TypeEngine):
item_type = item_type()
self.item_type = item_type
self.mutable = mutable
-
+ if mutable and as_tuple:
+ raise exc.ArgumentError(
+ "mutable must be set to False if as_tuple is True."
+ )
+ self.as_tuple = as_tuple
+
def copy_value(self, value):
if value is None:
return None
@@ -224,7 +234,8 @@ class ARRAY(sqltypes.MutableType, sqltypes.Concatenable, sqltypes.TypeEngine):
def adapt(self, impltype):
return impltype(
self.item_type,
- mutable=self.mutable
+ mutable=self.mutable,
+ as_tuple=self.as_tuple
)
def bind_processor(self, dialect):
@@ -252,19 +263,28 @@ class ARRAY(sqltypes.MutableType, sqltypes.Concatenable, sqltypes.TypeEngine):
if item_proc:
def convert_item(item):
if isinstance(item, list):
- return [convert_item(child) for child in item]
+ r = [convert_item(child) for child in item]
+ if self.as_tuple:
+ r = tuple(r)
+ return r
else:
return item_proc(item)
else:
def convert_item(item):
if isinstance(item, list):
- return [convert_item(child) for child in item]
+ r = [convert_item(child) for child in item]
+ if self.as_tuple:
+ r = tuple(r)
+ return r
else:
return item
def process(value):
if value is None:
return value
- return [convert_item(item) for item in value]
+ r = [convert_item(item) for item in value]
+ if self.as_tuple:
+ r = tuple(r)
+ return r
return process
PGArray = ARRAY
@@ -1033,28 +1053,32 @@ class PGDialect(default.DefaultDialect):
else:
args = ()
- if attype in self.ischema_names:
- coltype = self.ischema_names[attype]
- elif attype in enums:
- enum = enums[attype]
- coltype = ENUM
- if "." in attype:
- kwargs['schema'], kwargs['name'] = attype.split('.')
- else:
- kwargs['name'] = attype
- args = tuple(enum['labels'])
- elif attype in domains:
- domain = domains[attype]
- if domain['attype'] in self.ischema_names:
+ while True:
+ if attype in self.ischema_names:
+ coltype = self.ischema_names[attype]
+ break
+ elif attype in enums:
+ enum = enums[attype]
+ coltype = ENUM
+ if "." in attype:
+ kwargs['schema'], kwargs['name'] = attype.split('.')
+ else:
+ kwargs['name'] = attype
+ args = tuple(enum['labels'])
+ break
+ elif attype in domains:
+ domain = domains[attype]
+ attype = domain['attype']
# A table can't override whether the domain is nullable.
nullable = domain['nullable']
if domain['default'] and not default:
# It can, however, override the default
# value, but can't set it to null.
default = domain['default']
- coltype = self.ischema_names[domain['attype']]
- else:
- coltype = None
+ continue
+ else:
+ coltype = None
+ break
if coltype:
coltype = coltype(*args, **kwargs)
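A short sketch of the new ``as_tuple`` flag on the postgresql ``ARRAY`` type; the table and column names here are made up for illustration::

    from sqlalchemy import MetaData, Table, Column, Integer
    from sqlalchemy.dialects.postgresql import ARRAY

    metadata = MetaData()
    measurements = Table('measurements', metadata,
        Column('id', Integer, primary_key=True),
        # as_tuple requires mutable=False; result rows then come back as
        # hashable tuples instead of lists (nested arrays included).
        Column('readings', ARRAY(Integer, mutable=False, as_tuple=True)),
    )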
diff --git a/lib/sqlalchemy/engine/__init__.py b/lib/sqlalchemy/engine/__init__.py
index 43d3dd038..36b86cabf 100644
--- a/lib/sqlalchemy/engine/__init__.py
+++ b/lib/sqlalchemy/engine/__init__.py
@@ -278,14 +278,15 @@ def _coerce_config(configuration, prefix):
for key in configuration
if key.startswith(prefix))
for option, type_ in (
- ('convert_unicode', bool),
+ ('convert_unicode', util.bool_or_str('force')),
('pool_timeout', int),
- ('echo', bool),
- ('echo_pool', bool),
+ ('echo', util.bool_or_str('debug')),
+ ('echo_pool', util.bool_or_str('debug')),
('pool_recycle', int),
('pool_size', int),
('max_overflow', int),
('pool_threadlocal', bool),
+ ('use_native_unicode', bool),
):
util.coerce_kw_type(options, option, type_)
return options
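The ``bool_or_str()`` coercions above let string alternates pass through ``engine_from_config()``, e.g. ``'debug'`` for ``echo`` and ``'force'`` for ``convert_unicode``. A sketch of a config dict such as one parsed from an .ini file; the URL is a placeholder::

    from sqlalchemy import engine_from_config

    config = {
        'sqlalchemy.url': 'postgresql://scott:tiger@localhost/test',
        'sqlalchemy.echo': 'debug',             # 'true'/'false' or 'debug'
        'sqlalchemy.convert_unicode': 'force',  # boolean or the string 'force'
        'sqlalchemy.pool_recycle': '3600',
        'sqlalchemy.use_native_unicode': 'false',
    }
    engine = engine_from_config(config, prefix='sqlalchemy.')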
diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py
index 390094c7d..13755d49a 100644
--- a/lib/sqlalchemy/engine/default.py
+++ b/lib/sqlalchemy/engine/default.py
@@ -565,7 +565,6 @@ class DefaultExecutionContext(base.ExecutionContext):
in all cases.
"""
-
return self.cursor.lastrowid
def handle_dbapi_exception(self, e):
diff --git a/lib/sqlalchemy/ext/declarative.py b/lib/sqlalchemy/ext/declarative.py
index fabd9aaf9..dd2df63d3 100755
--- a/lib/sqlalchemy/ext/declarative.py
+++ b/lib/sqlalchemy/ext/declarative.py
@@ -589,7 +589,8 @@ keys, as a :class:`ForeignKey` itself contains references to columns
which can't be properly recreated at this level. For columns that
have foreign keys, as well as for the variety of mapper-level constructs
that require destination-explicit context, the
-:func:`~.declared_attr` decorator is provided so that
+:func:`~.declared_attr` decorator (renamed from ``sqlalchemy.util.classproperty`` in 0.6.5)
+is provided so that
patterns common to many classes can be defined as callables::
from sqlalchemy.ext.declarative import declared_attr
@@ -820,6 +821,81 @@ from multiple collections::
id = Column(Integer, primary_key=True)
+Defining Indexes in Mixins
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If you need to define a multi-column index that applies to all tables
+that make use of a particular mixin, you will need to do this in a
+metaclass as shown in the following example::
+
+ from sqlalchemy.ext.declarative import DeclarativeMeta
+
+ class MyMixinMeta(DeclarativeMeta):
+
+ def __init__(cls,*args,**kw):
+ if getattr(cls,'_decl_class_registry',None) is None:
+ return
+ super(MyMeta,cls).__init__(*args,**kw)
+ # Index creation done here
+ Index('test',cls.a,cls.b)
+
+ class MyMixin(object):
+ __metaclass__=MyMixinMeta
+ a = Column(Integer)
+ b = Column(Integer)
+
+ class MyModel(Base,MyMixin):
+ __tablename__ = 'atable'
+ c = Column(Integer,primary_key=True)
+
+Using multiple Mixins that require Metaclasses
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If you end up in a situation where you need to use multiple mixins and
+more than one of them uses a metaclass to, for example, create a
+multi-column index, then you will need to create a metaclass that
+correctly combines the actions of the other metaclasses. For example::
+
+ class MyMeta1(DeclarativeMeta):
+
+ def __init__(cls,*args,**kw):
+ if getattr(cls,'_decl_class_registry',None) is None:
+ return
+ super(MyMeta1,cls).__init__(*args,**kw)
+ Index('ab',cls.a,cls.b)
+
+ class MyMixin1(object):
+ __metaclass__=MyMeta1
+ a = Column(Integer)
+ b = Column(Integer)
+
+ class MyMeta2(DeclarativeMeta):
+
+ def __init__(cls,*args,**kw):
+ if getattr(cls,'_decl_class_registry',None) is None:
+ return
+ super(MyMeta2,cls).__init__(*args,**kw)
+ Index('cd',cls.c,cls.d)
+
+ class MyMixin2(object):
+ __metaclass__=MyMeta2
+ c = Column(Integer)
+ d = Column(Integer)
+
+ class CombinedMeta(MyMeta1,MyMeta2):
+ # This is needed to successfully combine
+ # two mixins which both have metaclasses
+ pass
+
+ class MyModel(Base,MyMixin1,MyMixin2):
+ __tablename__ = 'awooooga'
+ __metaclass__ = CombinedMeta
+ z = Column(Integer,primary_key=True)
+
+For this reason, if a mixin requires a custom metaclass, this should
+be mentioned in any documentation of that mixin to avoid confusion
+later down the line.
+
Class Constructor
=================
@@ -939,7 +1015,7 @@ def _as_declarative(cls, classname, dict_):
if name not in dict_ and not (
'__table__' in dict_ and
(obj.name or name) in dict_['__table__'].c
- ):
+ ) and name not in potential_columns:
potential_columns[name] = \
column_copies[obj] = \
obj.copy()
@@ -971,6 +1047,13 @@ def _as_declarative(cls, classname, dict_):
for k, v in mapper_args.iteritems():
mapper_args[k] = column_copies.get(v,v)
+
+ if classname in cls._decl_class_registry:
+ util.warn("The classname %r is already in the registry of this"
+ " declarative base, mapped to %r" % (
+ classname,
+ cls._decl_class_registry[classname]
+ ))
cls._decl_class_registry[classname] = cls
our_stuff = util.OrderedDict()
@@ -1288,7 +1371,7 @@ class declared_attr(property):
a mapped property or special declarative member name.
.. note:: @declared_attr is available as
- sqlalchemy.util.classproperty for SQLAlchemy versions
+ ``sqlalchemy.util.classproperty`` for SQLAlchemy versions
0.6.2, 0.6.3, 0.6.4.
@declared_attr turns the attribute into a scalar-like
diff --git a/lib/sqlalchemy/orm/__init__.py b/lib/sqlalchemy/orm/__init__.py
index 8b32d1a27..18031e15f 100644
--- a/lib/sqlalchemy/orm/__init__.py
+++ b/lib/sqlalchemy/orm/__init__.py
@@ -84,6 +84,7 @@ __all__ = (
'eagerload',
'eagerload_all',
'extension',
+ 'immediateload',
'join',
'joinedload',
'joinedload_all',
@@ -335,7 +336,12 @@ def relationship(argument, secondary=None, **kwargs):
``select``. Values include:
* ``select`` - items should be loaded lazily when the property is first
- accessed, using a separate SELECT statement.
+ accessed, using a separate SELECT statement, or identity map
+ fetch for simple many-to-one references.
+
+ * ``immediate`` - items should be loaded as the parents are loaded,
+ using a separate SELECT statement, or identity map fetch for
+ simple many-to-one references. (new as of 0.6.5)
* ``joined`` - items should be loaded "eagerly" in the same query as
that of the parent, using a JOIN or LEFT OUTER JOIN. Whether
@@ -957,11 +963,24 @@ def compile_mappers():
m.compile()
def clear_mappers():
- """Remove all mappers that have been created thus far.
-
- The mapped classes will return to their initial "unmapped" state and can
- be re-mapped with new mappers.
-
+ """Remove all mappers from all classes.
+
+ This function removes all instrumentation from classes and disposes
+ of their associated mappers. Once called, the classes are unmapped
+ and can be later re-mapped with new mappers.
+
+ :func:`.clear_mappers` is *not* for normal use, as there is literally no
+ valid usage for it outside of very specific testing scenarios. Normally,
+ mappers are permanent structural components of user-defined classes, and
+ are never discarded independently of their class. If a mapped class itself
+ is garbage collected, its mapper is automatically disposed of as well. As
+ such, :func:`.clear_mappers` is only for usage in test suites that re-use
+ the same classes with different mappings, which is itself an extremely rare
+ use case - the only such use case is in fact SQLAlchemy's own test suite,
+ and possibly the test suites of other ORM extension libraries which
+ intend to test various combinations of mapper construction upon a fixed
+ set of classes.
+
"""
mapperlib._COMPILE_MUTEX.acquire()
try:
@@ -1122,7 +1141,7 @@ def subqueryload_all(*keys):
query.options(subqueryload_all(User.orders, Order.items,
Item.keywords))
- See also: :func:`joinedload_all`, :func:`lazyload`
+ See also: :func:`joinedload_all`, :func:`lazyload`, :func:`immediateload`
"""
return strategies.EagerLazyOption(keys, lazy="subquery", chained=True)
@@ -1134,7 +1153,7 @@ def lazyload(*keys):
Used with :meth:`~sqlalchemy.orm.query.Query.options`.
- See also: :func:`eagerload`, :func:`subqueryload`
+ See also: :func:`eagerload`, :func:`subqueryload`, :func:`immediateload`
"""
return strategies.EagerLazyOption(keys, lazy=True)
@@ -1145,11 +1164,24 @@ def noload(*keys):
Used with :meth:`~sqlalchemy.orm.query.Query.options`.
- See also: :func:`lazyload`, :func:`eagerload`, :func:`subqueryload`
+ See also: :func:`lazyload`, :func:`eagerload`, :func:`subqueryload`, :func:`immediateload`
"""
return strategies.EagerLazyOption(keys, lazy=None)
+def immediateload(*keys):
+ """Return a ``MapperOption`` that will convert the property of the given
+ name into an immediate load.
+
+ Used with :meth:`~sqlalchemy.orm.query.Query.options`.
+
+ See also: :func:`lazyload`, :func:`eagerload`, :func:`subqueryload`
+
+ New as of version 0.6.5.
+
+ """
+ return strategies.EagerLazyOption(keys, lazy='immediate')
+
def contains_alias(alias):
"""Return a ``MapperOption`` that will indicate to the query that
the main table has been aliased.
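A self-contained sketch of the new ``'immediate'`` strategy, both as a ``relationship()`` default and via the ``immediateload()`` query option; the User/Address mapping here is invented for illustration and is reused by later examples::

    from sqlalchemy import create_engine, Column, Integer, String, ForeignKey
    from sqlalchemy.orm import relationship, sessionmaker, immediateload
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'users'
        id = Column(Integer, primary_key=True)
        name = Column(String(50))
        # lazy='immediate' issues the second SELECT (or identity-map fetch)
        # as each User row is loaded, not on first attribute access
        addresses = relationship("Address", lazy='immediate')

    class Address(Base):
        __tablename__ = 'addresses'
        id = Column(Integer, primary_key=True)
        user_id = Column(Integer, ForeignKey('users.id'))
        email = Column(String(50))

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()

    # the same behavior can be requested per query:
    users = session.query(User).options(immediateload(User.addresses)).all()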
diff --git a/lib/sqlalchemy/orm/dynamic.py b/lib/sqlalchemy/orm/dynamic.py
index 2b0883cfb..caa057717 100644
--- a/lib/sqlalchemy/orm/dynamic.py
+++ b/lib/sqlalchemy/orm/dynamic.py
@@ -36,7 +36,7 @@ class DynaLoader(strategies.AbstractRelationshipLoader):
)
def create_row_processor(self, selectcontext, path, mapper, row, adapter):
- return (None, None)
+ return None, None, None
log.class_logger(DynaLoader)
diff --git a/lib/sqlalchemy/orm/interfaces.py b/lib/sqlalchemy/orm/interfaces.py
index f4933b8ca..6d30abf91 100644
--- a/lib/sqlalchemy/orm/interfaces.py
+++ b/lib/sqlalchemy/orm/interfaces.py
@@ -78,38 +78,8 @@ class MapperProperty(object):
pass
def create_row_processor(self, selectcontext, path, mapper, row, adapter):
- """Return a 2-tuple consiting of two row processing functions and
- an instance post-processing function.
-
- Input arguments are the query.SelectionContext and the *first*
- applicable row of a result set obtained within
- query.Query.instances(), called only the first time a particular
- mapper's populate_instance() method is invoked for the overall result.
-
- The settings contained within the SelectionContext as well as the
- columns present in the row (which will be the same columns present in
- all rows) are used to determine the presence and behavior of the
- returned callables. The callables will then be used to process all
- rows and instances.
-
- Callables are of the following form::
-
- def new_execute(state, dict_, row, isnew):
- # process incoming instance state and given row.
- # the instance is
- # "new" and was just created upon receipt of this row.
- "isnew" indicates if the instance was newly created as a
- result of reading this row
-
- def existing_execute(state, dict_, row):
- # process incoming instance state and given row. the
- # instance is
- # "existing" and was created based on a previous row.
-
- return (new_execute, existing_execute)
-
- Either of the three tuples can be ``None`` in which case no function
- is called.
+ """Return a 3-tuple consisting of three row processing functions.
+
"""
raise NotImplementedError()
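For loader authors, the contract change above means ``create_row_processor()`` now returns three callables rather than two. Judging from the ``Mapper._populators()`` change later in this diff, the third slot is appended to the new-instance populators after every other property has contributed, which is how ``ImmediateLoader`` schedules its load. A hedged sketch; the class and callable names are illustrative only::

    from sqlalchemy.orm.interfaces import MapperProperty

    class MyProperty(MapperProperty):
        def create_row_processor(self, selectcontext, path, mapper, row, adapter):
            def new_execute(state, dict_, row):
                # populate an instance newly created from this row
                pass
            def existing_execute(state, dict_, row):
                # populate an instance already present in the identity map
                pass
            # third element: a "delayed" populator run after all other
            # new-instance populators; any of the three may be None.
            return new_execute, existing_execute, None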
diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py
index a40021663..6ab6208fd 100644
--- a/lib/sqlalchemy/orm/mapper.py
+++ b/lib/sqlalchemy/orm/mapper.py
@@ -2135,10 +2135,11 @@ class Mapper(object):
state.load_path = load_path
if not new_populators:
- new_populators[:], existing_populators[:] = \
- self._populators(context, path, row,
- adapter)
-
+ self._populators(context, path, row, adapter,
+ new_populators,
+ existing_populators
+ )
+
if isnew:
populators = new_populators
else:
@@ -2309,20 +2310,24 @@ class Mapper(object):
return instance
return _instance
- def _populators(self, context, path, row, adapter):
+ def _populators(self, context, path, row, adapter,
+ new_populators, existing_populators):
"""Produce a collection of attribute level row processor callables."""
- new_populators, existing_populators = [], []
+ delayed_populators = []
for prop in self._props.itervalues():
- newpop, existingpop = prop.create_row_processor(
+ newpop, existingpop, delayedpop = prop.create_row_processor(
context, path,
self, row, adapter)
if newpop:
new_populators.append((prop.key, newpop))
if existingpop:
existing_populators.append((prop.key, existingpop))
- return new_populators, existing_populators
-
+ if delayedpop:
+ delayed_populators.append((prop.key, delayedpop))
+ if delayed_populators:
+ new_populators.extend(delayed_populators)
+
def _configure_subclass_mapper(self, context, path, adapter):
"""Produce a mapper level row processor callable factory for mappers
inheriting this one."""
diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py
index 4efd2acc9..0cbbf630d 100644
--- a/lib/sqlalchemy/orm/properties.py
+++ b/lib/sqlalchemy/orm/properties.py
@@ -255,7 +255,7 @@ class DescriptorProperty(MapperProperty):
pass
def create_row_processor(self, selectcontext, path, mapper, row, adapter):
- return (None, None)
+ return None, None, None
def merge(self, session, source_state, source_dict,
dest_state, dest_dict, load, _recursive):
diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py
index 605f391aa..468bcc19d 100644
--- a/lib/sqlalchemy/orm/query.py
+++ b/lib/sqlalchemy/orm/query.py
@@ -425,8 +425,8 @@ class Query(object):
return stmt._annotate({'_halt_adapt': True})
def subquery(self):
- """return the full SELECT statement represented by this Query,
- embedded within an Alias.
+ """return the full SELECT statement represented by this :class:`.Query`,
+ embedded within an :class:`.Alias`.
Eager JOIN generation within the query is disabled.
@@ -436,7 +436,33 @@ class Query(object):
"""
return self.enable_eagerloads(False).statement.alias()
+
+ def label(self, name):
+ """Return the full SELECT statement represented by this :class:`.Query`, converted
+ to a scalar subquery with a label of the given name.
+
+ Analogous to :meth:`sqlalchemy.sql._SelectBaseMixin.label`.
+
+ New in 0.6.5.
+
+ """
+
+ return self.enable_eagerloads(False).statement.label(name)
+
+ def as_scalar(self):
+ """Return the full SELECT statement represented by this :class:`.Query`, converted
+ to a scalar subquery.
+
+ Analogous to :meth:`sqlalchemy.sql._SelectBaseMixin.as_scalar`.
+
+ New in 0.6.5.
+
+ """
+
+ return self.enable_eagerloads(False).statement.as_scalar()
+
+
def __clause_element__(self):
return self.enable_eagerloads(False).with_labels().statement
@@ -758,7 +784,36 @@ class Query(object):
# end Py2K
except StopIteration:
return None
+
+ @_generative()
+ def with_entities(self, *entities):
+ """Return a new :class:`.Query` replacing the SELECT list with the given
+ entities.
+
+ e.g.::
+
+ # Users, filtered on some arbitrary criterion
+ # and then ordered by related email address
+ q = session.query(User).\\
+ join(User.address).\\
+ filter(User.name.like('%ed%')).\\
+ order_by(Address.email)
+ # given *only* User.id==5, Address.email, and 'q', what
+ # would the *next* User in the result be ?
+ subq = q.with_entities(Address.email).\\
+ order_by(None).\\
+ filter(User.id==5).\\
+ subquery()
+ q = q.join((subq, subq.c.email < Address.email)).\\
+ limit(1)
+
+ New in 0.6.5.
+
+ """
+ self._set_entities(entities)
+
+
@_generative()
def add_columns(self, *column):
"""Add one or more column expressions to the list
@@ -1721,10 +1776,8 @@ class Query(object):
query_entity.row_processor(self, context, custom_rows)
for query_entity in self._entities
])
-
- if not single_entity:
- labels = [l for l in labels if l]
-
+
+
while True:
context.progress = {}
context.partials = {}
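``Query.label()`` and ``Query.as_scalar()`` turn a Query into a scalar subquery usable inside another statement. A sketch, reusing the User/Address mapping from the earlier immediateload example::

    from sqlalchemy import func

    # a correlated scalar subquery counting addresses per user, placed
    # directly in the columns clause of the outer query
    address_count = session.query(func.count(Address.id)).\
        filter(Address.user_id == User.id).\
        label('address_count')

    rows = session.query(User.name, address_count).all()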
diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py
index 3444c12ac..5e1c7ba09 100644
--- a/lib/sqlalchemy/orm/session.py
+++ b/lib/sqlalchemy/orm/session.py
@@ -532,20 +532,13 @@ class Session(object):
transaction or nested transaction, an error is raised, unless
``subtransactions=True`` or ``nested=True`` is specified.
- The ``subtransactions=True`` flag indicates that this ``begin()`` can
- create a subtransaction if a transaction is already in progress. A
- subtransaction is a non-transactional, delimiting construct that
- allows matching begin()/commit() pairs to be nested together, with
- only the outermost begin/commit pair actually affecting transactional
- state. When a rollback is issued, the subtransaction will directly
- roll back the innermost real transaction, however each subtransaction
- still must be explicitly rolled back to maintain proper stacking of
- subtransactions.
-
- If no transaction is in progress, then a real transaction is begun.
-
+ The ``subtransactions=True`` flag indicates that this :meth:`~.Session.begin`
+ can create a subtransaction if a transaction is already in progress.
+ For documentation on subtransactions, please see :ref:`session_subtransactions`.
+
The ``nested`` flag begins a SAVEPOINT transaction and is equivalent
- to calling ``begin_nested()``.
+ to calling :meth:`~.Session.begin_nested`. For documentation on SAVEPOINT
+ transactions, please see :ref:`session_begin_nested`.
"""
if self.transaction is not None:
@@ -567,10 +560,8 @@ class Session(object):
The target database(s) must support SQL SAVEPOINTs or a
SQLAlchemy-supported vendor implementation of the idea.
- The nested transaction is a real transation, unlike a "subtransaction"
- which corresponds to multiple ``begin()`` calls. The next
- ``rollback()`` or ``commit()`` call will operate upon this nested
- transaction.
+ For documentation on SAVEPOINT
+ transactions, please see :ref:`session_begin_nested`.
"""
return self.begin(nested=True)
@@ -593,9 +584,16 @@ class Session(object):
def commit(self):
"""Flush pending changes and commit the current transaction.
-
+
If no transaction is in progress, this method raises an
InvalidRequestError.
+
+ By default, the :class:`.Session` also expires all database
+ loaded state on all ORM-managed attributes after transaction commit.
+ This is so that subsequent operations load the most recent
+ data from the database. This behavior can be disabled using
+ the ``expire_on_commit=False`` option to :func:`.sessionmaker` or
+ the :class:`.Session` constructor.
If a subtransaction is in effect (which occurs when begin() is called
multiple times), the subtransaction will be closed, and the next call
@@ -1490,22 +1488,42 @@ class Session(object):
ext.after_flush_postexec(self, flush_context)
def is_modified(self, instance, include_collections=True, passive=False):
- """Return True if instance has modified attributes.
+ """Return ``True`` if instance has modified attributes.
This method retrieves a history instance for each instrumented
attribute on the instance and performs a comparison of the current
- value to its previously committed value. Note that instances present
- in the 'dirty' collection may result in a value of ``False`` when
- tested with this method.
+ value to its previously committed value.
- `include_collections` indicates if multivalued collections should be
+ ``include_collections`` indicates if multivalued collections should be
included in the operation. Setting this to False is a way to detect
only local-column based properties (i.e. scalar columns or many-to-one
foreign keys) that would result in an UPDATE for this instance upon
flush.
- The `passive` flag indicates if unloaded attributes and collections
+ The ``passive`` flag indicates if unloaded attributes and collections
should not be loaded in the course of performing this test.
+
+ A few caveats to this method apply:
+
+ * Instances present in the 'dirty' collection may result in a value
+ of ``False`` when tested with this method. This is because while
+ the object may have received attribute set events, there may be
+ no net changes on its state.
+ * Scalar attributes may not have recorded the "previously" set
+ value when a new value was applied, if the attribute was not loaded,
+ or was expired, at the time the new value was received - in these
+ cases, the attribute is assumed to have a change, even if there is
+ ultimately no net change against its database value. SQLAlchemy in
+ most cases does not need the "old" value when a set event occurs, so
+ it skips the expense of a SQL call if the old value isn't present,
+ based on the assumption that an UPDATE of the scalar value is
+ usually needed, and in those few cases where it isn't, is less
+ expensive on average than issuing a defensive SELECT.
+
+ The "old" value is fetched unconditionally only if the attribute
+ container has the "active_history" flag set to ``True``. This flag
+ is set typically for primary key attributes and scalar references
+ that are not a simple many-to-one.
"""
try:
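The ``commit()`` docstring above now points out the default post-commit expiration of loaded state. A sketch of turning that off, continuing with the mapping and engine from the earlier example::

    from sqlalchemy.orm import sessionmaker

    Session = sessionmaker(bind=engine, expire_on_commit=False)
    session = Session()

    user = User(name='ed')
    session.add(user)
    session.commit()
    # with expire_on_commit=False this attribute access reuses the state
    # already in memory instead of emitting a refresh SELECT
    current_name = user.name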
diff --git a/lib/sqlalchemy/orm/state.py b/lib/sqlalchemy/orm/state.py
index dc8a07c17..bea4ee500 100644
--- a/lib/sqlalchemy/orm/state.py
+++ b/lib/sqlalchemy/orm/state.py
@@ -343,8 +343,14 @@ class InstanceState(object):
previous = attr.copy(previous)
self.committed_state[attr.key] = previous
-
- if not self.modified:
+
+
+ # the "or not self.modified" is defensive at
+ # this point. The assertion below is expected
+ # to be True:
+ # assert self._strong_obj is None or self.modified
+
+ if self._strong_obj is None or not self.modified:
instance_dict = self._instance_dict()
if instance_dict:
instance_dict._modified.add(self)
diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py
index 1696d1456..1de2dc3ee 100644
--- a/lib/sqlalchemy/orm/strategies.py
+++ b/lib/sqlalchemy/orm/strategies.py
@@ -89,7 +89,7 @@ class UninstrumentedColumnLoader(LoaderStrategy):
column_collection.append(c)
def create_row_processor(self, selectcontext, path, mapper, row, adapter):
- return None, None
+ return None, None, None
class ColumnLoader(LoaderStrategy):
"""Strategize the loading of a plain column-based MapperProperty."""
@@ -128,11 +128,11 @@ class ColumnLoader(LoaderStrategy):
if col is not None and col in row:
def new_execute(state, dict_, row):
dict_[key] = row[col]
- return new_execute, None
+ return new_execute, None, None
else:
def new_execute(state, dict_, row):
state.expire_attribute_pre_commit(dict_, key)
- return new_execute, None
+ return new_execute, None, None
log.class_logger(ColumnLoader)
@@ -185,7 +185,7 @@ class CompositeColumnLoader(ColumnLoader):
def new_execute(state, dict_, row):
dict_[key] = composite_class(*[row[c] for c in columns])
- return new_execute, None
+ return new_execute, None, None
log.class_logger(CompositeColumnLoader)
@@ -212,7 +212,7 @@ class DeferredColumnLoader(LoaderStrategy):
# fire off on next access.
state.reset(dict_, key)
- return new_execute, None
+ return new_execute, None, None
def init(self):
if hasattr(self.parent_property, 'composite_class'):
@@ -349,7 +349,7 @@ class NoLoader(AbstractRelationshipLoader):
def create_row_processor(self, selectcontext, path, mapper, row, adapter):
def new_execute(state, dict_, row):
state.initialize(self.key)
- return new_execute, None
+ return new_execute, None, None
log.class_logger(NoLoader)
@@ -510,7 +510,7 @@ class LazyLoader(AbstractRelationshipLoader):
# any existing state.
state.reset(dict_, key)
- return new_execute, None
+ return new_execute, None, None
@classmethod
def _create_lazy_clause(cls, prop, reverse_direction=False):
@@ -684,6 +684,23 @@ class LoadLazyAttribute(object):
else:
return None
+class ImmediateLoader(AbstractRelationshipLoader):
+ def init_class_attribute(self, mapper):
+ self.parent_property.\
+ _get_strategy(LazyLoader).\
+ init_class_attribute(mapper)
+
+ def setup_query(self, context, entity,
+ path, adapter, column_collection=None,
+ parentmapper=None, **kwargs):
+ pass
+
+ def create_row_processor(self, context, path, mapper, row, adapter):
+ def execute(state, dict_, row):
+ state.get_impl(self.key).get(state, dict_)
+
+ return None, None, execute
+
class SubqueryLoader(AbstractRelationshipLoader):
def init(self):
super(SubqueryLoader, self).init()
@@ -860,7 +877,7 @@ class SubqueryLoader(AbstractRelationshipLoader):
path = interfaces._reduce_path(path)
if ('subquery', path) not in context.attributes:
- return None, None
+ return None, None, None
local_cols, remote_cols = self._local_remote_columns(self.parent_property)
@@ -904,7 +921,7 @@ class SubqueryLoader(AbstractRelationshipLoader):
state.get_impl(self.key).\
set_committed_value(state, dict_, scalar)
- return execute, None
+ return execute, None, None
log.class_logger(SubqueryLoader)
@@ -922,6 +939,7 @@ class EagerLoader(AbstractRelationshipLoader):
def setup_query(self, context, entity, path, adapter, \
column_collection=None, parentmapper=None,
+ allow_innerjoin=True,
**kwargs):
"""Add a left outer join to the statement thats being constructed."""
@@ -972,10 +990,18 @@ class EagerLoader(AbstractRelationshipLoader):
if self.parent_property.direction != interfaces.MANYTOONE:
context.multi_row_eager_loaders = True
+ innerjoin = allow_innerjoin and context.attributes.get(
+ ("eager_join_type", path),
+ self.parent_property.innerjoin)
+ if not innerjoin:
+ # if this is an outer join, all eager joins from
+ # here must also be outer joins
+ allow_innerjoin = False
+
context.create_eager_joins.append(
(self._create_eager_join, context,
entity, path, adapter,
- parentmapper, clauses)
+ parentmapper, clauses, innerjoin)
)
add_to_collection = context.secondary_columns
@@ -990,10 +1016,12 @@ class EagerLoader(AbstractRelationshipLoader):
path + (self.mapper,),
clauses,
parentmapper=self.mapper,
- column_collection=add_to_collection)
+ column_collection=add_to_collection,
+ allow_innerjoin=allow_innerjoin)
def _create_eager_join(self, context, entity,
- path, adapter, parentmapper, clauses):
+ path, adapter, parentmapper,
+ clauses, innerjoin):
if parentmapper is None:
localparent = entity.mapper
@@ -1048,10 +1076,6 @@ class EagerLoader(AbstractRelationshipLoader):
else:
onclause = self.parent_property
- innerjoin = context.attributes.get(
- ("eager_join_type", path),
- self.parent_property.innerjoin)
-
context.eager_joins[entity_key] = eagerjoin = \
mapperutil.join(
towrap,
@@ -1157,7 +1181,7 @@ class EagerLoader(AbstractRelationshipLoader):
"Multiple rows returned with "
"uselist=False for eagerly-loaded attribute '%s' "
% self)
- return new_execute, existing_execute
+ return new_execute, existing_execute, None
else:
def new_execute(state, dict_, row):
collection = attributes.init_state_collection(
@@ -1182,7 +1206,7 @@ class EagerLoader(AbstractRelationshipLoader):
'append_without_event')
context.attributes[(state, key)] = result_list
_instance(row, result_list)
- return new_execute, existing_execute
+ return new_execute, existing_execute, None
else:
return self.parent_property.\
_get_strategy(LazyLoader).\
@@ -1222,6 +1246,8 @@ def factory(identifier):
return LazyLoader
elif identifier == 'subquery':
return SubqueryLoader
+ elif identifier == 'immediate':
+ return ImmediateLoader
else:
return LazyLoader
diff --git a/lib/sqlalchemy/schema.py b/lib/sqlalchemy/schema.py
index 8070fd9ca..6966fb90b 100644
--- a/lib/sqlalchemy/schema.py
+++ b/lib/sqlalchemy/schema.py
@@ -1988,7 +1988,7 @@ class MetaData(SchemaItem):
from sqlalchemy.sql.util import sort_tables
return sort_tables(self.tables.itervalues())
- def reflect(self, bind=None, schema=None, only=None):
+ def reflect(self, bind=None, schema=None, views=False, only=None):
"""Load all available table definitions from the database.
Automatically creates ``Table`` entries in this ``MetaData`` for any
@@ -2004,7 +2004,10 @@ class MetaData(SchemaItem):
:param schema:
Optional, query and reflect tables from an alternate schema.
-
+
+ :param views:
+ If True, also reflect views.
+
:param only:
Optional. Load only a sub-set of available named tables. May be
specified as a sequence of names or a callable.
@@ -2033,6 +2036,11 @@ class MetaData(SchemaItem):
available = util.OrderedSet(bind.engine.table_names(schema,
connection=conn))
+ if views:
+ available.update(
+ bind.dialect.get_view_names(conn or bind, schema)
+ )
+
current = set(self.tables.iterkeys())
if only is None:
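A sketch of the new ``views`` flag on ``MetaData.reflect()``; the engine from the earlier examples is assumed, and any views it exposes are reflected as ``Table`` objects alongside the tables::

    from sqlalchemy import MetaData

    meta = MetaData()
    # views=True adds the dialect's get_view_names() results to the set of
    # names considered, so views show up in meta.tables as well
    meta.reflect(bind=engine, views=True)
    reflected_names = sorted(meta.tables.keys())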
diff --git a/lib/sqlalchemy/sql/__init__.py b/lib/sqlalchemy/sql/__init__.py
index aa18eac17..2bb5f6ab4 100644
--- a/lib/sqlalchemy/sql/__init__.py
+++ b/lib/sqlalchemy/sql/__init__.py
@@ -47,6 +47,7 @@ from sqlalchemy.sql.expression import (
table,
text,
tuple_,
+ type_coerce,
union,
union_all,
update,
diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py
index e47db7e28..d3b8bf023 100644
--- a/lib/sqlalchemy/sql/compiler.py
+++ b/lib/sqlalchemy/sql/compiler.py
@@ -153,6 +153,10 @@ class _CompileLabel(visitors.Visitable):
def __init__(self, col, name):
self.element = col
self.name = name
+
+ @property
+ def type(self):
+ return self.element.type
@property
def quote(self):
@@ -317,7 +321,7 @@ class SQLCompiler(engine.Compiled):
if result_map is not None:
result_map[labelname.lower()] = \
- (label.name, (label, label.element, labelname), label.element.type)
+ (label.name, (label, label.element, labelname), label.type)
return self.process(label.element,
within_columns_clause=True,
@@ -329,7 +333,7 @@ class SQLCompiler(engine.Compiled):
return self.process(label.element,
within_columns_clause=False,
**kw)
-
+
def visit_column(self, column, result_map=None, **kwargs):
name = column.name
if name is None:
diff --git a/lib/sqlalchemy/sql/expression.py b/lib/sqlalchemy/sql/expression.py
index 219e3bf14..625893a68 100644
--- a/lib/sqlalchemy/sql/expression.py
+++ b/lib/sqlalchemy/sql/expression.py
@@ -45,8 +45,8 @@ __all__ = [
'except_', 'except_all', 'exists', 'extract', 'func', 'modifier',
'collate', 'insert', 'intersect', 'intersect_all', 'join', 'label',
'literal', 'literal_column', 'not_', 'null', 'or_', 'outparam',
- 'outerjoin', 'select', 'subquery', 'table', 'text', 'tuple_', 'union',
- 'union_all', 'update', ]
+ 'outerjoin', 'select', 'subquery', 'table', 'text', 'tuple_', 'type_coerce',
+ 'union', 'union_all', 'update', ]
PARSE_AUTOCOMMIT = util._symbol('PARSE_AUTOCOMMIT')
@@ -666,6 +666,54 @@ def tuple_(*expr):
"""
return _Tuple(*expr)
+
+def type_coerce(expr, type_):
+ """Coerce the given expression into the given type, on the Python side only.
+
+ :func:`.type_coerce` is roughly similar to :func:`.cast`, except no
+ "CAST" expression is rendered - the given type is only applied towards
+ expression typing and against received result values.
+
+ e.g.::
+
+ from sqlalchemy.types import TypeDecorator
+ import uuid
+
+ class AsGuid(TypeDecorator):
+ impl = String
+
+ def process_bind_param(self, value, dialect):
+ if value is not None:
+ return str(value)
+ else:
+ return None
+
+ def process_result_value(self, value, dialect):
+ if value is not None:
+ return uuid.UUID(value)
+ else:
+ return None
+
+ conn.execute(
+ select([type_coerce(mytable.c.ident, AsGuid)]).\\
+ where(
+ type_coerce(mytable.c.ident, AsGuid) ==
+ uuid.uuid3(uuid.NAMESPACE_URL, 'bar')
+ )
+ )
+
+ """
+ if hasattr(expr, '__clause_expr__'):
+ return type_coerce(expr.__clause_expr__())
+
+ elif not isinstance(expr, Visitable):
+ if expr is None:
+ return null()
+ else:
+ return literal(expr, type_=type_)
+ else:
+ return _Label(None, expr, type_=type_)
+
def label(name, obj):
"""Return a :class:`_Label` object for the
diff --git a/lib/sqlalchemy/test/__init__.py b/lib/sqlalchemy/test/__init__.py
index d69cedefd..7356945d2 100644
--- a/lib/sqlalchemy/test/__init__.py
+++ b/lib/sqlalchemy/test/__init__.py
@@ -6,7 +6,8 @@ by noseplugin.NoseSQLAlchemy.
"""
-from sqlalchemy.test import testing, engines, requires, profiling, pickleable, config
+from sqlalchemy_nose import config
+from sqlalchemy.test import testing, engines, requires, profiling, pickleable
from sqlalchemy.test.schema import Column, Table
from sqlalchemy.test.testing import \
AssertsCompiledSQL, \
diff --git a/lib/sqlalchemy/test/engines.py b/lib/sqlalchemy/test/engines.py
index 8b930175f..acae1d28a 100644
--- a/lib/sqlalchemy/test/engines.py
+++ b/lib/sqlalchemy/test/engines.py
@@ -1,6 +1,6 @@
import sys, types, weakref
from collections import deque
-import config
+from sqlalchemy_nose import config
from sqlalchemy.util import function_named, callable
from sqlalchemy import event
import re
diff --git a/lib/sqlalchemy/test/profiling.py b/lib/sqlalchemy/test/profiling.py
index c5256affa..835253a3a 100644
--- a/lib/sqlalchemy/test/profiling.py
+++ b/lib/sqlalchemy/test/profiling.py
@@ -6,7 +6,7 @@ in a more fine-grained way than nose's profiling plugin.
"""
import os, sys
-from sqlalchemy.test import config
+from sqlalchemy_nose import config
from sqlalchemy.test.util import function_named, gc_collect
from nose import SkipTest
diff --git a/lib/sqlalchemy/test/testing.py b/lib/sqlalchemy/test/testing.py
index 41ba3038f..12cbe5e02 100644
--- a/lib/sqlalchemy/test/testing.py
+++ b/lib/sqlalchemy/test/testing.py
@@ -8,7 +8,8 @@ import types
import warnings
from cStringIO import StringIO
-from sqlalchemy.test import config, assertsql, util as testutil
+from sqlalchemy_nose import config
+from sqlalchemy.test import assertsql, util as testutil
from sqlalchemy.util import function_named, py3k
from engines import drop_all_tables
@@ -207,9 +208,9 @@ def _block_unconditionally(db, reason):
return function_named(maybe, fn_name)
return decorate
-def only_on(db, reason):
+def only_on(dbs, reason):
carp = _should_carp_about_exclusion(reason)
- spec = db_spec(db)
+ spec = db_spec(*util.to_list(dbs))
def decorate(fn):
fn_name = fn.__name__
def maybe(*args, **kw):
diff --git a/lib/sqlalchemy/types.py b/lib/sqlalchemy/types.py
index 46e5901a3..ee1fdc67f 100644
--- a/lib/sqlalchemy/types.py
+++ b/lib/sqlalchemy/types.py
@@ -132,18 +132,28 @@ class AbstractType(Visitable):
# ClauseElement.compile()....this is a mistake.
if not dialect:
+ dialect = self._default_dialect
+
+ return dialect.type_compiler.process(self)
+
+ @property
+ def _default_dialect(self):
+ if self.__class__.__module__.startswith("sqlalchemy.dialects"):
+ tokens = self.__class__.__module__.split(".")[0:3]
+ mod = ".".join(tokens)
+ return getattr(__import__(mod).dialects, tokens[-1]).dialect()
+ else:
global DefaultDialect
if DefaultDialect is None:
from sqlalchemy.engine.default import DefaultDialect
- dialect = DefaultDialect()
+ return DefaultDialect()
- return dialect.type_compiler.process(self)
-
def __str__(self):
# Py3K
#return unicode(self.compile())
# Py2K
- return unicode(self.compile()).encode('ascii', 'backslashreplace')
+ return unicode(self.compile()).\
+ encode('ascii', 'backslashreplace')
# end Py2K
def __init__(self, *args, **kwargs):
@@ -346,21 +356,19 @@ class TypeDecorator(AbstractType):
"require a class-level variable "
"'impl' which refers to the class of "
"type being decorated")
- self.impl = self.__class__.impl(*args, **kwargs)
+ self.impl = to_instance(self.__class__.impl, *args, **kwargs)
def adapt(self, cls):
return cls()
def dialect_impl(self, dialect):
key = (dialect.__class__, dialect.server_version_info)
+
try:
return self._impl_dict[key]
except KeyError:
pass
- # adapt the TypeDecorator first, in
- # the case that the dialect maps the TD
- # to one of its native types (i.e. PGInterval)
adapted = dialect.type_descriptor(self)
if adapted is not self:
self._impl_dict[key] = adapted
@@ -369,7 +377,7 @@ class TypeDecorator(AbstractType):
# otherwise adapt the impl type, link
# to a copy of this TypeDecorator and return
# that.
- typedesc = self.load_dialect_impl(dialect)
+ typedesc = self.load_dialect_impl(dialect).dialect_impl(dialect)
tt = self.copy()
if not isinstance(tt, self.__class__):
raise AssertionError('Type object %s does not properly '
@@ -381,27 +389,33 @@ class TypeDecorator(AbstractType):
return tt
@util.memoized_property
+ def _impl_dict(self):
+ return {}
+
+ @util.memoized_property
def _type_affinity(self):
return self.impl._type_affinity
def type_engine(self, dialect):
- impl = self.dialect_impl(dialect)
- if not isinstance(impl, TypeDecorator):
- return impl
+ """Return a TypeEngine instance for this TypeDecorator.
+
+ """
+ adapted = dialect.type_descriptor(self)
+ if adapted is not self:
+ return adapted
+ elif isinstance(self.impl, TypeDecorator):
+ return self.impl.type_engine(dialect)
else:
- return impl.impl
+ return self.load_dialect_impl(dialect)
def load_dialect_impl(self, dialect):
- """Loads the dialect-specific implementation of this type.
+ """User hook which can be overridden to provide a different 'impl'
+ type per-dialect.
- by default calls dialect.type_descriptor(self.impl), but
- can be overridden to provide different behavior.
+ by default returns self.impl.
"""
- if isinstance(self.impl, TypeDecorator):
- return self.impl.dialect_impl(dialect)
- else:
- return dialect.type_descriptor(self.impl)
+ return self.impl
def __getattr__(self, key):
"""Proxy all other undefined accessors to the underlying
@@ -503,9 +517,11 @@ class TypeDecorator(AbstractType):
return self.impl.is_mutable()
def _adapt_expression(self, op, othertype):
- return self.impl._adapt_expression(op, othertype)
-
-
+ op, typ = self.impl._adapt_expression(op, othertype)
+ if typ is self.impl:
+ return op, self
+ else:
+ return op, typ
class MutableType(object):
"""A mixin that marks a :class:`TypeEngine` as representing
@@ -593,12 +609,12 @@ class MutableType(object):
"""Compare *x* == *y*."""
return x == y
-def to_instance(typeobj):
+def to_instance(typeobj, *arg, **kw):
if typeobj is None:
return NULLTYPE
if util.callable(typeobj):
- return typeobj()
+ return typeobj(*arg, **kw)
else:
return typeobj
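Under the revised ``TypeDecorator`` contract, ``load_dialect_impl()`` is a plain user hook returning an alternate impl type per dialect (defaulting to ``self.impl``), while ``dialect_impl()`` and ``type_engine()`` take care of the dialect-level adaptation. A hedged sketch in the style of the usual GUID recipe; the type name and column sizes are illustrative::

    import uuid
    from sqlalchemy.types import TypeDecorator, CHAR
    from sqlalchemy.dialects.postgresql import UUID

    class GUID(TypeDecorator):
        impl = CHAR

        def load_dialect_impl(self, dialect):
            # return a different impl *type* per dialect; no call to
            # dialect.type_descriptor() is needed here any more
            if dialect.name == 'postgresql':
                return UUID()
            return CHAR(36)

        def process_bind_param(self, value, dialect):
            return value if value is None else str(value)

        def process_result_value(self, value, dialect):
            if value is None or isinstance(value, uuid.UUID):
                return value
            return uuid.UUID(value)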
diff --git a/lib/sqlalchemy/util.py b/lib/sqlalchemy/util.py
index 351b50883..e647e3d63 100644
--- a/lib/sqlalchemy/util.py
+++ b/lib/sqlalchemy/util.py
@@ -584,6 +584,18 @@ def asbool(obj):
raise ValueError("String is not true/false: %r" % obj)
return bool(obj)
+def bool_or_str(*text):
+ """Return a callable that will evaulate a string as
+ boolean, or one of a set of "alternate" string values.
+
+ """
+ def bool_or_value(obj):
+ if obj in text:
+ return obj
+ else:
+ return asbool(obj)
+ return bool_or_value
+
def coerce_kw_type(kw, key, type_, flexi_bool=True):
"""If 'key' is present in dict 'kw', coerce its value to type 'type\_' if
necessary. If 'flexi_bool' is True, the string '0' is considered false
@@ -745,7 +757,7 @@ class NamedTuple(tuple):
return t
def keys(self):
- return self._labels
+ return [l for l in self._labels if l is not None]
class OrderedProperties(object):
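And a tiny sketch of the ``bool_or_str()`` helper itself, which backs the looser engine configuration coercion shown earlier::

    from sqlalchemy import util

    coerce_echo = util.bool_or_str('debug')
    assert coerce_echo('true') is True       # ordinary boolean strings
    assert coerce_echo('false') is False
    assert coerce_echo('debug') == 'debug'   # the named alternate passes through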
diff --git a/lib/sqlalchemy_nose/__init__.py b/lib/sqlalchemy_nose/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/lib/sqlalchemy_nose/__init__.py
diff --git a/lib/sqlalchemy/test/config.py b/lib/sqlalchemy_nose/config.py
index 7d528a04b..7d528a04b 100644
--- a/lib/sqlalchemy/test/config.py
+++ b/lib/sqlalchemy_nose/config.py
diff --git a/lib/sqlalchemy/test/noseplugin.py b/lib/sqlalchemy_nose/noseplugin.py
index 6a3106e69..8732142f7 100644
--- a/lib/sqlalchemy/test/noseplugin.py
+++ b/lib/sqlalchemy_nose/noseplugin.py
@@ -10,9 +10,9 @@ import StringIO
import nose.case
from nose.plugins import Plugin
-from sqlalchemy import util, log as sqla_log
-from sqlalchemy.test import testing, config, requires
-from sqlalchemy.test.config import (
+from sqlalchemy_nose import config
+
+from sqlalchemy_nose.config import (
_create_testing_engine, _engine_pool, _engine_strategy, _engine_uri, _list_dbs, _log,
_prep_testing_database, _require, _reverse_topological, _server_side_cursors,
_set_table_options, base_config, db, db_label, db_url, file_config, post_configure)
@@ -78,6 +78,10 @@ class NoseSQLAlchemy(Plugin):
self.options = options
def begin(self):
+ global testing, requires, util
+ from sqlalchemy.test import testing, requires
+ from sqlalchemy import util
+
testing.db = db
testing.requires = requires