author     Mike Bayer <mike_mp@zzzcomputing.com>  2014-12-07 17:48:32 -0500
committer  Mike Bayer <mike_mp@zzzcomputing.com>  2014-12-07 17:48:32 -0500
commit     1b260c7959c9b89e6a3993d5d96bc6b0918a8fb0 (patch)
tree       59602c3151094de79d84c60391cf64e4a1b94084
parent     68f1bcc9da191fc72a943f2353c8d80a39bdb76a (diff)
parent     c8817e608788799837a91b1d2616227594698d2b (diff)
download   sqlalchemy-1b260c7959c9b89e6a3993d5d96bc6b0918a8fb0.tar.gz
Merge branch 'master' into ticket_3100
-rw-r--r--  doc/build/changelog/changelog_09.rst  46
-rw-r--r--  doc/build/changelog/changelog_10.rst  191
-rw-r--r--  doc/build/changelog/migration_10.rst  161
-rw-r--r--  doc/build/core/selectable.rst  3
-rw-r--r--  doc/build/core/types.rst  3
-rw-r--r--  doc/build/faq.rst  4
-rw-r--r--  doc/build/orm/examples.rst  2
-rw-r--r--  doc/build/orm/inheritance.rst  21
-rw-r--r--  examples/generic_associations/discriminator_on_association.py  1
-rw-r--r--  examples/versioned_history/history_meta.py  104
-rw-r--r--  examples/versioned_history/test_versioning.py  173
-rw-r--r--  lib/sqlalchemy/dialects/mssql/base.py  105
-rw-r--r--  lib/sqlalchemy/dialects/mysql/base.py  11
-rw-r--r--  lib/sqlalchemy/dialects/mysql/mysqldb.py  2
-rw-r--r--  lib/sqlalchemy/dialects/mysql/pymysql.py  3
-rw-r--r--  lib/sqlalchemy/dialects/oracle/base.py  186
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/base.py  3
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/psycopg2.py  49
-rw-r--r--  lib/sqlalchemy/dialects/sqlite/base.py  64
-rw-r--r--  lib/sqlalchemy/engine/__init__.py  11
-rw-r--r--  lib/sqlalchemy/engine/base.py  101
-rw-r--r--  lib/sqlalchemy/engine/interfaces.py  18
-rw-r--r--  lib/sqlalchemy/engine/reflection.py  13
-rw-r--r--  lib/sqlalchemy/engine/strategies.py  11
-rw-r--r--  lib/sqlalchemy/engine/threadlocal.py  5
-rw-r--r--  lib/sqlalchemy/events.py  23
-rw-r--r--  lib/sqlalchemy/orm/mapper.py  2
-rw-r--r--  lib/sqlalchemy/orm/path_registry.py  10
-rw-r--r--  lib/sqlalchemy/orm/query.py  47
-rw-r--r--  lib/sqlalchemy/orm/strategies.py  2
-rw-r--r--  lib/sqlalchemy/orm/strategy_options.py  8
-rw-r--r--  lib/sqlalchemy/orm/util.py  22
-rw-r--r--  lib/sqlalchemy/sql/compiler.py  40
-rw-r--r--  lib/sqlalchemy/sql/default_comparator.py  21
-rw-r--r--  lib/sqlalchemy/sql/elements.py  6
-rw-r--r--  lib/sqlalchemy/sql/operators.py  5
-rw-r--r--  lib/sqlalchemy/sql/schema.py  116
-rw-r--r--  lib/sqlalchemy/sql/selectable.py  131
-rw-r--r--  lib/sqlalchemy/sql/sqltypes.py  29
-rw-r--r--  lib/sqlalchemy/sql/type_api.py  2
-rw-r--r--  lib/sqlalchemy/testing/engines.py  8
-rw-r--r--  lib/sqlalchemy/testing/exclusions.py  2
-rw-r--r--  lib/sqlalchemy/testing/plugin/plugin_base.py  5
-rw-r--r--  lib/sqlalchemy/testing/profiling.py  14
-rw-r--r--  lib/sqlalchemy/testing/requirements.py  5
-rw-r--r--  lib/sqlalchemy/testing/suite/test_reflection.py  6
-rw-r--r--  lib/sqlalchemy/types.py  1
-rw-r--r--  lib/sqlalchemy/util/_collections.py  9
-rw-r--r--  test/aaa_profiling/test_memusage.py  26
-rw-r--r--  test/base/test_utils.py  30
-rw-r--r--  test/dialect/mssql/test_engine.py  3
-rw-r--r--  test/dialect/mssql/test_reflection.py  15
-rw-r--r--  test/dialect/mssql/test_types.py  312
-rw-r--r--  test/dialect/mysql/test_query.py  39
-rw-r--r--  test/dialect/postgresql/test_query.py  6
-rw-r--r--  test/dialect/postgresql/test_reflection.py  12
-rw-r--r--  test/dialect/test_oracle.py  138
-rw-r--r--  test/dialect/test_sqlite.py  480
-rw-r--r--  test/engine/test_parseconnect.py  279
-rw-r--r--  test/engine/test_reconnect.py  4
-rw-r--r--  test/engine/test_reflection.py  1
-rw-r--r--  test/orm/test_joins.py  56
-rw-r--r--  test/orm/test_mapper.py  3
-rw-r--r--  test/orm/test_of_type.py  16
-rw-r--r--  test/orm/test_query.py  1
-rw-r--r--  test/profiles.txt  82
-rw-r--r--  test/requirements.py  31
-rw-r--r--  test/sql/test_compiler.py  44
-rw-r--r--  test/sql/test_constraints.py  1
-rw-r--r--  test/sql/test_cte.py  30
-rw-r--r--  test/sql/test_metadata.py  87
-rw-r--r--  test/sql/test_operators.py  28
-rw-r--r--  test/sql/test_returning.py  33
-rw-r--r--  test/sql/test_types.py  2
74 files changed, 2828 insertions, 736 deletions
diff --git a/doc/build/changelog/changelog_09.rst b/doc/build/changelog/changelog_09.rst
index 8ed2ea776..f10d48273 100644
--- a/doc/build/changelog/changelog_09.rst
+++ b/doc/build/changelog/changelog_09.rst
@@ -14,6 +14,52 @@
:version: 0.9.9
.. change::
+ :tags: bug, examples
+ :versions: 1.0.0
+
+ Updated the :ref:`examples_versioned_history` example such that
+ mapped columns are re-mapped to
+ match column names as well as grouping of columns; in particular,
+ this allows columns that are explicitly grouped in a same-column-named
+ joined inheritance scenario to be mapped in the same way in the
+ history mappings, avoiding warnings added in the 0.9 series
+ regarding this pattern and allowing the same view of attribute
+ keys.
+
+ .. change::
+ :tags: bug, examples
+ :versions: 1.0.0
+
+      Fixed a bug in the examples/generic_associations/discriminator_on_association.py
+ example, where the subclasses of AddressAssociation were not being
+ mapped as "single table inheritance", leading to problems when trying
+ to use the mappings further.
+
+ .. change::
+ :tags: bug, orm
+ :versions: 1.0.0
+ :tickets: 3251
+
+ Fixed a leak which would occur in the unsupported and highly
+ non-recommended use case of replacing a relationship on a fixed
+ mapped class many times, referring to an arbitrarily growing number of
+ target mappers. A warning is emitted when the old relationship is
+ replaced, however if the mapping were already used for querying, the
+ old relationship would still be referenced within some registries.
+
+ .. change::
+ :tags: bug, sql
+ :versions: 1.0.0
+ :tickets: 3248
+
+ Fixed issue where the columns from a SELECT embedded in an
+ INSERT, either through the values clause or as a "from select",
+ would pollute the column types used in the result set produced by
+ the RETURNING clause when columns from both statements shared the
+ same name, leading to potential errors or mis-adaptation when
+ retrieving the returning rows.
+
+ .. change::
:tags: bug, orm, sqlite
:versions: 1.0.0
:tickets: 3241
diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst
index e63e023d9..6d99095d9 100644
--- a/doc/build/changelog/changelog_10.rst
+++ b/doc/build/changelog/changelog_10.rst
@@ -1,3 +1,4 @@
+
==============
1.0 Changelog
==============
@@ -22,6 +23,196 @@
on compatibility concerns, see :doc:`/changelog/migration_10`.
.. change::
+ :tags: feature, mssql
+ :tickets: 3039
+
+ SQL Server 2012 now recommends VARCHAR(max), NVARCHAR(max),
+ VARBINARY(max) for large text/binary types. The MSSQL dialect will
+ now respect this based on version detection, as well as the new
+ ``deprecate_large_types`` flag.
+
+ .. seealso::
+
+ :ref:`mssql_large_type_deprecation`
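+
+      A minimal sketch of setting the flag explicitly (connection
+      URL hypothetical)::
+
+          eng = create_engine(
+              "mssql+pyodbc://scott:tiger@some_dsn",
+              deprecate_large_types=True)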
+
+ .. change::
+ :tags: bug, sqlite
+ :tickets: 3257
+
+ The SQLite dialect, when using the :class:`.sqlite.DATE`,
+ :class:`.sqlite.TIME`,
+ or :class:`.sqlite.DATETIME` types, and given a ``storage_format`` that
+ only renders numbers, will render the types in DDL as
+ ``DATE_CHAR``, ``TIME_CHAR``, and ``DATETIME_CHAR``, so that despite the
+ lack of alpha characters in the values, the column will still
+ deliver the "text affinity". Normally this is not needed, as the
+ textual values within the default storage formats already
+ imply text.
+
+ .. seealso::
+
+ :ref:`sqlite_datetime`
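+
+      For example, given a hypothetical numbers-only storage format,
+      DDL for the column below will now render ``DATE_CHAR`` rather
+      than ``DATE``::
+
+          from sqlalchemy.dialects.sqlite import DATE
+
+          Column(
+              'mydate',
+              DATE(
+                  storage_format="%(year)04d%(month)02d%(day)02d",
+                  regexp=r"(\d{4})(\d{2})(\d{2})"))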
+
+ .. change::
+ :tags: bug, engine
+ :tickets: 3266
+
+ The engine-level error handling and wrapping routines will now
+ take effect in all engine connection use cases, including
+ when user-custom connect routines are used via the
+ :paramref:`.create_engine.creator` parameter, as well as when
+ the :class:`.Connection` encounters a connection error on
+ revalidation.
+
+ .. seealso::
+
+ :ref:`change_3266`
+
+ .. change::
+ :tags: feature, oracle
+
+ New Oracle DDL features for tables, indexes: COMPRESS, BITMAP.
+ Patch courtesy Gabor Gombas.
+
+ .. change::
+ :tags: bug, oracle
+
+ An alias name will be properly quoted when referred to using the
+ ``%(name)s`` token inside the :meth:`.Select.with_hint` method.
+ Previously, the Oracle backend hadn't implemented this quoting.
+
+ .. change::
+ :tags: feature, oracle
+ :tickets: 3220
+
+ Added support for CTEs under Oracle. This includes some tweaks
+ to the aliasing syntax, as well as a new CTE feature
+ :meth:`.CTE.suffix_with`, which is useful for adding in special
+ Oracle-specific directives to the CTE.
+
+ .. seealso::
+
+ :ref:`change_3220`
+
+ .. change::
+ :tags: feature, mysql
+ :tickets: 3121
+
+ Updated the "supports_unicode_statements" flag to True for MySQLdb
+ and Pymysql under Python 2. This refers to the SQL statements
+ themselves, not the parameters, and affects issues such as table
+ and column names using non-ASCII characters. These drivers both
+ appear to support Python 2 Unicode objects without issue in modern
+ versions.
+
+ .. change::
+ :tags: bug, mysql
+ :tickets: 3263
+
+ The :meth:`.Operators.match` operator is now handled such that the
+ return type is not strictly assumed to be boolean; it now
+ returns a :class:`.Boolean` subclass called :class:`.MatchType`.
+ The type will still produce boolean behavior when used in Python
+ expressions, however the dialect can override its behavior at
+ result time. In the case of MySQL, while the MATCH operator
+ is typically used in a boolean context within an expression,
+ if one actually queries for the value of a match expression, a
+ floating point value is returned; this value is not compatible
+ with SQLAlchemy's C-based boolean processor, so MySQL's result-set
+ behavior now follows that of the :class:`.Float` type.
+ A new operator object ``notmatch_op`` is also added to better allow
+ dialects to define the negation of a match operation.
+
+ .. seealso::
+
+ :ref:`change_3263`
+
+ .. change::
+ :tags: bug, postgresql
+ :tickets: 3264
+
+ The :meth:`.PGDialect.has_table` method will now query against
+ ``pg_catalog.pg_table_is_visible(c.oid)``, rather than testing
+      for an exact schema match, when the schema name is None; this is
+      so that the method will also report that temporary tables
+ are present. Note that this is a behavioral change, as Postgresql
+ allows a non-temporary table to silently overwrite an existing
+ temporary table of the same name, so this changes the behavior
+ of ``checkfirst`` in that unusual scenario.
+
+ .. seealso::
+
+ :ref:`change_3264`
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 3260
+
+ Fixed bug in :meth:`.Table.tometadata` method where the
+ :class:`.CheckConstraint` associated with a :class:`.Boolean`
+ or :class:`.Enum` type object would be doubled in the target table.
+ The copy process now tracks the production of this constraint object
+ as local to a type object.
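+
+      A minimal sketch (names hypothetical)::
+
+          m1 = MetaData()
+          t1 = Table('t', m1, Column('flag', Boolean()))
+
+          # the copy now carries exactly one CHECK constraint
+          # for the Boolean type, not two
+          t2 = t1.tometadata(MetaData())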
+
+ .. change::
+ :tags: feature, orm
+ :tickets: 3217
+
+ Added a parameter :paramref:`.Query.join.isouter` which is synonymous
+ with calling :meth:`.Query.outerjoin`; this flag is to provide a more
+ consistent interface compared to Core :meth:`.FromClause.join`.
+ Pull request courtesy Jonathan Vanasco.
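+
+      E.g., the following two queries are now equivalent (``User``
+      and its ``addresses`` relationship are hypothetical)::
+
+          session.query(User).join(User.addresses, isouter=True)
+          session.query(User).outerjoin(User.addresses)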
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 3243
+
+ The behavioral contract of the :attr:`.ForeignKeyConstraint.columns`
+ collection has been made consistent; this attribute is now a
+ :class:`.ColumnCollection` like that of all other constraints and
+ is initialized at the point when the constraint is associated with
+ a :class:`.Table`.
+
+ .. seealso::
+
+ :ref:`change_3243`
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 3256
+
+ The :meth:`.PropComparator.of_type` modifier has been
+ improved in conjunction with loader directives such as
+ :func:`.joinedload` and :func:`.contains_eager` such that if
+ two :meth:`.PropComparator.of_type` modifiers of the same
+ base type/path are encountered, they will be joined together
+ into a single "polymorphic" entity, rather than replacing
+ the entity of type A with the one of type B. E.g.
+ a joinedload of ``A.b.of_type(BSub1)->BSub1.c`` combined with
+ joinedload of ``A.b.of_type(BSub2)->BSub2.c`` will create a
+ single joinedload of ``A.b.of_type((BSub1, BSub2)) -> BSub1.c, BSub2.c``,
+ without the need for the ``with_polymorphic`` to be explicit
+ in the query.
+
+ .. seealso::
+
+ :ref:`eagerloading_polymorphic_subtypes` - contains an updated
+ example illustrating the new format.
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 3245
+
+ The :attr:`.Column.key` attribute is now used as the source of
+ anonymous bound parameter names within expressions, to match the
+ existing use of this value as the key when rendered in an INSERT
+ or UPDATE statement. This allows :attr:`.Column.key` to be used
+ as a "substitute" string to work around a difficult column name
+ that doesn't translate well into a bound parameter name. Note that
+ the paramstyle is configurable on :func:`.create_engine` in any case,
+ and most DBAPIs today support a named and positional style.
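+
+      A minimal sketch (names hypothetical)::
+
+          # anonymous bound parameters for expressions against this
+          # column are now derived from "select_", not from the
+          # difficult name "select"
+          Column("select", Integer, key="select_")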
+
+ .. change::
:tags: bug, sql
:pullreq: github:146
diff --git a/doc/build/changelog/migration_10.rst b/doc/build/changelog/migration_10.rst
index bc7fa139f..562bb9f1b 100644
--- a/doc/build/changelog/migration_10.rst
+++ b/doc/build/changelog/migration_10.rst
@@ -276,6 +276,64 @@ running 0.9 in production.
:ticket:`2891`
+.. _change_3264:
+
+Postgresql ``has_table()`` now works for temporary tables
+---------------------------------------------------------
+
+This is a simple fix such that "has table" for temporary tables now works,
+allowing code like the following to proceed::
+
+ from sqlalchemy import *
+
+ metadata = MetaData()
+ user_tmp = Table(
+ "user_tmp", metadata,
+ Column("id", INT, primary_key=True),
+ Column('name', VARCHAR(50)),
+ prefixes=['TEMPORARY']
+ )
+
+ e = create_engine("postgresql://scott:tiger@localhost/test", echo='debug')
+ with e.begin() as conn:
+ user_tmp.create(conn, checkfirst=True)
+
+ # checkfirst will succeed
+ user_tmp.create(conn, checkfirst=True)
+
+In the very unlikely case that this behavior causes a non-failing application
+to behave differently, it is because Postgresql allows a non-temporary table
+to silently overwrite a temporary table. So code like the following will
+now act completely differently, no longer creating the real table following
+the temporary table::
+
+ from sqlalchemy import *
+
+ metadata = MetaData()
+ user_tmp = Table(
+ "user_tmp", metadata,
+ Column("id", INT, primary_key=True),
+ Column('name', VARCHAR(50)),
+ prefixes=['TEMPORARY']
+ )
+
+ e = create_engine("postgresql://scott:tiger@localhost/test", echo='debug')
+ with e.begin() as conn:
+ user_tmp.create(conn, checkfirst=True)
+
+ m2 = MetaData()
+ user = Table(
+ "user_tmp", m2,
+ Column("id", INT, primary_key=True),
+ Column('name', VARCHAR(50)),
+ )
+
+ # in 0.9, *will create* the new table, overwriting the old one.
+ # in 1.0, *will not create* the new table
+ user.create(conn, checkfirst=True)
+
+:ticket:`3264`
+
.. _feature_gh134:
Postgresql FILTER keyword
@@ -814,6 +872,29 @@ labeled uniquely.
:ticket:`3170`
+.. _change_3266:
+
+DBAPI exception wrapping and handle_error() event improvements
+--------------------------------------------------------------
+
+SQLAlchemy's wrapping of DBAPI exceptions was not taking place in the
+case where a :class:`.Connection` object was invalidated, and then tried
+to reconnect and encountered an error; this has been resolved.
+
+Additionally, the recently added :meth:`.ConnectionEvents.handle_error`
+event is now invoked for errors that occur upon initial connect, upon
+reconnect, and when :func:`.create_engine` is used given a custom connection
+function via :paramref:`.create_engine.creator`.
+
+The :class:`.ExceptionContext` object has a new datamember
+:attr:`.ExceptionContext.engine` that will always refer to the :class:`.Engine`
+in use, in those cases when the :class:`.Connection` object is not available
+(e.g. on initial connect).
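+
+A minimal sketch of using the event together with the new attribute
+(``eng`` and ``log`` are assumed to exist)::
+
+    from sqlalchemy import event
+
+    @event.listens_for(eng, "handle_error")
+    def receive_handle_error(context):
+        # context.engine is present even when context.connection is
+        # None, e.g. for a failure on initial connect
+        log.warning(
+            "error on %s: %s",
+            context.engine, context.original_exception)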
+
+
+:ticket:`3266`
+
+
.. _behavioral_changes_orm_10:
Behavioral Changes - ORM
@@ -1427,7 +1508,21 @@ A :class:`.Table` can be set up for reflection by passing
:ticket:`3027`
+.. _change_3243:
+
+ForeignKeyConstraint.columns is now a ColumnCollection
+------------------------------------------------------
+
+:attr:`.ForeignKeyConstraint.columns` was previously a plain list
+containing either strings or :class:`.Column` objects, depending on
+how the :class:`.ForeignKeyConstraint` was constructed and whether it was
+associated with a table. The collection is now a :class:`.ColumnCollection`,
+and is only initialized after the :class:`.ForeignKeyConstraint` is
+associated with a :class:`.Table`. A new accessor
+:attr:`.ForeignKeyConstraint.column_keys`
+is added to unconditionally return string keys for the local set of
+columns regardless of how the object was constructed or its current
+state.
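+
+A short sketch of the new accessor::
+
+    fk = ForeignKeyConstraint(["user_id"], ["user.id"])
+
+    # string keys for the local columns are available even before
+    # the constraint is associated with a Table
+    fk.column_keys
+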
Dialect Changes
===============
@@ -1475,6 +1570,37 @@ again works on MySQL.
:ticket:`3186`
+.. _change_3263:
+
+The match() operator now returns an agnostic MatchType compatible with MySQL's floating point return value
+----------------------------------------------------------------------------------------------------------
+
+The return type of a :meth:`.Operators.match` expression is now a new type
+called :class:`.MatchType`. This is a subclass of :class:`.Boolean`,
+that can be intercepted by the dialect in order to produce a different
+result type at SQL execution time.
+
+Code like the following will now function correctly and return floating points
+on MySQL::
+
+ >>> connection.execute(
+ ... select([
+ ... matchtable.c.title.match('Agile Ruby Programming').label('ruby'),
+ ... matchtable.c.title.match('Dive Python').label('python'),
+ ... matchtable.c.title
+ ... ]).order_by(matchtable.c.id)
+    ... ).fetchall()
+ [
+ (2.0, 0.0, 'Agile Web Development with Ruby On Rails'),
+ (0.0, 2.0, 'Dive Into Python'),
+ (2.0, 0.0, "Programming Matz's Ruby"),
+ (0.0, 0.0, 'The Definitive Guide to Django'),
+ (0.0, 1.0, 'Python in a Nutshell')
+ ]
+
+
+:ticket:`3263`
+
.. _change_3182:
PyODBC driver name is required with hostname-based SQL Server connections
@@ -1493,6 +1619,14 @@ when using ODBC to avoid this issue entirely.
:ticket:`3182`
+SQL Server 2012 large text / binary types render as VARCHAR, NVARCHAR, VARBINARY
+--------------------------------------------------------------------------------
+
+The rendering of the :class:`.Text`, :class:`.UnicodeText`, and :class:`.LargeBinary`
+types has been changed for SQL Server 2012 and greater, with options
+to control the behavior completely, based on deprecation guidelines from
+Microsoft. See :ref:`mssql_large_type_deprecation` for details.
+
.. _change_3204:
SQLite/Oracle have distinct methods for temporary table/view name reporting
@@ -1513,6 +1647,33 @@ reflection from temp tables as well, which is :ticket:`3203`.
:ticket:`3204`
+.. _change_3220:
+
+Improved support for CTEs in Oracle
+-----------------------------------
+
+CTE support has been fixed up for Oracle, and there is also a new feature
+:meth:`.CTE.suffix_with` that can assist with Oracle's special directives::
+
+ included_parts = select([
+ part.c.sub_part, part.c.part, part.c.quantity
+ ]).where(part.c.part == "p1").\
+ cte(name="included_parts", recursive=True).\
+ suffix_with(
+ "search depth first by part set ord1",
+ "cycle part set y_cycle to 1 default 0", dialect='oracle')
+
+:ticket:`3220`
+
+New Oracle Keywords for DDL
+-----------------------------
+
+Keywords such as COMPRESS, ON COMMIT, and BITMAP are now supported. See:
+
+:ref:`oracle_table_options`
+
+:ref:`oracle_index_options`
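+
+For example (table and index names hypothetical; see the linked
+sections for details)::
+
+    Table('mytable', metadata, Column('data', String(32)),
+          oracle_compress=True)
+
+    Index('my_index', mytable.c.data, oracle_bitmap=True)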
+
.. _change_2984:
Drizzle Dialect is now an External Dialect
diff --git a/doc/build/core/selectable.rst b/doc/build/core/selectable.rst
index 52acb28e5..03ebeb4ab 100644
--- a/doc/build/core/selectable.rst
+++ b/doc/build/core/selectable.rst
@@ -60,6 +60,9 @@ elements are themselves :class:`.ColumnElement` subclasses).
.. autoclass:: HasPrefixes
:members:
+.. autoclass:: HasSuffixes
+ :members:
+
.. autoclass:: Join
:members:
:inherited-members:
diff --git a/doc/build/core/types.rst b/doc/build/core/types.rst
index 14e30e46d..22b36a648 100644
--- a/doc/build/core/types.rst
+++ b/doc/build/core/types.rst
@@ -67,6 +67,9 @@ Standard Types`_ and the other sections of this chapter.
.. autoclass:: LargeBinary
:members:
+.. autoclass:: MatchType
+ :members:
+
.. autoclass:: Numeric
:members:
diff --git a/doc/build/faq.rst b/doc/build/faq.rst
index 54f761b3b..555fdc9e1 100644
--- a/doc/build/faq.rst
+++ b/doc/build/faq.rst
@@ -757,14 +757,14 @@ using a recipe like the following::
def before_cursor_execute(conn, cursor, statement,
parameters, context, executemany):
conn.info.setdefault('query_start_time', []).append(time.time())
- logger.debug("Start Query: %s" % statement)
+ logger.debug("Start Query: %s", statement)
@event.listens_for(Engine, "after_cursor_execute")
def after_cursor_execute(conn, cursor, statement,
parameters, context, executemany):
total = time.time() - conn.info['query_start_time'].pop(-1)
logger.debug("Query Complete!")
- logger.debug("Total Time: %f" % total)
+ logger.debug("Total Time: %f", total)
Above, we use the :meth:`.ConnectionEvents.before_cursor_execute` and
:meth:`.ConnectionEvents.after_cursor_execute` events to establish an interception
diff --git a/doc/build/orm/examples.rst b/doc/build/orm/examples.rst
index 93478381a..4db7c00dc 100644
--- a/doc/build/orm/examples.rst
+++ b/doc/build/orm/examples.rst
@@ -86,6 +86,8 @@ XML Persistence
Versioning Objects
------------------------
+.. _examples_versioned_history:
+
Versioning with a History Table
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/doc/build/orm/inheritance.rst b/doc/build/orm/inheritance.rst
index 9f01a3e24..0713634bc 100644
--- a/doc/build/orm/inheritance.rst
+++ b/doc/build/orm/inheritance.rst
@@ -475,6 +475,8 @@ subselect back to the parent ``companies`` table.
:func:`.orm.aliased` and :func:`.orm.with_polymorphic` constructs in conjunction
with :meth:`.Query.join`, ``any()`` and ``has()``.
+.. _eagerloading_polymorphic_subtypes:
+
Eager Loading of Specific or Polymorphic Subtypes
++++++++++++++++++++++++++++++++++++++++++++++++++
@@ -491,7 +493,7 @@ objects, querying the ``employee`` and ``engineer`` tables simultaneously::
)
)
-As is the case with :meth:`.Query.join`, :func:`~sqlalchemy.orm.interfaces.PropComparator.of_type`
+As is the case with :meth:`.Query.join`, :meth:`~PropComparator.of_type`
also can be used with eager loading and :func:`.orm.with_polymorphic`
at the same time, so that all sub-attributes of all referenced subtypes
can be loaded::
@@ -513,6 +515,23 @@ can be loaded::
:func:`~sqlalchemy.orm.interfaces.PropComparator.of_type`, supporting
single target types as well as :func:`.orm.with_polymorphic` targets.
+Another option for the above query is to state the two subtypes separately;
+the :func:`.joinedload` directive should detect this and create the
+above ``with_polymorphic`` construct automatically::
+
+ session.query(Company).\
+ options(
+ joinedload(Company.employees.of_type(Manager)),
+ joinedload(Company.employees.of_type(Engineer)),
+ )
+
+.. versionadded:: 1.0
+ Eager loaders such as :func:`.joinedload` will create a polymorphic
+ entity when multiple overlapping :meth:`~PropComparator.of_type`
+ directives are encountered.
+
+
Single Table Inheritance
------------------------
diff --git a/examples/generic_associations/discriminator_on_association.py b/examples/generic_associations/discriminator_on_association.py
index e03cfec00..7bb04cf85 100644
--- a/examples/generic_associations/discriminator_on_association.py
+++ b/examples/generic_associations/discriminator_on_association.py
@@ -84,6 +84,7 @@ class HasAddresses(object):
"%sAddressAssociation" % name,
(AddressAssociation, ),
dict(
+ __tablename__=None,
__mapper_args__={
"polymorphic_identity": discriminator
}
diff --git a/examples/versioned_history/history_meta.py b/examples/versioned_history/history_meta.py
index f9e979a6a..6d7b137eb 100644
--- a/examples/versioned_history/history_meta.py
+++ b/examples/versioned_history/history_meta.py
@@ -4,19 +4,22 @@ from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.orm import mapper, attributes, object_mapper
from sqlalchemy.orm.exc import UnmappedColumnError
from sqlalchemy import Table, Column, ForeignKeyConstraint, Integer, DateTime
-from sqlalchemy import event
+from sqlalchemy import event, util
import datetime
from sqlalchemy.orm.properties import RelationshipProperty
+
def col_references_table(col, table):
for fk in col.foreign_keys:
if fk.references(table):
return True
return False
+
def _is_versioning_col(col):
return "version_meta" in col.info
+
def _history_mapper(local_mapper):
cls = local_mapper.class_
@@ -33,52 +36,77 @@ def _history_mapper(local_mapper):
super_fks = []
def _col_copy(col):
+ orig = col
col = col.copy()
+ orig.info['history_copy'] = col
col.unique = False
col.default = col.server_default = None
return col
- if not super_mapper or local_mapper.local_table is not super_mapper.local_table:
+ properties = util.OrderedDict()
+ if not super_mapper or \
+ local_mapper.local_table is not super_mapper.local_table:
cols = []
+ version_meta = {"version_meta": True} # add column.info to identify
+ # columns specific to versioning
+
for column in local_mapper.local_table.c:
if _is_versioning_col(column):
continue
col = _col_copy(column)
- if super_mapper and col_references_table(column, super_mapper.local_table):
- super_fks.append((col.key, list(super_history_mapper.local_table.primary_key)[0]))
+ if super_mapper and \
+ col_references_table(column, super_mapper.local_table):
+ super_fks.append(
+ (
+ col.key,
+ list(super_history_mapper.local_table.primary_key)[0]
+ )
+ )
cols.append(col)
if column is local_mapper.polymorphic_on:
polymorphic_on = col
- if super_mapper:
- super_fks.append(('version', super_history_mapper.local_table.c.version))
+ orig_prop = local_mapper.get_property_by_column(column)
+ # carry over column re-mappings
+ if len(orig_prop.columns) > 1 or \
+ orig_prop.columns[0].key != orig_prop.key:
+ properties[orig_prop.key] = tuple(
+ col.info['history_copy'] for col in orig_prop.columns)
- version_meta = {"version_meta": True} # add column.info to identify
- # columns specific to versioning
+ if super_mapper:
+ super_fks.append(
+ (
+ 'version', super_history_mapper.local_table.c.version
+ )
+ )
# "version" stores the integer version id. This column is
# required.
- cols.append(Column('version', Integer, primary_key=True,
- autoincrement=False, info=version_meta))
+ cols.append(
+ Column(
+ 'version', Integer, primary_key=True,
+ autoincrement=False, info=version_meta))
# "changed" column stores the UTC timestamp of when the
# history row was created.
# This column is optional and can be omitted.
- cols.append(Column('changed', DateTime,
- default=datetime.datetime.utcnow,
- info=version_meta))
+ cols.append(Column(
+ 'changed', DateTime,
+ default=datetime.datetime.utcnow,
+ info=version_meta))
if super_fks:
cols.append(ForeignKeyConstraint(*zip(*super_fks)))
- table = Table(local_mapper.local_table.name + '_history',
- local_mapper.local_table.metadata,
- *cols,
- schema=local_mapper.local_table.schema
+ table = Table(
+ local_mapper.local_table.name + '_history',
+ local_mapper.local_table.metadata,
+ *cols,
+ schema=local_mapper.local_table.schema
)
else:
# single table inheritance. take any additional columns that may have
@@ -91,24 +119,33 @@ def _history_mapper(local_mapper):
if super_history_mapper:
bases = (super_history_mapper.class_,)
+
+ if table is not None:
+ properties['changed'] = (
+ (table.c.changed, ) +
+ tuple(super_history_mapper.attrs.changed.columns)
+ )
+
else:
bases = local_mapper.base_mapper.class_.__bases__
versioned_cls = type.__new__(type, "%sHistory" % cls.__name__, bases, {})
m = mapper(
- versioned_cls,
- table,
- inherits=super_history_mapper,
- polymorphic_on=polymorphic_on,
- polymorphic_identity=local_mapper.polymorphic_identity
- )
+ versioned_cls,
+ table,
+ inherits=super_history_mapper,
+ polymorphic_on=polymorphic_on,
+ polymorphic_identity=local_mapper.polymorphic_identity,
+ properties=properties
+ )
cls.__history_mapper__ = m
if not super_history_mapper:
local_mapper.local_table.append_column(
Column('version', Integer, default=1, nullable=False)
)
- local_mapper.add_property("version", local_mapper.local_table.c.version)
+ local_mapper.add_property(
+ "version", local_mapper.local_table.c.version)
class Versioned(object):
@@ -126,6 +163,7 @@ def versioned_objects(iter):
if hasattr(obj, '__history_mapper__'):
yield obj
+
def create_version(obj, session, deleted=False):
obj_mapper = object_mapper(obj)
history_mapper = obj.__history_mapper__
@@ -137,7 +175,10 @@ def create_version(obj, session, deleted=False):
obj_changed = False
- for om, hm in zip(obj_mapper.iterate_to_root(), history_mapper.iterate_to_root()):
+ for om, hm in zip(
+ obj_mapper.iterate_to_root(),
+ history_mapper.iterate_to_root()
+ ):
if hm.single:
continue
@@ -157,11 +198,12 @@ def create_version(obj, session, deleted=False):
# in the case of single table inheritance, there may be
# columns on the mapped table intended for the subclass only.
# the "unmapped" status of the subclass column on the
- # base class is a feature of the declarative module as of sqla 0.5.2.
+ # base class is a feature of the declarative module.
continue
- # expired object attributes and also deferred cols might not be in the
- # dict. force it to load no matter what by using getattr().
+ # expired object attributes and also deferred cols might not
+ # be in the dict. force it to load no matter what by
+ # using getattr().
if prop.key not in obj_state.dict:
getattr(obj, prop.key)
@@ -182,8 +224,9 @@ def create_version(obj, session, deleted=False):
# check those too
for prop in obj_mapper.iterate_properties:
if isinstance(prop, RelationshipProperty) and \
- attributes.get_history(obj, prop.key,
- passive=attributes.PASSIVE_NO_INITIALIZE).has_changes():
+ attributes.get_history(
+ obj, prop.key,
+ passive=attributes.PASSIVE_NO_INITIALIZE).has_changes():
for p in prop.local_columns:
if p.foreign_keys:
obj_changed = True
@@ -201,6 +244,7 @@ def create_version(obj, session, deleted=False):
session.add(hist)
obj.version += 1
+
def versioned_session(session):
@event.listens_for(session, 'before_flush')
def before_flush(session, flush_context, instances):
diff --git a/examples/versioned_history/test_versioning.py b/examples/versioned_history/test_versioning.py
index 874223d62..dde73a5ae 100644
--- a/examples/versioned_history/test_versioning.py
+++ b/examples/versioned_history/test_versioning.py
@@ -1,20 +1,28 @@
-"""Unit tests illustrating usage of the ``history_meta.py`` module functions."""
+"""Unit tests illustrating usage of the ``history_meta.py``
+module functions."""
from unittest import TestCase
from sqlalchemy.ext.declarative import declarative_base
from .history_meta import Versioned, versioned_session
-from sqlalchemy import create_engine, Column, Integer, String, ForeignKey, Boolean
-from sqlalchemy.orm import clear_mappers, Session, deferred, relationship
+from sqlalchemy import create_engine, Column, Integer, String, \
+ ForeignKey, Boolean, select
+from sqlalchemy.orm import clear_mappers, Session, deferred, relationship, \
+ column_property
from sqlalchemy.testing import AssertsCompiledSQL, eq_, assert_raises
from sqlalchemy.testing.entities import ComparableEntity
from sqlalchemy.orm import exc as orm_exc
+import warnings
+
+warnings.simplefilter("error")
engine = None
+
def setup_module():
global engine
engine = create_engine('sqlite://', echo=True)
+
class TestVersioning(TestCase, AssertsCompiledSQL):
__dialect__ = 'default'
@@ -52,14 +60,16 @@ class TestVersioning(TestCase, AssertsCompiledSQL):
SomeClassHistory = SomeClass.__history_mapper__.class_
eq_(
- sess.query(SomeClassHistory).filter(SomeClassHistory.version == 1).all(),
+ sess.query(SomeClassHistory).filter(
+ SomeClassHistory.version == 1).all(),
[SomeClassHistory(version=1, name='sc1')]
)
sc.name = 'sc1modified2'
eq_(
- sess.query(SomeClassHistory).order_by(SomeClassHistory.version).all(),
+ sess.query(SomeClassHistory).order_by(
+ SomeClassHistory.version).all(),
[
SomeClassHistory(version=1, name='sc1'),
SomeClassHistory(version=2, name='sc1modified')
@@ -76,7 +86,8 @@ class TestVersioning(TestCase, AssertsCompiledSQL):
sess.commit()
eq_(
- sess.query(SomeClassHistory).order_by(SomeClassHistory.version).all(),
+ sess.query(SomeClassHistory).order_by(
+ SomeClassHistory.version).all(),
[
SomeClassHistory(version=1, name='sc1'),
SomeClassHistory(version=2, name='sc1modified')
@@ -87,7 +98,8 @@ class TestVersioning(TestCase, AssertsCompiledSQL):
sess.commit()
eq_(
- sess.query(SomeClassHistory).order_by(SomeClassHistory.version).all(),
+ sess.query(SomeClassHistory).order_by(
+ SomeClassHistory.version).all(),
[
SomeClassHistory(version=1, name='sc1'),
SomeClassHistory(version=2, name='sc1modified'),
@@ -164,13 +176,13 @@ class TestVersioning(TestCase, AssertsCompiledSQL):
SomeClassHistory = SomeClass.__history_mapper__.class_
eq_(
- sess.query(SomeClassHistory.boole).order_by(SomeClassHistory.id).all(),
+ sess.query(SomeClassHistory.boole).order_by(
+ SomeClassHistory.id).all(),
[(True, ), (None, )]
)
eq_(sc.version, 3)
-
def test_deferred(self):
"""test versioning of unloaded, deferred columns."""
@@ -199,11 +211,11 @@ class TestVersioning(TestCase, AssertsCompiledSQL):
SomeClassHistory = SomeClass.__history_mapper__.class_
eq_(
- sess.query(SomeClassHistory).filter(SomeClassHistory.version == 1).all(),
+ sess.query(SomeClassHistory).filter(
+ SomeClassHistory.version == 1).all(),
[SomeClassHistory(version=1, name='sc1', data='somedata')]
)
-
def test_joined_inheritance(self):
class BaseClass(Versioned, self.Base, ComparableEntity):
__tablename__ = 'basetable'
@@ -212,12 +224,17 @@ class TestVersioning(TestCase, AssertsCompiledSQL):
name = Column(String(50))
type = Column(String(20))
- __mapper_args__ = {'polymorphic_on': type, 'polymorphic_identity': 'base'}
+ __mapper_args__ = {
+ 'polymorphic_on': type,
+ 'polymorphic_identity': 'base'}
class SubClassSeparatePk(BaseClass):
__tablename__ = 'subtable1'
- id = Column(Integer, primary_key=True)
+ id = column_property(
+ Column(Integer, primary_key=True),
+ BaseClass.id
+ )
base_id = Column(Integer, ForeignKey('basetable.id'))
subdata1 = Column(String(50))
@@ -226,7 +243,8 @@ class TestVersioning(TestCase, AssertsCompiledSQL):
class SubClassSamePk(BaseClass):
__tablename__ = 'subtable2'
- id = Column(Integer, ForeignKey('basetable.id'), primary_key=True)
+ id = Column(
+ Integer, ForeignKey('basetable.id'), primary_key=True)
subdata2 = Column(String(50))
__mapper_args__ = {'polymorphic_identity': 'same'}
@@ -246,38 +264,50 @@ class TestVersioning(TestCase, AssertsCompiledSQL):
sess.commit()
BaseClassHistory = BaseClass.__history_mapper__.class_
- SubClassSeparatePkHistory = SubClassSeparatePk.__history_mapper__.class_
+ SubClassSeparatePkHistory = \
+ SubClassSeparatePk.__history_mapper__.class_
SubClassSamePkHistory = SubClassSamePk.__history_mapper__.class_
eq_(
sess.query(BaseClassHistory).order_by(BaseClassHistory.id).all(),
[
- SubClassSeparatePkHistory(id=1, name='sep1', type='sep', version=1),
+ SubClassSeparatePkHistory(
+ id=1, name='sep1', type='sep', version=1),
BaseClassHistory(id=2, name='base1', type='base', version=1),
- SubClassSamePkHistory(id=3, name='same1', type='same', version=1)
+ SubClassSamePkHistory(
+ id=3, name='same1', type='same', version=1)
]
)
same1.subdata2 = 'same1subdatamod2'
eq_(
- sess.query(BaseClassHistory).order_by(BaseClassHistory.id, BaseClassHistory.version).all(),
+ sess.query(BaseClassHistory).order_by(
+ BaseClassHistory.id, BaseClassHistory.version).all(),
[
- SubClassSeparatePkHistory(id=1, name='sep1', type='sep', version=1),
+ SubClassSeparatePkHistory(
+ id=1, name='sep1', type='sep', version=1),
BaseClassHistory(id=2, name='base1', type='base', version=1),
- SubClassSamePkHistory(id=3, name='same1', type='same', version=1),
- SubClassSamePkHistory(id=3, name='same1', type='same', version=2)
+ SubClassSamePkHistory(
+ id=3, name='same1', type='same', version=1),
+ SubClassSamePkHistory(
+ id=3, name='same1', type='same', version=2)
]
)
base1.name = 'base1mod2'
eq_(
- sess.query(BaseClassHistory).order_by(BaseClassHistory.id, BaseClassHistory.version).all(),
+ sess.query(BaseClassHistory).order_by(
+ BaseClassHistory.id, BaseClassHistory.version).all(),
[
- SubClassSeparatePkHistory(id=1, name='sep1', type='sep', version=1),
+ SubClassSeparatePkHistory(
+ id=1, name='sep1', type='sep', version=1),
BaseClassHistory(id=2, name='base1', type='base', version=1),
- BaseClassHistory(id=2, name='base1mod', type='base', version=2),
- SubClassSamePkHistory(id=3, name='same1', type='same', version=1),
- SubClassSamePkHistory(id=3, name='same1', type='same', version=2)
+ BaseClassHistory(
+ id=2, name='base1mod', type='base', version=2),
+ SubClassSamePkHistory(
+ id=3, name='same1', type='same', version=1),
+ SubClassSamePkHistory(
+ id=3, name='same1', type='same', version=2)
]
)
@@ -289,13 +319,17 @@ class TestVersioning(TestCase, AssertsCompiledSQL):
name = Column(String(50))
type = Column(String(20))
- __mapper_args__ = {'polymorphic_on': type,
- 'polymorphic_identity': 'base'}
+ __mapper_args__ = {
+ 'polymorphic_on': type,
+ 'polymorphic_identity': 'base'}
class SubClass(BaseClass):
__tablename__ = 'subtable'
- id = Column(Integer, primary_key=True)
+ id = column_property(
+ Column(Integer, primary_key=True),
+ BaseClass.id
+ )
base_id = Column(Integer, ForeignKey('basetable.id'))
subdata1 = Column(String(50))
@@ -316,12 +350,18 @@ class TestVersioning(TestCase, AssertsCompiledSQL):
q = sess.query(SubSubHistory)
self.assert_compile(
q,
"SELECT "
"subsubtable_history.id AS subsubtable_history_id, "
"subtable_history.id AS subtable_history_id, "
"basetable_history.id AS basetable_history_id, "
+ "subsubtable_history.changed AS subsubtable_history_changed, "
+ "subtable_history.changed AS subtable_history_changed, "
+ "basetable_history.changed AS basetable_history_changed, "
"basetable_history.name AS basetable_history_name, "
"basetable_history.type AS basetable_history_type, "
@@ -330,9 +370,6 @@ class TestVersioning(TestCase, AssertsCompiledSQL):
"subtable_history.version AS subtable_history_version, "
"basetable_history.version AS basetable_history_version, "
- "subsubtable_history.changed AS subsubtable_history_changed, "
- "subtable_history.changed AS subtable_history_changed, "
- "basetable_history.changed AS basetable_history_changed, "
"subtable_history.base_id AS subtable_history_base_id, "
"subtable_history.subdata1 AS subtable_history_subdata1, "
@@ -342,7 +379,8 @@ class TestVersioning(TestCase, AssertsCompiledSQL):
"ON basetable_history.id = subtable_history.base_id "
"AND basetable_history.version = subtable_history.version "
"JOIN subsubtable_history ON subtable_history.id = "
- "subsubtable_history.id AND subtable_history.version = subsubtable_history.version"
+ "subsubtable_history.id AND subtable_history.version = "
+ "subsubtable_history.version"
)
ssc = SubSubClass(name='ss1', subdata1='sd1', subdata2='sd2')
@@ -360,10 +398,53 @@ class TestVersioning(TestCase, AssertsCompiledSQL):
[SubSubHistory(name='ss1', subdata1='sd1',
subdata2='sd2', type='subsub', version=1)]
)
- eq_(ssc, SubSubClass(name='ss1', subdata1='sd11',
- subdata2='sd22', version=2))
+ eq_(ssc, SubSubClass(
+ name='ss1', subdata1='sd11',
+ subdata2='sd22', version=2))
+
+ def test_joined_inheritance_changed(self):
+ class BaseClass(Versioned, self.Base, ComparableEntity):
+ __tablename__ = 'basetable'
+
+ id = Column(Integer, primary_key=True)
+ name = Column(String(50))
+ type = Column(String(20))
+
+ __mapper_args__ = {
+ 'polymorphic_on': type,
+ 'polymorphic_identity': 'base'
+ }
+
+ class SubClass(BaseClass):
+ __tablename__ = 'subtable'
+
+ id = Column(Integer, ForeignKey('basetable.id'), primary_key=True)
+
+ __mapper_args__ = {'polymorphic_identity': 'sep'}
+
+ self.create_tables()
+
+ BaseClassHistory = BaseClass.__history_mapper__.class_
+ SubClassHistory = SubClass.__history_mapper__.class_
+ sess = self.session
+ s1 = SubClass(name='s1')
+ sess.add(s1)
+ sess.commit()
+
+ s1.name = 's2'
+ sess.commit()
+ actual_changed_base = sess.scalar(
+ select([BaseClass.__history_mapper__.local_table.c.changed]))
+ actual_changed_sub = sess.scalar(
+ select([SubClass.__history_mapper__.local_table.c.changed]))
+ h1 = sess.query(BaseClassHistory).first()
+ eq_(h1.changed, actual_changed_base)
+ eq_(h1.changed, actual_changed_sub)
+ h1 = sess.query(SubClassHistory).first()
+ eq_(h1.changed, actual_changed_base)
+ eq_(h1.changed, actual_changed_sub)
def test_single_inheritance(self):
class BaseClass(Versioned, self.Base, ComparableEntity):
@@ -372,8 +453,9 @@ class TestVersioning(TestCase, AssertsCompiledSQL):
id = Column(Integer, primary_key=True)
name = Column(String(50))
type = Column(String(50))
- __mapper_args__ = {'polymorphic_on': type,
- 'polymorphic_identity': 'base'}
+ __mapper_args__ = {
+ 'polymorphic_on': type,
+ 'polymorphic_identity': 'base'}
class SubClass(BaseClass):
@@ -396,8 +478,8 @@ class TestVersioning(TestCase, AssertsCompiledSQL):
SubClassHistory = SubClass.__history_mapper__.class_
eq_(
- sess.query(BaseClassHistory).order_by(BaseClassHistory.id,
- BaseClassHistory.version).all(),
+ sess.query(BaseClassHistory).order_by(
+ BaseClassHistory.id, BaseClassHistory.version).all(),
[BaseClassHistory(id=1, name='b1', type='base', version=1)]
)
@@ -405,11 +487,12 @@ class TestVersioning(TestCase, AssertsCompiledSQL):
b1.name = 'b1modified2'
eq_(
- sess.query(BaseClassHistory).order_by(BaseClassHistory.id,
- BaseClassHistory.version).all(),
+ sess.query(BaseClassHistory).order_by(
+ BaseClassHistory.id, BaseClassHistory.version).all(),
[
BaseClassHistory(id=1, name='b1', type='base', version=1),
- BaseClassHistory(id=1, name='b1modified', type='base', version=2),
+ BaseClassHistory(
+ id=1, name='b1modified', type='base', version=2),
SubClassHistory(id=2, name='s1', type='sub', version=1)
]
)
@@ -475,14 +558,16 @@ class TestVersioning(TestCase, AssertsCompiledSQL):
assert sc.version == 2
eq_(
- sess.query(SomeClassHistory).filter(SomeClassHistory.version == 1).all(),
+ sess.query(SomeClassHistory).filter(
+ SomeClassHistory.version == 1).all(),
[SomeClassHistory(version=1, name='sc1', related_id=None)]
)
sc.related = None
eq_(
- sess.query(SomeClassHistory).order_by(SomeClassHistory.version).all(),
+ sess.query(SomeClassHistory).order_by(
+ SomeClassHistory.version).all(),
[
SomeClassHistory(version=1, name='sc1', related_id=None),
SomeClassHistory(version=2, name='sc1', related_id=sr1.id)
diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py
index dad02ee0f..5d84975c0 100644
--- a/lib/sqlalchemy/dialects/mssql/base.py
+++ b/lib/sqlalchemy/dialects/mssql/base.py
@@ -226,6 +226,53 @@ The DATE and TIME types are not available for MSSQL 2005 and
previous - if a server version below 2008 is detected, DDL
for these types will be issued as DATETIME.
+.. _mssql_large_type_deprecation:
+
+Large Text/Binary Type Deprecation
+----------------------------------
+
+Per `SQL Server 2012/2014 Documentation <http://technet.microsoft.com/en-us/library/ms187993.aspx>`_,
+the ``NTEXT``, ``TEXT`` and ``IMAGE`` datatypes are to be removed from SQL Server
+in a future release. SQLAlchemy normally relates these types to the
+:class:`.UnicodeText`, :class:`.Text` and :class:`.LargeBinary` datatypes.
+
+In order to accommodate this change, a new flag ``deprecate_large_types``
+is added to the dialect, which will be automatically set based on detection
+of the server version in use, if not otherwise set by the user. The
+behavior of this flag is as follows:
+
+* When this flag is ``True``, the :class:`.UnicodeText`, :class:`.Text` and
+ :class:`.LargeBinary` datatypes, when used to render DDL, will render the
+ types ``NVARCHAR(max)``, ``VARCHAR(max)``, and ``VARBINARY(max)``,
+ respectively. This is a new behavior as of the addition of this flag.
+
+* When this flag is ``False``, the :class:`.UnicodeText`, :class:`.Text` and
+ :class:`.LargeBinary` datatypes, when used to render DDL, will render the
+ types ``NTEXT``, ``TEXT``, and ``IMAGE``,
+ respectively. This is the long-standing behavior of these types.
+
+* The flag begins with the value ``None``, before a database connection is
+ established. If the dialect is used to render DDL without the flag being
+ set, it is interpreted the same as ``False``.
+
+* On first connection, the dialect detects if SQL Server version 2012 or greater
+ is in use; if the flag is still at ``None``, it sets it to ``True`` or
+ ``False`` based on whether 2012 or greater is detected.
+
+* The flag can be set to either ``True`` or ``False`` when the dialect
+ is created, typically via :func:`.create_engine`::
+
+ eng = create_engine("mssql+pymssql://user:pass@host/db",
+ deprecate_large_types=True)
+
+* Complete control over whether the "old" or "new" types are rendered is
+ available in all SQLAlchemy versions by using the UPPERCASE type objects
+ instead: :class:`.NVARCHAR`, :class:`.VARCHAR`, :class:`.types.VARBINARY`,
+ :class:`.TEXT`, :class:`.mssql.NTEXT`, :class:`.mssql.IMAGE` will always remain
+  fixed and always output exactly that type, as in the sketch below.
+
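+  A short sketch (column name hypothetical)::
+
+      from sqlalchemy.dialects.mssql import NTEXT
+
+      # renders NTEXT regardless of server version or the
+      # deprecate_large_types setting
+      Column('data', NTEXT)
+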
+.. versionadded:: 1.0.0
+
.. _mssql_indexes:
Clustered Index Support
@@ -367,19 +414,20 @@ import operator
import re
from ... import sql, schema as sa_schema, exc, util
-from ...sql import compiler, expression, \
- util as sql_util, cast
+from ...sql import compiler, expression, util as sql_util
from ... import engine
from ...engine import reflection, default
from ... import types as sqltypes
from ...types import INTEGER, BIGINT, SMALLINT, DECIMAL, NUMERIC, \
FLOAT, TIMESTAMP, DATETIME, DATE, BINARY,\
- VARBINARY, TEXT, VARCHAR, NVARCHAR, CHAR, NCHAR
+ TEXT, VARCHAR, NVARCHAR, CHAR, NCHAR
from ...util import update_wrapper
from . import information_schema as ischema
+# http://sqlserverbuilds.blogspot.com/
+MS_2012_VERSION = (11,)
MS_2008_VERSION = (10,)
MS_2005_VERSION = (9,)
MS_2000_VERSION = (8,)
@@ -545,6 +593,26 @@ class NTEXT(sqltypes.UnicodeText):
__visit_name__ = 'NTEXT'
+class VARBINARY(sqltypes.VARBINARY, sqltypes.LargeBinary):
+ """The MSSQL VARBINARY type.
+
+ This type extends both :class:`.types.VARBINARY` and
+ :class:`.types.LargeBinary`. In "deprecate_large_types" mode,
+ the :class:`.types.LargeBinary` type will produce ``VARBINARY(max)``
+ on SQL Server.
+
+ .. versionadded:: 1.0.0
+
+ .. seealso::
+
+ :ref:`mssql_large_type_deprecation`
+
+ """
+ __visit_name__ = 'VARBINARY'
+
+
class IMAGE(sqltypes.LargeBinary):
__visit_name__ = 'IMAGE'
@@ -683,8 +751,17 @@ class MSTypeCompiler(compiler.GenericTypeCompiler):
def visit_unicode(self, type_):
return self.visit_NVARCHAR(type_)
+ def visit_text(self, type_):
+ if self.dialect.deprecate_large_types:
+ return self.visit_VARCHAR(type_)
+ else:
+ return self.visit_TEXT(type_)
+
def visit_unicode_text(self, type_):
- return self.visit_NTEXT(type_)
+ if self.dialect.deprecate_large_types:
+ return self.visit_NVARCHAR(type_)
+ else:
+ return self.visit_NTEXT(type_)
def visit_NTEXT(self, type_):
return self._extend("NTEXT", type_)
@@ -717,7 +794,10 @@ class MSTypeCompiler(compiler.GenericTypeCompiler):
return self.visit_TIME(type_)
def visit_large_binary(self, type_):
- return self.visit_IMAGE(type_)
+ if self.dialect.deprecate_large_types:
+ return self.visit_VARBINARY(type_)
+ else:
+ return self.visit_IMAGE(type_)
def visit_IMAGE(self, type_):
return "IMAGE"
@@ -1370,13 +1450,15 @@ class MSDialect(default.DefaultDialect):
query_timeout=None,
use_scope_identity=True,
max_identifier_length=None,
- schema_name="dbo", **opts):
+ schema_name="dbo",
+ deprecate_large_types=None, **opts):
self.query_timeout = int(query_timeout or 0)
self.schema_name = schema_name
self.use_scope_identity = use_scope_identity
self.max_identifier_length = int(max_identifier_length or 0) or \
self.max_identifier_length
+ self.deprecate_large_types = deprecate_large_types
super(MSDialect, self).__init__(**opts)
def do_savepoint(self, connection, name):
@@ -1390,6 +1472,9 @@ class MSDialect(default.DefaultDialect):
def initialize(self, connection):
super(MSDialect, self).initialize(connection)
+ self._setup_version_attributes()
+
+ def _setup_version_attributes(self):
if self.server_version_info[0] not in list(range(8, 17)):
# FreeTDS with version 4.2 seems to report here
# a number like "95.10.255". Don't know what
@@ -1405,6 +1490,9 @@ class MSDialect(default.DefaultDialect):
self.implicit_returning = True
if self.server_version_info >= MS_2008_VERSION:
self.supports_multivalues_insert = True
+ if self.deprecate_large_types is None:
+ self.deprecate_large_types = \
+ self.server_version_info >= MS_2012_VERSION
def _get_default_schema_name(self, connection):
if self.server_version_info < MS_2005_VERSION:
@@ -1592,12 +1680,11 @@ class MSDialect(default.DefaultDialect):
if coltype in (MSString, MSChar, MSNVarchar, MSNChar, MSText,
MSNText, MSBinary, MSVarBinary,
sqltypes.LargeBinary):
+ if charlen == -1:
+ charlen = 'max'
kwargs['length'] = charlen
if collation:
kwargs['collation'] = collation
- if coltype == MSText or \
- (coltype in (MSString, MSNVarchar) and charlen == -1):
- kwargs.pop('length')
if coltype is None:
util.warn(
diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py
index 2fb054d0c..c868f58b2 100644
--- a/lib/sqlalchemy/dialects/mysql/base.py
+++ b/lib/sqlalchemy/dialects/mysql/base.py
@@ -602,6 +602,14 @@ class _StringType(sqltypes.String):
to_inspect=[_StringType, sqltypes.String])
+class _MatchType(sqltypes.Float, sqltypes.MatchType):
+ def __init__(self, **kw):
+ # TODO: float arguments?
+ sqltypes.Float.__init__(self)
+ sqltypes.MatchType.__init__(self)
+
+
class NUMERIC(_NumericType, sqltypes.NUMERIC):
"""MySQL NUMERIC type."""
@@ -1544,6 +1552,7 @@ colspecs = {
sqltypes.Float: FLOAT,
sqltypes.Time: TIME,
sqltypes.Enum: ENUM,
+ sqltypes.MatchType: _MatchType
}
# Everything 3.23 through 5.1 excepting OpenGIS types.
@@ -2593,7 +2602,7 @@ class MySQLDialect(default.DefaultDialect):
pass
else:
self.logger.info(
- "Converting unknown KEY type %s to a plain KEY" % flavor)
+ "Converting unknown KEY type %s to a plain KEY", flavor)
pass
index_d = {}
index_d['name'] = spec['name']
diff --git a/lib/sqlalchemy/dialects/mysql/mysqldb.py b/lib/sqlalchemy/dialects/mysql/mysqldb.py
index 73210d67a..893c6a9e2 100644
--- a/lib/sqlalchemy/dialects/mysql/mysqldb.py
+++ b/lib/sqlalchemy/dialects/mysql/mysqldb.py
@@ -77,7 +77,7 @@ class MySQLIdentifierPreparer_mysqldb(MySQLIdentifierPreparer):
class MySQLDialect_mysqldb(MySQLDialect):
driver = 'mysqldb'
- supports_unicode_statements = False
+ supports_unicode_statements = True
supports_sane_rowcount = True
supports_sane_multi_rowcount = True
diff --git a/lib/sqlalchemy/dialects/mysql/pymysql.py b/lib/sqlalchemy/dialects/mysql/pymysql.py
index 31226cea0..8df2ba03f 100644
--- a/lib/sqlalchemy/dialects/mysql/pymysql.py
+++ b/lib/sqlalchemy/dialects/mysql/pymysql.py
@@ -31,8 +31,7 @@ class MySQLDialect_pymysql(MySQLDialect_mysqldb):
driver = 'pymysql'
description_encoding = None
- if py3k:
- supports_unicode_statements = True
+ supports_unicode_statements = True
@classmethod
def dbapi(cls):
diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py
index 6df38e57e..9f375da94 100644
--- a/lib/sqlalchemy/dialects/oracle/base.py
+++ b/lib/sqlalchemy/dialects/oracle/base.py
@@ -213,6 +213,8 @@ is reflected and the type is reported as ``DATE``, the time-supporting
examining the type of column for use in special Python translations or
for migrating schemas to other database backends.
+.. _oracle_table_options:
+
Oracle Table Options
-------------------------
@@ -228,15 +230,63 @@ in conjunction with the :class:`.Table` construct:
.. versionadded:: 1.0.0
+* ``COMPRESS``::
+
+ Table('mytable', metadata, Column('data', String(32)),
+ oracle_compress=True)
+
+ Table('mytable', metadata, Column('data', String(32)),
+ oracle_compress=6)
+
+ The ``oracle_compress`` parameter accepts either an integer compression
+ level, or ``True`` to use the default compression level.
+
+.. versionadded:: 1.0.0
+
+.. _oracle_index_options:
+
+Oracle Specific Index Options
+-----------------------------
+
+Bitmap Indexes
+~~~~~~~~~~~~~~
+
+You can specify the ``oracle_bitmap`` parameter to create a bitmap index
+instead of a B-tree index::
+
+ Index('my_index', my_table.c.data, oracle_bitmap=True)
+
+Bitmap indexes cannot be unique and cannot be compressed. SQLAlchemy will not
+check for such limitations, only the database will.
+
+.. versionadded:: 1.0.0
+
+Index compression
+~~~~~~~~~~~~~~~~~
+
+Oracle has a more efficient storage mode for indexes containing lots of
+repeated values. Use the ``oracle_compress`` parameter to turn on key
+compression::
+
+ Index('my_index', my_table.c.data, oracle_compress=True)
+
+ Index('my_index', my_table.c.data1, my_table.c.data2, unique=True,
+ oracle_compress=1)
+
+The ``oracle_compress`` parameter accepts either an integer specifying the
+number of prefix columns to compress, or ``True`` to use the default (all
+columns for non-unique indexes, all but the last column for unique indexes).
+
+.. versionadded:: 1.0.0
+
"""
import re
from sqlalchemy import util, sql
-from sqlalchemy.engine import default, base, reflection
+from sqlalchemy.engine import default, reflection
from sqlalchemy.sql import compiler, visitors, expression
-from sqlalchemy.sql import (operators as sql_operators,
- functions as sql_functions)
+from sqlalchemy.sql import operators as sql_operators
from sqlalchemy import types as sqltypes, schema as sa_schema
from sqlalchemy.types import VARCHAR, NVARCHAR, CHAR, \
BLOB, CLOB, TIMESTAMP, FLOAT
@@ -549,6 +599,9 @@ class OracleCompiler(compiler.SQLCompiler):
def visit_false(self, expr, **kw):
return '0'
+ def get_cte_preamble(self, recursive):
+ return "WITH"
+
def get_select_hint_text(self, byfroms):
return " ".join(
"/*+ %s */" % text for table, text in byfroms.items()
@@ -619,22 +672,10 @@ class OracleCompiler(compiler.SQLCompiler):
return (self.dialect.identifier_preparer.format_sequence(seq) +
".nextval")
- def visit_alias(self, alias, asfrom=False, ashint=False, **kwargs):
- """Oracle doesn't like ``FROM table AS alias``. Is the AS standard
- SQL??
- """
-
- if asfrom or ashint:
- alias_name = isinstance(alias.name, expression._truncated_label) and \
- self._truncated_identifier("alias", alias.name) or alias.name
+ def get_render_as_alias_suffix(self, alias_name_text):
+ """Oracle doesn't like ``FROM table AS alias``"""
- if ashint:
- return alias_name
- elif asfrom:
- return self.process(alias.original, asfrom=asfrom, **kwargs) + \
- " " + self.preparer.format_alias(alias, alias_name)
- else:
- return self.process(alias.original, **kwargs)
+ return " " + alias_name_text
def returning_clause(self, stmt, returning_cols):
columns = []
@@ -795,9 +836,32 @@ class OracleDDLCompiler(compiler.DDLCompiler):
return text
- def visit_create_index(self, create, **kw):
- return super(OracleDDLCompiler, self).\
- visit_create_index(create, include_schema=True)
+ def visit_create_index(self, create):
+ index = create.element
+ self._verify_index_table(index)
+ preparer = self.preparer
+ text = "CREATE "
+ if index.unique:
+ text += "UNIQUE "
+ if index.dialect_options['oracle']['bitmap']:
+ text += "BITMAP "
+ text += "INDEX %s ON %s (%s)" % (
+ self._prepared_index_name(index, include_schema=True),
+ preparer.format_table(index.table, use_schema=True),
+ ', '.join(
+ self.sql_compiler.process(
+ expr,
+ include_table=False, literal_binds=True)
+ for expr in index.expressions)
+ )
+ if index.dialect_options['oracle']['compress'] is not False:
+ if index.dialect_options['oracle']['compress'] is True:
+ text += " COMPRESS"
+ else:
+ text += " COMPRESS %d" % (
+ index.dialect_options['oracle']['compress']
+ )
+ return text
def post_create_table(self, table):
table_opts = []
@@ -807,6 +871,14 @@ class OracleDDLCompiler(compiler.DDLCompiler):
on_commit_options = opts['on_commit'].replace("_", " ").upper()
table_opts.append('\n ON COMMIT %s' % on_commit_options)
+ if opts['compress']:
+ if opts['compress'] is True:
+ table_opts.append("\n COMPRESS")
+ else:
+ table_opts.append("\n COMPRESS FOR %s" % (
+ opts['compress']
+ ))
+
return ''.join(table_opts)
@@ -870,7 +942,12 @@ class OracleDialect(default.DefaultDialect):
construct_arguments = [
(sa_schema.Table, {
"resolve_synonyms": False,
- "on_commit": None
+ "on_commit": None,
+ "compress": False
+ }),
+ (sa_schema.Index, {
+ "bitmap": False,
+ "compress": False
})
]
@@ -902,6 +979,16 @@ class OracleDialect(default.DefaultDialect):
self.server_version_info < (9, )
@property
+ def _supports_table_compression(self):
+ return self.server_version_info and \
+ self.server_version_info >= (9, 2, )
+
+ @property
+ def _supports_table_compress_for(self):
+ return self.server_version_info and \
+ self.server_version_info >= (11, )
+
+ @property
def _supports_char_length(self):
return not self._is_oracle_8
@@ -1084,6 +1171,50 @@ class OracleDialect(default.DefaultDialect):
return [self.normalize_name(row[0]) for row in cursor]
@reflection.cache
+ def get_table_options(self, connection, table_name, schema=None, **kw):
+ options = {}
+
+ resolve_synonyms = kw.get('oracle_resolve_synonyms', False)
+ dblink = kw.get('dblink', '')
+ info_cache = kw.get('info_cache')
+
+ (table_name, schema, dblink, synonym) = \
+ self._prepare_reflection_args(connection, table_name, schema,
+ resolve_synonyms, dblink,
+ info_cache=info_cache)
+
+ params = {"table_name": table_name}
+
+ columns = ["table_name"]
+ if self._supports_table_compression:
+ columns.append("compression")
+ if self._supports_table_compress_for:
+ columns.append("compress_for")
+
+ text = "SELECT %(columns)s "\
+ "FROM ALL_TABLES%(dblink)s "\
+ "WHERE table_name = :table_name"
+
+ if schema is not None:
+ params['owner'] = schema
+ text += " AND owner = :owner "
+ text = text % {'dblink': dblink, 'columns': ", ".join(columns)}
+
+ result = connection.execute(sql.text(text), **params)
+
+ enabled = dict(DISABLED=False, ENABLED=True)
+
+ row = result.first()
+ if row:
+ if "compression" in row and enabled.get(row.compression, False):
+ if "compress_for" in row:
+ options['oracle_compress'] = row.compress_for
+ else:
+ options['oracle_compress'] = True
+
+ return options
+
+ @reflection.cache
def get_columns(self, connection, table_name, schema=None, **kw):
"""
@@ -1168,7 +1299,8 @@ class OracleDialect(default.DefaultDialect):
params = {'table_name': table_name}
text = \
- "SELECT a.index_name, a.column_name, b.uniqueness "\
+ "SELECT a.index_name, a.column_name, "\
+ "\nb.index_type, b.uniqueness, b.compression, b.prefix_length "\
"\nFROM ALL_IND_COLUMNS%(dblink)s a, "\
"\nALL_INDEXES%(dblink)s b "\
"\nWHERE "\
@@ -1194,6 +1326,7 @@ class OracleDialect(default.DefaultDialect):
dblink=dblink, info_cache=kw.get('info_cache'))
pkeys = pk_constraint['constrained_columns']
uniqueness = dict(NONUNIQUE=False, UNIQUE=True)
+ enabled = dict(DISABLED=False, ENABLED=True)
oracle_sys_col = re.compile(r'SYS_NC\d+\$', re.IGNORECASE)
@@ -1213,10 +1346,15 @@ class OracleDialect(default.DefaultDialect):
if rset.index_name != last_index_name:
remove_if_primary_key(index)
index = dict(name=self.normalize_name(rset.index_name),
- column_names=[])
+ column_names=[], dialect_options={})
indexes.append(index)
index['unique'] = uniqueness.get(rset.uniqueness, False)
+ if rset.index_type in ('BITMAP', 'FUNCTION-BASED BITMAP'):
+ index['dialect_options']['oracle_bitmap'] = True
+ if enabled.get(rset.compression, False):
+ index['dialect_options']['oracle_compress'] = rset.prefix_length
+
# filter out Oracle SYS_NC names. could also do an outer join
# to the all_tab_columns table and check for real col names there.
if not oracle_sys_col.match(rset.column_name):
diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py
index baa640eaa..034ee9076 100644
--- a/lib/sqlalchemy/dialects/postgresql/base.py
+++ b/lib/sqlalchemy/dialects/postgresql/base.py
@@ -1942,7 +1942,8 @@ class PGDialect(default.DefaultDialect):
cursor = connection.execute(
sql.text(
"select relname from pg_class c join pg_namespace n on "
- "n.oid=c.relnamespace where n.nspname=current_schema() "
+ "n.oid=c.relnamespace where "
+ "pg_catalog.pg_table_is_visible(c.oid) "
"and relname=:name",
bindparams=[
sql.bindparam('name', util.text_type(table_name),
diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
index 1a2a1ffe4..f67b2e3b0 100644
--- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py
+++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
@@ -159,6 +159,55 @@ defaults to ``utf-8``.
SQLAlchemy's own unicode encode/decode functionality is steadily becoming
obsolete as most DBAPIs now support unicode fully.
+Bound Parameter Styles
+----------------------
+
+The default parameter style for the psycopg2 dialect is "pyformat", where
+SQL is rendered using ``%(paramname)s`` style. This format has the limitation
+that it does not accommodate the unusual case of parameter names that
+actually contain percent or parenthesis symbols; as SQLAlchemy in many cases
+generates bound parameter names based on the name of a column, the presence
+of these characters in a column name can lead to problems.
+
+There are two solutions to the issue of a :class:`.schema.Column` that contains
+one of these characters in its name. One is to specify the
+:paramref:`.schema.Column.key` for columns that have such names::
+
+ measurement = Table('measurement', metadata,
+ Column('Size (meters)', Integer, key='size_meters')
+ )
+
+Above, an INSERT statement such as ``measurement.insert()`` will use
+``size_meters`` as the parameter name, and a SQL expression such as
+``measurement.c.size_meters > 10`` will derive the bound parameter name
+from the ``size_meters`` key as well.
+
+.. versionchanged:: 1.0.0 - SQL expressions will use :attr:`.Column.key`
+ as the source of naming when anonymous bound parameters are created
+ in SQL expressions; previously, this behavior only applied to
+ :meth:`.Table.insert` and :meth:`.Table.update` parameter names.
+
+The other solution is to use a positional format; psycopg2 allows use of the
+"format" paramstyle, which can be passed to
+:paramref:`.create_engine.paramstyle`::
+
+ engine = create_engine(
+ 'postgresql://scott:tiger@localhost:5432/test', paramstyle='format')
+
+With the above engine, instead of a statement like::
+
+ INSERT INTO measurement ("Size (meters)") VALUES (%(Size (meters))s)
+ {'Size (meters)': 1}
+
+we instead see::
+
+ INSERT INTO measurement ("Size (meters)") VALUES (%s)
+ (1, )
+
+Above, the named parameter dictionary has been converted into a
+positionally-ordered tuple.
+
+
Transactions
------------
diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py
index 335b35c94..ccd7f2539 100644
--- a/lib/sqlalchemy/dialects/sqlite/base.py
+++ b/lib/sqlalchemy/dialects/sqlite/base.py
@@ -9,6 +9,7 @@
.. dialect:: sqlite
:name: SQLite
+.. _sqlite_datetime:
Date and Time Types
-------------------
@@ -23,6 +24,20 @@ These types represent dates and times as ISO formatted strings, which also
nicely support ordering. There's no reliance on typical "libc" internals for
these functions so historical dates are fully supported.
+Ensuring Text affinity
+^^^^^^^^^^^^^^^^^^^^^^
+
+The DDL rendered for these types is the standard ``DATE``, ``TIME``
+and ``DATETIME`` indicators. However, custom storage formats can also be
+applied to these types. When the
+storage format is detected as containing no characters other than digits,
+the DDL for these types is rendered as ``DATE_CHAR``, ``TIME_CHAR``, and
+``DATETIME_CHAR``, so that the column continues to have textual affinity.
+
+.. seealso::
+
+ `Type Affinity <http://www.sqlite.org/datatype3.html#affinity>`_ - in the SQLite documentation
+
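+For example, a sketch of a custom storage format consisting only of digits,
+which would otherwise imply numeric affinity (the format string here is
+illustrative)::
+
+    from sqlalchemy.dialects.sqlite import DATETIME
+
+    dt = DATETIME(
+        storage_format="%(year)04d%(month)02d%(day)02d"
+                       "%(hour)02d%(minute)02d%(second)02d"
+    )
+
+A column using this type is expected to render ``DATETIME_CHAR`` in its DDL.
+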
.. _sqlite_autoincrement:
SQLite Auto Incrementing Behavior
@@ -255,7 +270,7 @@ from ... import util
from ...engine import default, reflection
from ...sql import compiler
-from ...types import (BLOB, BOOLEAN, CHAR, DATE, DECIMAL, FLOAT,
+from ...types import (BLOB, BOOLEAN, CHAR, DECIMAL, FLOAT,
INTEGER, REAL, NUMERIC, SMALLINT, TEXT,
TIMESTAMP, VARCHAR)
@@ -271,6 +286,25 @@ class _DateTimeMixin(object):
if storage_format is not None:
self._storage_format = storage_format
+ @property
+ def format_is_text_affinity(self):
+ """return True if the storage format will automatically imply
+ a TEXT affinity.
+
+        If the storage format contains no non-numeric characters,
+        the column would otherwise receive NUMERIC affinity on SQLite;
+        in this case, the type generates its DDL as DATE_CHAR,
+        DATETIME_CHAR, TIME_CHAR.
+
+ .. versionadded:: 1.0.0
+
+ """
+ spec = self._storage_format % {
+ "year": 0, "month": 0, "day": 0, "hour": 0,
+ "minute": 0, "second": 0, "microsecond": 0
+ }
+ return bool(re.search(r'[^0-9]', spec))
+
def adapt(self, cls, **kw):
if issubclass(cls, _DateTimeMixin):
if self._storage_format:
@@ -526,7 +560,9 @@ ischema_names = {
'BOOLEAN': sqltypes.BOOLEAN,
'CHAR': sqltypes.CHAR,
'DATE': sqltypes.DATE,
+ 'DATE_CHAR': sqltypes.DATE,
'DATETIME': sqltypes.DATETIME,
+ 'DATETIME_CHAR': sqltypes.DATETIME,
'DOUBLE': sqltypes.FLOAT,
'DECIMAL': sqltypes.DECIMAL,
'FLOAT': sqltypes.FLOAT,
@@ -537,6 +573,7 @@ ischema_names = {
'SMALLINT': sqltypes.SMALLINT,
'TEXT': sqltypes.TEXT,
'TIME': sqltypes.TIME,
+ 'TIME_CHAR': sqltypes.TIME,
'TIMESTAMP': sqltypes.TIMESTAMP,
'VARCHAR': sqltypes.VARCHAR,
'NVARCHAR': sqltypes.NVARCHAR,
@@ -646,8 +683,8 @@ class SQLiteDDLCompiler(compiler.DDLCompiler):
def visit_foreign_key_constraint(self, constraint):
- local_table = list(constraint._elements.values())[0].parent.table
- remote_table = list(constraint._elements.values())[0].column.table
+ local_table = constraint.elements[0].parent.table
+ remote_table = constraint.elements[0].column.table
if local_table.schema != remote_table.schema:
return None
@@ -670,6 +707,27 @@ class SQLiteTypeCompiler(compiler.GenericTypeCompiler):
def visit_large_binary(self, type_):
return self.visit_BLOB(type_)
+ def visit_DATETIME(self, type_):
+ if not isinstance(type_, _DateTimeMixin) or \
+ type_.format_is_text_affinity:
+ return super(SQLiteTypeCompiler, self).visit_DATETIME(type_)
+ else:
+ return "DATETIME_CHAR"
+
+ def visit_DATE(self, type_):
+ if not isinstance(type_, _DateTimeMixin) or \
+ type_.format_is_text_affinity:
+ return super(SQLiteTypeCompiler, self).visit_DATE(type_)
+ else:
+ return "DATE_CHAR"
+
+ def visit_TIME(self, type_):
+ if not isinstance(type_, _DateTimeMixin) or \
+ type_.format_is_text_affinity:
+ return super(SQLiteTypeCompiler, self).visit_TIME(type_)
+ else:
+ return "TIME_CHAR"
+
class SQLiteIdentifierPreparer(compiler.IdentifierPreparer):
reserved_words = set([
diff --git a/lib/sqlalchemy/engine/__init__.py b/lib/sqlalchemy/engine/__init__.py
index 68145f5cd..cf75871bf 100644
--- a/lib/sqlalchemy/engine/__init__.py
+++ b/lib/sqlalchemy/engine/__init__.py
@@ -292,6 +292,17 @@ def create_engine(*args, **kwargs):
be used instead. Can be used for testing of DBAPIs as well as to
inject "mock" DBAPI implementations into the :class:`.Engine`.
+ :param paramstyle=None: The `paramstyle <http://legacy.python.org/dev/peps/pep-0249/#paramstyle>`_
+ to use when rendering bound parameters. This style defaults to the
+ one recommended by the DBAPI itself, which is retrieved from the
+ ``.paramstyle`` attribute of the DBAPI. However, most DBAPIs accept
+ more than one paramstyle, and in particular it may be desirable
+ to change a "named" paramstyle into a "positional" one, or vice versa.
+ When this attribute is passed, it should be one of the values
+ ``"qmark"``, ``"numeric"``, ``"named"``, ``"format"`` or
+ ``"pyformat"``, and should correspond to a parameter style known
+ to be supported by the DBAPI in use.
+
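+      For example, to select psycopg2's positional "format" style in place
+      of its default "pyformat" (a sketch; the value passed must be a style
+      the DBAPI actually accepts)::
+
+          engine = create_engine(
+              "postgresql://scott:tiger@localhost/test",
+              paramstyle="format")
+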
:param pool=None: an already-constructed instance of
:class:`~sqlalchemy.pool.Pool`, such as a
:class:`~sqlalchemy.pool.QueuePool` instance. If non-None, this
diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py
index dd82be1d1..23348469d 100644
--- a/lib/sqlalchemy/engine/base.py
+++ b/lib/sqlalchemy/engine/base.py
@@ -265,18 +265,18 @@ class Connection(Connectable):
try:
return self.__connection
except AttributeError:
- return self._revalidate_connection()
+ return self._revalidate_connection(_wrap=True)
- def _revalidate_connection(self):
+ def _revalidate_connection(self, _wrap):
if self.__branch_from:
- return self.__branch_from._revalidate_connection()
-
+ return self.__branch_from._revalidate_connection(_wrap=_wrap)
if self.__can_reconnect and self.__invalid:
if self.__transaction is not None:
raise exc.InvalidRequestError(
"Can't reconnect until invalid "
"transaction is rolled back")
- self.__connection = self.engine.raw_connection()
+ self.__connection = self.engine.raw_connection(
+ _connection=self, _wrap=_wrap)
self.__invalid = False
return self.__connection
raise exc.ResourceClosedError("This Connection is closed")
@@ -817,7 +817,7 @@ class Connection(Connectable):
try:
conn = self.__connection
except AttributeError:
- conn = self._revalidate_connection()
+ conn = self._revalidate_connection(_wrap=False)
dialect = self.dialect
ctx = dialect.execution_ctx_cls._init_default(
@@ -955,13 +955,14 @@ class Connection(Connectable):
try:
conn = self.__connection
except AttributeError:
- conn = self._revalidate_connection()
+ conn = self._revalidate_connection(_wrap=False)
context = constructor(dialect, self, conn, *args)
except Exception as e:
- self._handle_dbapi_exception(e,
- util.text_type(statement), parameters,
- None, None)
+ self._handle_dbapi_exception(
+ e,
+ util.text_type(statement), parameters,
+ None, None)
if context.compiled:
context.pre_exec()
@@ -1149,7 +1150,10 @@ class Connection(Connectable):
self._is_disconnect = \
isinstance(e, self.dialect.dbapi.Error) and \
not self.closed and \
- self.dialect.is_disconnect(e, self.__connection, cursor)
+ self.dialect.is_disconnect(
+ e,
+ self.__connection if not self.invalidated else None,
+ cursor)
if context:
context.is_disconnect = self._is_disconnect
@@ -1194,7 +1198,8 @@ class Connection(Connectable):
# new handle_error event
ctx = ExceptionContextImpl(
- e, sqlalchemy_exception, self, cursor, statement,
+ e, sqlalchemy_exception, self.engine,
+ self, cursor, statement,
parameters, context, self._is_disconnect)
for fn in self.dispatch.handle_error:
@@ -1242,6 +1247,59 @@ class Connection(Connectable):
if self.should_close_with_result:
self.close()
+ @classmethod
+ def _handle_dbapi_exception_noconnection(
+ cls, e, dialect, engine, connection):
+
+ exc_info = sys.exc_info()
+
+ is_disconnect = dialect.is_disconnect(e, None, None)
+
+ should_wrap = isinstance(e, dialect.dbapi.Error)
+
+ if should_wrap:
+ sqlalchemy_exception = exc.DBAPIError.instance(
+ None,
+ None,
+ e,
+ dialect.dbapi.Error,
+ connection_invalidated=is_disconnect)
+ else:
+ sqlalchemy_exception = None
+
+ newraise = None
+
+ if engine._has_events:
+ ctx = ExceptionContextImpl(
+ e, sqlalchemy_exception, engine, connection, None, None,
+ None, None, is_disconnect)
+ for fn in engine.dispatch.handle_error:
+ try:
+ # handler returns an exception;
+ # call next handler in a chain
+ per_fn = fn(ctx)
+ if per_fn is not None:
+ ctx.chained_exception = newraise = per_fn
+ except Exception as _raised:
+ # handler raises an exception - stop processing
+ newraise = _raised
+ break
+
+ if sqlalchemy_exception and \
+ is_disconnect != ctx.is_disconnect:
+ sqlalchemy_exception.connection_invalidated = \
+ is_disconnect = ctx.is_disconnect
+
+ if newraise:
+ util.raise_from_cause(newraise, exc_info)
+ elif should_wrap:
+ util.raise_from_cause(
+ sqlalchemy_exception,
+ exc_info
+ )
+ else:
+ util.reraise(*exc_info)
+
def default_schema_name(self):
return self.engine.dialect.get_default_schema_name(self)
@@ -1320,8 +1378,9 @@ class ExceptionContextImpl(ExceptionContext):
"""Implement the :class:`.ExceptionContext` interface."""
def __init__(self, exception, sqlalchemy_exception,
- connection, cursor, statement, parameters,
+ engine, connection, cursor, statement, parameters,
context, is_disconnect):
+ self.engine = engine
self.connection = connection
self.sqlalchemy_exception = sqlalchemy_exception
self.original_exception = exception
@@ -1898,7 +1957,17 @@ class Engine(Connectable, log.Identified):
"""
return self.run_callable(self.dialect.has_table, table_name, schema)
- def raw_connection(self):
+ def _wrap_pool_connect(self, fn, connection, wrap=True):
+ if not wrap:
+ return fn()
+ dialect = self.dialect
+ try:
+ return fn()
+ except dialect.dbapi.Error as e:
+ Connection._handle_dbapi_exception_noconnection(
+ e, dialect, self, connection)
+
+ def raw_connection(self, _connection=None, _wrap=True):
"""Return a "raw" DBAPI connection from the connection pool.
The returned object is a proxied version of the DBAPI
@@ -1914,8 +1983,8 @@ class Engine(Connectable, log.Identified):
:meth:`.Engine.connect` method.
"""
-
- return self.pool.unique_connection()
+ return self._wrap_pool_connect(
+ self.pool.unique_connection, _connection, _wrap)
class OptionEngine(Engine):
diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py
index 0ad2efae0..5f66e54b5 100644
--- a/lib/sqlalchemy/engine/interfaces.py
+++ b/lib/sqlalchemy/engine/interfaces.py
@@ -917,7 +917,23 @@ class ExceptionContext(object):
connection = None
"""The :class:`.Connection` in use during the exception.
- This member is always present.
+ This member is present, except in the case of a failure when
+ first connecting.
+
+ .. seealso::
+
+ :attr:`.ExceptionContext.engine`
+
+
+ """
+
+ engine = None
+ """The :class:`.Engine` in use during the exception.
+
+ This member should always be present, even in the case of a failure
+ when first connecting.
+
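+    E.g., a minimal sketch of a :meth:`.ConnectionEvents.handle_error`
+    listener using this attribute, assuming ``engine`` and a ``log``
+    object are already set up::
+
+        from sqlalchemy import event
+
+        @event.listens_for(engine, "handle_error")
+        def receive_error(context):
+            if context.connection is None:
+                log.error("connect failed for %s", context.engine.url)
+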
+ .. versionadded:: 1.0.0
"""
diff --git a/lib/sqlalchemy/engine/reflection.py b/lib/sqlalchemy/engine/reflection.py
index 2a1def86a..25f084c15 100644
--- a/lib/sqlalchemy/engine/reflection.py
+++ b/lib/sqlalchemy/engine/reflection.py
@@ -394,6 +394,12 @@ class Inspector(object):
unique
boolean
+ dialect_options
+ dict of dialect-specific index options. May not be present
+ for all dialects.
+
+ .. versionadded:: 1.0.0
+
:param table_name: string name of the table. For special quoting,
use :class:`.quoted_name`.
@@ -642,6 +648,8 @@ class Inspector(object):
columns = index_d['column_names']
unique = index_d['unique']
flavor = index_d.get('type', 'index')
+ dialect_options = index_d.get('dialect_options', {})
+
duplicates = index_d.get('duplicates_constraint')
if include_columns and \
not set(columns).issubset(include_columns):
@@ -667,7 +675,10 @@ class Inspector(object):
else:
idx_cols.append(idx_col)
- sa_schema.Index(name, *idx_cols, **dict(unique=unique))
+ sa_schema.Index(
+ name, *idx_cols,
+ **dict(list(dialect_options.items()) + [('unique', unique)])
+ )
def _reflect_unique_constraints(
self, table_name, schema, table, cols_by_orig_name,
diff --git a/lib/sqlalchemy/engine/strategies.py b/lib/sqlalchemy/engine/strategies.py
index 398ef8df6..fd665ad03 100644
--- a/lib/sqlalchemy/engine/strategies.py
+++ b/lib/sqlalchemy/engine/strategies.py
@@ -86,16 +86,7 @@ class DefaultEngineStrategy(EngineStrategy):
pool = pop_kwarg('pool', None)
if pool is None:
def connect():
- try:
- return dialect.connect(*cargs, **cparams)
- except dialect.dbapi.Error as e:
- invalidated = dialect.is_disconnect(e, None, None)
- util.raise_from_cause(
- exc.DBAPIError.instance(
- None, None, e, dialect.dbapi.Error,
- connection_invalidated=invalidated
- )
- )
+ return dialect.connect(*cargs, **cparams)
creator = pop_kwarg('creator', connect)
diff --git a/lib/sqlalchemy/engine/threadlocal.py b/lib/sqlalchemy/engine/threadlocal.py
index 637523a0e..824b68fdf 100644
--- a/lib/sqlalchemy/engine/threadlocal.py
+++ b/lib/sqlalchemy/engine/threadlocal.py
@@ -59,7 +59,10 @@ class TLEngine(base.Engine):
# guards against pool-level reapers, if desired.
# or not connection.connection.is_valid:
connection = self._tl_connection_cls(
- self, self.pool.connect(), **kw)
+ self,
+ self._wrap_pool_connect(
+ self.pool.connect, connection, wrap=True),
+ **kw)
self._connections.conn = weakref.ref(connection)
return connection._increment_connect()
diff --git a/lib/sqlalchemy/events.py b/lib/sqlalchemy/events.py
index b4f057b0a..8600c20f5 100644
--- a/lib/sqlalchemy/events.py
+++ b/lib/sqlalchemy/events.py
@@ -420,6 +420,12 @@ class ConnectionEvents(event.Events):
context, executemany):
log.info("Received statement: %s" % statement)
+    When the methods are called with a ``statement`` parameter, such as in
+ :meth:`.after_cursor_execute`, :meth:`.before_cursor_execute` and
+ :meth:`.dbapi_error`, the statement is the exact SQL string that was
+ prepared for transmission to the DBAPI ``cursor`` in the connection's
+ :class:`.Dialect`.
+
The :meth:`.before_execute` and :meth:`.before_cursor_execute`
events can also be established with the ``retval=True`` flag, which
allows modification of the statement and parameters to be sent
@@ -549,9 +555,8 @@ class ConnectionEvents(event.Events):
def before_cursor_execute(self, conn, cursor, statement,
parameters, context, executemany):
"""Intercept low-level cursor execute() events before execution,
- receiving the string
- SQL statement and DBAPI-specific parameter list to be invoked
- against a cursor.
+ receiving the string SQL statement and DBAPI-specific parameter list to
+ be invoked against a cursor.
This event is a good choice for logging as well as late modifications
to the SQL string. It's less ideal for parameter modifications except
@@ -571,7 +576,7 @@ class ConnectionEvents(event.Events):
:param conn: :class:`.Connection` object
:param cursor: DBAPI cursor object
- :param statement: string SQL statement
+        :param statement: string SQL statement, as it will be passed to the DBAPI
:param parameters: Dictionary, tuple, or list of parameters being
passed to the ``execute()`` or ``executemany()`` method of the
DBAPI ``cursor``. In some cases may be ``None``.
@@ -596,7 +601,7 @@ class ConnectionEvents(event.Events):
:param cursor: DBAPI cursor object. Will have results pending
if the statement was a SELECT, but these should not be consumed
as they will be needed by the :class:`.ResultProxy`.
- :param statement: string SQL statement
+ :param statement: string SQL statement, as passed to the DBAPI
:param parameters: Dictionary, tuple, or list of parameters being
passed to the ``execute()`` or ``executemany()`` method of the
DBAPI ``cursor``. In some cases may be ``None``.
@@ -640,7 +645,7 @@ class ConnectionEvents(event.Events):
:param conn: :class:`.Connection` object
:param cursor: DBAPI cursor object
- :param statement: string SQL statement
+ :param statement: string SQL statement, as passed to the DBAPI
:param parameters: Dictionary, tuple, or list of parameters being
passed to the ``execute()`` or ``executemany()`` method of the
DBAPI ``cursor``. In some cases may be ``None``.
@@ -734,6 +739,12 @@ class ConnectionEvents(event.Events):
.. versionadded:: 0.9.7 Added the
:meth:`.ConnectionEvents.handle_error` hook.
+ .. versionchanged:: 1.0.0 The :meth:`.handle_error` event is now
+ invoked when an :class:`.Engine` fails during the initial
+ call to :meth:`.Engine.connect`, as well as when a
+ :class:`.Connection` object encounters an error during a
+ reconnect operation.
+
.. versionchanged:: 1.0.0 The :meth:`.handle_error` event is
not fired off when a dialect makes use of the
``skip_user_error_events`` execution option. This is used
diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py
index 082dae054..c61d93230 100644
--- a/lib/sqlalchemy/orm/mapper.py
+++ b/lib/sqlalchemy/orm/mapper.py
@@ -1581,6 +1581,8 @@ class Mapper(InspectionAttr):
self,
prop,
))
+ oldprop = self._props[key]
+ self._path_registry.pop(oldprop, None)
self._props[key] = prop
diff --git a/lib/sqlalchemy/orm/path_registry.py b/lib/sqlalchemy/orm/path_registry.py
index f10a125a8..d4dbf29a0 100644
--- a/lib/sqlalchemy/orm/path_registry.py
+++ b/lib/sqlalchemy/orm/path_registry.py
@@ -13,6 +13,9 @@ from .. import util
from .. import exc
from itertools import chain
from .base import class_mapper
+import logging
+
+log = logging.getLogger(__name__)
def _unreduce_path(path):
@@ -54,9 +57,11 @@ class PathRegistry(object):
self.path == other.path
def set(self, attributes, key, value):
+ log.debug("set '%s' on path '%s' to '%s'", key, self, value)
attributes[(key, self.path)] = value
def setdefault(self, attributes, key, value):
+ log.debug("setdefault '%s' on path '%s' to '%s'", key, self, value)
attributes.setdefault((key, self.path), value)
def get(self, attributes, key, value=None):
@@ -184,6 +189,11 @@ class PropRegistry(PathRegistry):
self.parent = parent
self.path = parent.path + (prop,)
+ def __str__(self):
+ return " -> ".join(
+ str(elem) for elem in self.path
+ )
+
@util.memoized_property
def has_entity(self):
return hasattr(self.prop, "mapper")
diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py
index f07060825..9b7747e15 100644
--- a/lib/sqlalchemy/orm/query.py
+++ b/lib/sqlalchemy/orm/query.py
@@ -75,6 +75,7 @@ class Query(object):
_having = None
_distinct = False
_prefixes = None
+ _suffixes = None
_offset = None
_limit = None
_for_update_arg = None
@@ -1003,7 +1004,7 @@ class Query(object):
'_limit', '_offset',
'_joinpath', '_joinpoint',
'_distinct', '_having',
- '_prefixes',
+ '_prefixes', '_suffixes'
):
self.__dict__.pop(attr, None)
self._set_select_from([fromclause], True)
@@ -1740,6 +1741,14 @@ class Query(object):
anonymously aliased. Subsequent calls to :meth:`~.Query.filter`
and similar will adapt the incoming criterion to the target
alias, until :meth:`~.Query.reset_joinpoint` is called.
+ :param isouter=False: If True, the join used will be a left outer join,
+ just as if the :meth:`.Query.outerjoin` method were called. This
+ flag is here to maintain consistency with the same flag as accepted
+ by :meth:`.FromClause.join` and other Core constructs.
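+
+          E.g., a sketch, assuming ``User.addresses`` is a configured
+          relationship::
+
+              q = session.query(User).join(User.addresses, isouter=True)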
+
+
+ .. versionadded:: 1.0.0
+
:param from_joinpoint=False: When using ``aliased=True``, a setting
of True here will cause the join to be from the most recent
joined target, rather than starting back from the original
@@ -1757,13 +1766,15 @@ class Query(object):
SQLAlchemy versions was the primary ORM-level joining interface.
"""
- aliased, from_joinpoint = kwargs.pop('aliased', False),\
- kwargs.pop('from_joinpoint', False)
+ aliased, from_joinpoint, isouter = kwargs.pop('aliased', False),\
+ kwargs.pop('from_joinpoint', False),\
+ kwargs.pop('isouter', False)
if kwargs:
raise TypeError("unknown arguments: %s" %
                            ','.join(kwargs.keys()))
return self._join(props,
- outerjoin=False, create_aliases=aliased,
+ outerjoin=isouter, create_aliases=aliased,
from_joinpoint=from_joinpoint)
def outerjoin(self, *props, **kwargs):
@@ -2349,12 +2360,38 @@ class Query(object):
.. versionadded:: 0.7.7
+ .. seealso::
+
+ :meth:`.HasPrefixes.prefix_with`
+
"""
if self._prefixes:
self._prefixes += prefixes
else:
self._prefixes = prefixes
+ @_generative()
+ def suffix_with(self, *suffixes):
+ """Apply the suffix to the query and return the newly resulting
+ ``Query``.
+
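+        E.g., a hypothetical sketch; the suffix string itself must be
+        syntax that the target backend accepts::
+
+            q = session.query(User).suffix_with("FOR UPDATE SKIP LOCKED")
+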
+ :param \*suffixes: optional suffixes, typically strings,
+ not using any commas.
+
+ .. versionadded:: 1.0.0
+
+ .. seealso::
+
+ :meth:`.Query.prefix_with`
+
+ :meth:`.HasSuffixes.suffix_with`
+
+ """
+ if self._suffixes:
+ self._suffixes += suffixes
+ else:
+ self._suffixes = suffixes
+
def all(self):
"""Return the results represented by this ``Query`` as a list.
@@ -2591,6 +2628,7 @@ class Query(object):
'offset': self._offset,
'distinct': self._distinct,
'prefixes': self._prefixes,
+ 'suffixes': self._suffixes,
'group_by': self._group_by or None,
'having': self._having
}
@@ -3396,7 +3434,6 @@ class _BundleEntity(_QueryEntity):
self.supports_single_entity = self.bundle.single_entity
-
@property
def entity_zero(self):
for ent in self._entities:
diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py
index cdb501c14..d95f17f64 100644
--- a/lib/sqlalchemy/orm/strategies.py
+++ b/lib/sqlalchemy/orm/strategies.py
@@ -373,7 +373,7 @@ class LazyLoader(AbstractRelationshipLoader):
self._equated_columns[c] = self._equated_columns[col]
self.logger.info("%s will use query.get() to "
- "optimize instance loads" % self)
+ "optimize instance loads", self)
def init_class_attribute(self, mapper):
self.is_class_level = True
diff --git a/lib/sqlalchemy/orm/strategy_options.py b/lib/sqlalchemy/orm/strategy_options.py
index 4f986193e..276da2ae0 100644
--- a/lib/sqlalchemy/orm/strategy_options.py
+++ b/lib/sqlalchemy/orm/strategy_options.py
@@ -161,11 +161,14 @@ class Load(Generative, MapperOption):
ext_info = inspect(ac)
path_element = ext_info.mapper
+ existing = path.entity_path[prop].get(
+ self.context, "path_with_polymorphic")
if not ext_info.is_aliased_class:
ac = orm_util.with_polymorphic(
ext_info.mapper.base_mapper,
ext_info.mapper, aliased=True,
- _use_mapper_path=True)
+ _use_mapper_path=True,
+ _existing_alias=existing)
path.entity_path[prop].set(
self.context, "path_with_polymorphic", inspect(ac))
path = path[prop][path_element]
@@ -176,6 +179,9 @@ class Load(Generative, MapperOption):
path = path.entity_path
return path
+ def __str__(self):
+ return "Load(strategy=%r)" % self.strategy
+
def _coerce_strat(self, strategy):
if strategy is not None:
strategy = tuple(sorted(strategy.items()))
diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py
index ad610a4ac..4be8d19ff 100644
--- a/lib/sqlalchemy/orm/util.py
+++ b/lib/sqlalchemy/orm/util.py
@@ -543,8 +543,13 @@ class AliasedInsp(InspectionAttr):
mapper, self)
def __repr__(self):
- return '<AliasedInsp at 0x%x; %s>' % (
- id(self), self.class_.__name__)
+ if self.with_polymorphic_mappers:
+ with_poly = "(%s)" % ", ".join(
+ mp.class_.__name__ for mp in self.with_polymorphic_mappers)
+ else:
+ with_poly = ""
+ return '<AliasedInsp at 0x%x; %s%s>' % (
+ id(self), self.class_.__name__, with_poly)
inspection._inspects(AliasedClass)(lambda target: target._aliased_insp)
@@ -648,7 +653,8 @@ def aliased(element, alias=None, name=None, flat=False, adapt_on_names=False):
def with_polymorphic(base, classes, selectable=False,
flat=False,
polymorphic_on=None, aliased=False,
- innerjoin=False, _use_mapper_path=False):
+ innerjoin=False, _use_mapper_path=False,
+ _existing_alias=None):
"""Produce an :class:`.AliasedClass` construct which specifies
columns for descendant mappers of the given base.
@@ -713,6 +719,16 @@ def with_polymorphic(base, classes, selectable=False,
only be specified if querying for one specific subtype only
"""
primary_mapper = _class_to_mapper(base)
+ if _existing_alias:
+ assert _existing_alias.mapper is primary_mapper
+ classes = util.to_set(classes)
+ new_classes = set([
+ mp.class_ for mp in
+ _existing_alias.with_polymorphic_mappers])
+ if classes == new_classes:
+ return _existing_alias
+ else:
+ classes = classes.union(new_classes)
mappers, selectable = primary_mapper.\
_with_polymorphic_args(classes, selectable,
innerjoin=innerjoin)
diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py
index 5fa78ad0f..9304bba9f 100644
--- a/lib/sqlalchemy/sql/compiler.py
+++ b/lib/sqlalchemy/sql/compiler.py
@@ -82,6 +82,7 @@ OPERATORS = {
operators.eq: ' = ',
operators.concat_op: ' || ',
operators.match_op: ' MATCH ',
+ operators.notmatch_op: ' NOT MATCH ',
operators.in_op: ' IN ',
operators.notin_op: ' NOT IN ',
operators.comma_op: ', ',
@@ -862,14 +863,18 @@ class SQLCompiler(Compiled):
else:
return "%s = 0" % self.process(element.element, **kw)
- def visit_binary(self, binary, **kw):
+ def visit_notmatch_op_binary(self, binary, operator, **kw):
+ return "NOT %s" % self.visit_binary(
+ binary, override_operator=operators.match_op)
+
+ def visit_binary(self, binary, override_operator=None, **kw):
# don't allow "? = ?" to render
if self.ansi_bind_rules and \
isinstance(binary.left, elements.BindParameter) and \
isinstance(binary.right, elements.BindParameter):
kw['literal_binds'] = True
- operator_ = binary.operator
+ operator_ = override_operator or binary.operator
disp = getattr(self, "visit_%s_binary" % operator_.__name__, None)
if disp:
return disp(binary, operator_, **kw)
@@ -1188,12 +1193,16 @@ class SQLCompiler(Compiled):
self, asfrom=True, **kwargs
)
+ if cte._suffixes:
+ text += " " + self._generate_prefixes(
+ cte, cte._suffixes, **kwargs)
+
self.ctes[cte] = text
if asfrom:
if cte_alias_name:
text = self.preparer.format_alias(cte, cte_alias_name)
- text += " AS " + cte_name
+ text += self.get_render_as_alias_suffix(cte_name)
else:
return self.preparer.format_alias(cte, cte_name)
return text
@@ -1212,8 +1221,8 @@ class SQLCompiler(Compiled):
elif asfrom:
ret = alias.original._compiler_dispatch(self,
asfrom=True, **kwargs) + \
- " AS " + \
- self.preparer.format_alias(alias, alias_name)
+ self.get_render_as_alias_suffix(
+ self.preparer.format_alias(alias, alias_name))
if fromhints and alias in fromhints:
ret = self.format_from_hint_text(ret, alias,
@@ -1223,6 +1232,9 @@ class SQLCompiler(Compiled):
else:
return alias.original._compiler_dispatch(self, **kwargs)
+ def get_render_as_alias_suffix(self, alias_name_text):
+ return " AS " + alias_name_text
+
def _add_to_result_map(self, keyname, name, objects, type_):
if not self.dialect.case_sensitive:
keyname = keyname.lower()
@@ -1549,6 +1561,10 @@ class SQLCompiler(Compiled):
compound_index == 0 and toplevel:
text = self._render_cte_clause() + text
+ if select._suffixes:
+ text += " " + self._generate_prefixes(
+ select, select._suffixes, **kwargs)
+
self.stack.pop(-1)
if asfrom and parens:
@@ -1729,6 +1745,12 @@ class SQLCompiler(Compiled):
)
def visit_insert(self, insert_stmt, **kw):
+ self.stack.append(
+ {'correlate_froms': set(),
+ "iswrapper": False,
+ "asfrom_froms": set(),
+ "selectable": insert_stmt})
+
self.isinsert = True
crud_params = crud._get_crud_params(self, insert_stmt, **kw)
@@ -1812,6 +1834,8 @@ class SQLCompiler(Compiled):
if self.returning and not self.returning_precedes_values:
text += " " + returning_clause
+ self.stack.pop(-1)
+
return text
def update_limit_clause(self, update_stmt):
@@ -2278,14 +2302,14 @@ class DDLCompiler(Compiled):
formatted_name = self.preparer.format_constraint(constraint)
if formatted_name is not None:
text += "CONSTRAINT %s " % formatted_name
- remote_table = list(constraint._elements.values())[0].column.table
+ remote_table = list(constraint.elements)[0].column.table
text += "FOREIGN KEY(%s) REFERENCES %s (%s)" % (
', '.join(preparer.quote(f.parent.name)
- for f in constraint._elements.values()),
+ for f in constraint.elements),
self.define_constraint_remote_table(
constraint, remote_table, preparer),
', '.join(preparer.quote(f.column.name)
- for f in constraint._elements.values())
+ for f in constraint.elements)
)
text += self.define_constraint_match(constraint)
text += self.define_constraint_cascades(constraint)
diff --git a/lib/sqlalchemy/sql/default_comparator.py b/lib/sqlalchemy/sql/default_comparator.py
index 4f53e2979..d26fdc455 100644
--- a/lib/sqlalchemy/sql/default_comparator.py
+++ b/lib/sqlalchemy/sql/default_comparator.py
@@ -68,8 +68,12 @@ class _DefaultColumnComparator(operators.ColumnOperators):
def _boolean_compare(self, expr, op, obj, negate=None, reverse=False,
_python_is_types=(util.NoneType, bool),
+                         result_type=None,
**kwargs):
+ if result_type is None:
+ result_type = type_api.BOOLEANTYPE
+
if isinstance(obj, _python_is_types + (Null, True_, False_)):
# allow x ==/!= True/False to be treated as a literal.
@@ -80,7 +84,7 @@ class _DefaultColumnComparator(operators.ColumnOperators):
return BinaryExpression(expr,
_literal_as_text(obj),
op,
- type_=type_api.BOOLEANTYPE,
+ type_=result_type,
negate=negate, modifiers=kwargs)
else:
# all other None/True/False uses IS, IS NOT
@@ -103,13 +107,13 @@ class _DefaultColumnComparator(operators.ColumnOperators):
return BinaryExpression(obj,
expr,
op,
- type_=type_api.BOOLEANTYPE,
+ type_=result_type,
negate=negate, modifiers=kwargs)
else:
return BinaryExpression(expr,
obj,
op,
- type_=type_api.BOOLEANTYPE,
+ type_=result_type,
negate=negate, modifiers=kwargs)
def _binary_operate(self, expr, op, obj, reverse=False, result_type=None,
@@ -125,7 +129,8 @@ class _DefaultColumnComparator(operators.ColumnOperators):
op, result_type = left.comparator._adapt_expression(
op, right.comparator)
- return BinaryExpression(left, right, op, type_=result_type)
+ return BinaryExpression(
+ left, right, op, type_=result_type, modifiers=kw)
def _conjunction_operate(self, expr, op, other, **kw):
if op is operators.and_:
@@ -216,11 +221,16 @@ class _DefaultColumnComparator(operators.ColumnOperators):
def _match_impl(self, expr, op, other, **kw):
"""See :meth:`.ColumnOperators.match`."""
+
return self._boolean_compare(
expr, operators.match_op,
self._check_literal(
expr, operators.match_op, other),
- **kw)
+ result_type=type_api.MATCHTYPE,
+ negate=operators.notmatch_op
+ if op is operators.match_op else operators.match_op,
+ **kw
+ )
def _distinct_impl(self, expr, op, **kw):
"""See :meth:`.ColumnOperators.distinct`."""
@@ -282,6 +292,7 @@ class _DefaultColumnComparator(operators.ColumnOperators):
"isnot": (_boolean_compare, operators.isnot),
"collate": (_collate_impl,),
"match_op": (_match_impl,),
+ "notmatch_op": (_match_impl,),
"distinct_op": (_distinct_impl,),
"between_op": (_between_impl, ),
"notbetween_op": (_between_impl, ),
diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py
index fa9b66024..30965c801 100644
--- a/lib/sqlalchemy/sql/elements.py
+++ b/lib/sqlalchemy/sql/elements.py
@@ -1092,7 +1092,7 @@ class BindParameter(ColumnElement):
"""
if isinstance(key, ColumnClause):
type_ = key.type
- key = key.name
+ key = key.key
if required is NO_ARG:
required = (value is NO_ARG and callable_ is None)
if value is NO_ARG:
@@ -2763,7 +2763,7 @@ class BinaryExpression(ColumnElement):
self.right,
self.negate,
negate=self.operator,
- type_=type_api.BOOLEANTYPE,
+ type_=self.type,
modifiers=self.modifiers)
else:
return super(BinaryExpression, self)._negate()
@@ -3335,7 +3335,7 @@ class ColumnClause(Immutable, ColumnElement):
return name
def _bind_param(self, operator, obj):
- return BindParameter(self.name, obj,
+ return BindParameter(self.key, obj,
_compared_to_operator=operator,
_compared_to_type=self.type,
unique=True)
diff --git a/lib/sqlalchemy/sql/operators.py b/lib/sqlalchemy/sql/operators.py
index 945356328..b08e44ab8 100644
--- a/lib/sqlalchemy/sql/operators.py
+++ b/lib/sqlalchemy/sql/operators.py
@@ -767,6 +767,10 @@ def match_op(a, b, **kw):
return a.match(b, **kw)
+def notmatch_op(a, b, **kw):
+ return a.notmatch(b, **kw)
+
+
def comma_op(a, b):
raise NotImplementedError()
@@ -834,6 +838,7 @@ _PRECEDENCE = {
concat_op: 6,
match_op: 6,
+ notmatch_op: 6,
ilike_op: 6,
notilike_op: 6,
diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py
index 96cabbf4f..b90f7fc53 100644
--- a/lib/sqlalchemy/sql/schema.py
+++ b/lib/sqlalchemy/sql/schema.py
@@ -824,7 +824,7 @@ class Table(DialectKWArgs, SchemaItem, TableClause):
table.append_constraint(
c.copy(schema=fk_constraint_schema, target_table=table))
- else:
+ elif not c._type_bound:
table.append_constraint(
c.copy(schema=schema, target_table=table))
for index in self.indexes:
@@ -1295,7 +1295,7 @@ class Column(SchemaItem, ColumnClause):
# Constraint objects plus non-constraint-bound ForeignKey objects
args = \
- [c.copy(**kw) for c in self.constraints] + \
+ [c.copy(**kw) for c in self.constraints if not c._type_bound] + \
[c.copy(**kw) for c in self.foreign_keys if not c.constraint]
type_ = self.type
@@ -1804,7 +1804,7 @@ class ForeignKey(DialectKWArgs, SchemaItem):
match=self.match,
**self._unvalidated_dialect_kw
)
- self.constraint._elements[self.parent] = self
+ self.constraint._append_element(column, self)
self.constraint._set_parent_with_dispatch(table)
table.foreign_keys.add(self)
@@ -2254,7 +2254,7 @@ class Constraint(DialectKWArgs, SchemaItem):
__visit_name__ = 'constraint'
def __init__(self, name=None, deferrable=None, initially=None,
- _create_rule=None, info=None,
+ _create_rule=None, info=None, _type_bound=False,
**dialect_kw):
"""Create a SQL constraint.
@@ -2304,6 +2304,7 @@ class Constraint(DialectKWArgs, SchemaItem):
if info:
self.info = info
self._create_rule = _create_rule
+ self._type_bound = _type_bound
util.set_creation_order(self)
self._validate_dialect_kwargs(dialect_kw)
@@ -2420,7 +2421,7 @@ class CheckConstraint(Constraint):
def __init__(self, sqltext, name=None, deferrable=None,
initially=None, table=None, info=None, _create_rule=None,
- _autoattach=True):
+ _autoattach=True, _type_bound=False):
"""Construct a CHECK constraint.
:param sqltext:
@@ -2450,7 +2451,9 @@ class CheckConstraint(Constraint):
"""
super(CheckConstraint, self).\
- __init__(name, deferrable, initially, _create_rule, info=info)
+ __init__(
+ name, deferrable, initially, _create_rule, info=info,
+ _type_bound=_type_bound)
self.sqltext = _literal_as_text(sqltext, warn=False)
if table is not None:
self._set_parent_with_dispatch(table)
@@ -2485,11 +2488,12 @@ class CheckConstraint(Constraint):
deferrable=self.deferrable,
_create_rule=self._create_rule,
table=target_table,
- _autoattach=False)
+ _autoattach=False,
+ _type_bound=self._type_bound)
return self._schema_item_copy(c)
-class ForeignKeyConstraint(Constraint):
+class ForeignKeyConstraint(ColumnCollectionConstraint):
"""A table-level FOREIGN KEY constraint.
Defines a single column or composite FOREIGN KEY ... REFERENCES
@@ -2564,9 +2568,10 @@ class ForeignKeyConstraint(Constraint):
.. versionadded:: 0.9.2
"""
- super(ForeignKeyConstraint, self).\
- __init__(name, deferrable, initially, info=info, **dialect_kw)
+ Constraint.__init__(
+ self, name=name, deferrable=deferrable, initially=initially,
+ info=info, **dialect_kw)
self.onupdate = onupdate
self.ondelete = ondelete
self.link_to_name = link_to_name
@@ -2575,14 +2580,12 @@ class ForeignKeyConstraint(Constraint):
self.use_alter = use_alter
self.match = match
- self._elements = util.OrderedDict()
-
# standalone ForeignKeyConstraint - create
# associated ForeignKey objects which will be applied to hosted
# Column objects (in col.foreign_keys), either now or when attached
# to the Table for string-specified names
- for col, refcol in zip(columns, refcolumns):
- self._elements[col] = ForeignKey(
+ self.elements = [
+ ForeignKey(
refcol,
_constraint=self,
name=self.name,
@@ -2594,25 +2597,36 @@ class ForeignKeyConstraint(Constraint):
deferrable=self.deferrable,
initially=self.initially,
**self.dialect_kwargs
- )
+ ) for refcol in refcolumns
+ ]
+ ColumnCollectionMixin.__init__(self, *columns)
if table is not None:
+ if hasattr(self, "parent"):
+ assert table is self.parent
self._set_parent_with_dispatch(table)
- elif columns and \
- isinstance(columns[0], Column) and \
- columns[0].table is not None:
- self._set_parent_with_dispatch(columns[0].table)
+
+ def _append_element(self, column, fk):
+ self.columns.add(column)
+ self.elements.append(fk)
+
+ @property
+ def _elements(self):
+ # legacy - provide a dictionary view of (column_key, fk)
+ return util.OrderedDict(
+ zip(self.column_keys, self.elements)
+ )
@property
def _referred_schema(self):
- for elem in self._elements.values():
+ for elem in self.elements:
return elem._referred_schema
else:
return None
def _validate_dest_table(self, table):
table_keys = set([elem._table_key()
- for elem in self._elements.values()])
+ for elem in self.elements])
if None not in table_keys and len(table_keys) > 1:
elem0, elem1 = sorted(table_keys)[0:2]
raise exc.ArgumentError(
@@ -2625,38 +2639,48 @@ class ForeignKeyConstraint(Constraint):
))
@property
- def _col_description(self):
- return ", ".join(self._elements)
+ def column_keys(self):
+ """Return a list of string keys representing the local
+ columns in this :class:`.ForeignKeyConstraint`.
- @property
- def columns(self):
- return list(self._elements)
+ This list is either the original string arguments sent
+ to the constructor of the :class:`.ForeignKeyConstraint`,
+ or if the constraint has been initialized with :class:`.Column`
+ objects, is the string .key of each element.
+
+ .. versionadded:: 1.0.0
+
+ """
+ if hasattr(self, "parent"):
+ return self.columns.keys()
+ else:
+ return [
+ col.key if isinstance(col, ColumnElement)
+ else str(col) for col in self._pending_colargs
+ ]
@property
- def elements(self):
- return list(self._elements.values())
+ def _col_description(self):
+ return ", ".join(self.column_keys)
def _set_parent(self, table):
- super(ForeignKeyConstraint, self)._set_parent(table)
-
- self._validate_dest_table(table)
+ Constraint._set_parent(self, table)
- for col, fk in self._elements.items():
- # string-specified column names now get
- # resolved to Column objects
- if isinstance(col, util.string_types):
- try:
- col = table.c[col]
- except KeyError:
- raise exc.ArgumentError(
- "Can't create ForeignKeyConstraint "
- "on table '%s': no column "
- "named '%s' is present." % (table.description, col))
+ try:
+ ColumnCollectionConstraint._set_parent(self, table)
+ except KeyError as ke:
+ raise exc.ArgumentError(
+ "Can't create ForeignKeyConstraint "
+ "on table '%s': no column "
+ "named '%s' is present." % (table.description, ke.args[0]))
+ for col, fk in zip(self.columns, self.elements):
if not hasattr(fk, 'parent') or \
fk.parent is not col:
fk._set_parent_with_dispatch(col)
+ self._validate_dest_table(table)
+
if self.use_alter:
def supports_alter(ddl, event, schema_item, bind, **kw):
return table in set(kw['tables']) and \
@@ -2669,14 +2693,14 @@ class ForeignKeyConstraint(Constraint):
def copy(self, schema=None, target_table=None, **kw):
fkc = ForeignKeyConstraint(
- [x.parent.key for x in self._elements.values()],
+ [x.parent.key for x in self.elements],
[x._get_colspec(
schema=schema,
table_name=target_table.name
if target_table is not None
and x._table_key() == x.parent.table.key
else None)
- for x in self._elements.values()],
+ for x in self.elements],
name=self.name,
onupdate=self.onupdate,
ondelete=self.ondelete,
@@ -2687,8 +2711,8 @@ class ForeignKeyConstraint(Constraint):
match=self.match
)
for self_fk, other_fk in zip(
- self._elements.values(),
- fkc._elements.values()):
+ self.elements,
+ fkc.elements):
self_fk._schema_item_copy(other_fk)
return self._schema_item_copy(fkc)
diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py
index 8198a6733..87029ec2b 100644
--- a/lib/sqlalchemy/sql/selectable.py
+++ b/lib/sqlalchemy/sql/selectable.py
@@ -171,6 +171,79 @@ class Selectable(ClauseElement):
return self
+class HasPrefixes(object):
+ _prefixes = ()
+
+ @_generative
+ def prefix_with(self, *expr, **kw):
+ """Add one or more expressions following the statement keyword, i.e.
+ SELECT, INSERT, UPDATE, or DELETE. Generative.
+
+ This is used to support backend-specific prefix keywords such as those
+ provided by MySQL.
+
+ E.g.::
+
+ stmt = table.insert().prefix_with("LOW_PRIORITY", dialect="mysql")
+
+ Multiple prefixes can be specified by multiple calls
+ to :meth:`.prefix_with`.
+
+ :param \*expr: textual or :class:`.ClauseElement` construct which
+ will be rendered following the INSERT, UPDATE, or DELETE
+ keyword.
+ :param \**kw: A single keyword 'dialect' is accepted. This is an
+ optional string dialect name which will
+ limit rendering of this prefix to only that dialect.
+
+ """
+ dialect = kw.pop('dialect', None)
+ if kw:
+ raise exc.ArgumentError("Unsupported argument(s): %s" %
+ ",".join(kw))
+ self._setup_prefixes(expr, dialect)
+
+ def _setup_prefixes(self, prefixes, dialect=None):
+ self._prefixes = self._prefixes + tuple(
+ [(_literal_as_text(p, warn=False), dialect) for p in prefixes])
+
+
+class HasSuffixes(object):
+ _suffixes = ()
+
+ @_generative
+ def suffix_with(self, *expr, **kw):
+ """Add one or more expressions following the statement as a whole.
+
+ This is used to support backend-specific suffix keywords on
+ certain constructs.
+
+ E.g.::
+
+ stmt = select([col1, col2]).cte().suffix_with(
+ "cycle empno set y_cycle to 1 default 0", dialect="oracle")
+
+    Multiple suffixes can be specified by multiple calls
+ to :meth:`.suffix_with`.
+
+ :param \*expr: textual or :class:`.ClauseElement` construct which
+ will be rendered following the target clause.
+ :param \**kw: A single keyword 'dialect' is accepted. This is an
+ optional string dialect name which will
+ limit rendering of this suffix to only that dialect.
+
+ """
+ dialect = kw.pop('dialect', None)
+ if kw:
+ raise exc.ArgumentError("Unsupported argument(s): %s" %
+ ",".join(kw))
+ self._setup_suffixes(expr, dialect)
+
+ def _setup_suffixes(self, suffixes, dialect=None):
+ self._suffixes = self._suffixes + tuple(
+ [(_literal_as_text(p, warn=False), dialect) for p in suffixes])
+
+
class FromClause(Selectable):
"""Represent an element that can be used within the ``FROM``
clause of a ``SELECT`` statement.
@@ -1088,7 +1161,7 @@ class Alias(FromClause):
return self.element.bind
-class CTE(Alias):
+class CTE(Generative, HasSuffixes, Alias):
"""Represent a Common Table Expression.
The :class:`.CTE` object is obtained using the
@@ -1104,10 +1177,13 @@ class CTE(Alias):
name=None,
recursive=False,
_cte_alias=None,
- _restates=frozenset()):
+ _restates=frozenset(),
+ _suffixes=None):
self.recursive = recursive
self._cte_alias = _cte_alias
self._restates = _restates
+ if _suffixes:
+ self._suffixes = _suffixes
super(CTE, self).__init__(selectable, name=name)
def alias(self, name=None, flat=False):
@@ -1116,6 +1192,7 @@ class CTE(Alias):
name=name,
recursive=self.recursive,
_cte_alias=self,
+ _suffixes=self._suffixes
)
def union(self, other):
@@ -1123,7 +1200,8 @@ class CTE(Alias):
self.original.union(other),
name=self.name,
recursive=self.recursive,
- _restates=self._restates.union([self])
+ _restates=self._restates.union([self]),
+ _suffixes=self._suffixes
)
def union_all(self, other):
@@ -1131,7 +1209,8 @@ class CTE(Alias):
self.original.union_all(other),
name=self.name,
recursive=self.recursive,
- _restates=self._restates.union([self])
+ _restates=self._restates.union([self]),
+ _suffixes=self._suffixes
)
@@ -2118,44 +2197,7 @@ class CompoundSelect(GenerativeSelect):
bind = property(bind, _set_bind)
-class HasPrefixes(object):
- _prefixes = ()
-
- @_generative
- def prefix_with(self, *expr, **kw):
- """Add one or more expressions following the statement keyword, i.e.
- SELECT, INSERT, UPDATE, or DELETE. Generative.
-
- This is used to support backend-specific prefix keywords such as those
- provided by MySQL.
-
- E.g.::
-
- stmt = table.insert().prefix_with("LOW_PRIORITY", dialect="mysql")
-
- Multiple prefixes can be specified by multiple calls
- to :meth:`.prefix_with`.
-
- :param \*expr: textual or :class:`.ClauseElement` construct which
- will be rendered following the INSERT, UPDATE, or DELETE
- keyword.
- :param \**kw: A single keyword 'dialect' is accepted. This is an
- optional string dialect name which will
- limit rendering of this prefix to only that dialect.
-
- """
- dialect = kw.pop('dialect', None)
- if kw:
- raise exc.ArgumentError("Unsupported argument(s): %s" %
- ",".join(kw))
- self._setup_prefixes(expr, dialect)
-
- def _setup_prefixes(self, prefixes, dialect=None):
- self._prefixes = self._prefixes + tuple(
- [(_literal_as_text(p, warn=False), dialect) for p in prefixes])
-
-
-class Select(HasPrefixes, GenerativeSelect):
+class Select(HasPrefixes, HasSuffixes, GenerativeSelect):
"""Represents a ``SELECT`` statement.
"""
@@ -2163,6 +2205,7 @@ class Select(HasPrefixes, GenerativeSelect):
__visit_name__ = 'select'
_prefixes = ()
+ _suffixes = ()
_hints = util.immutabledict()
_statement_hints = ()
_distinct = False
@@ -2180,6 +2223,7 @@ class Select(HasPrefixes, GenerativeSelect):
having=None,
correlate=True,
prefixes=None,
+ suffixes=None,
**kwargs):
"""Construct a new :class:`.Select`.
@@ -2425,6 +2469,9 @@ class Select(HasPrefixes, GenerativeSelect):
if prefixes:
self._setup_prefixes(prefixes)
+ if suffixes:
+ self._setup_suffixes(suffixes)
+
GenerativeSelect.__init__(self, **kwargs)
@property
diff --git a/lib/sqlalchemy/sql/sqltypes.py b/lib/sqlalchemy/sql/sqltypes.py
index 2729bc83e..9a2de39b4 100644
--- a/lib/sqlalchemy/sql/sqltypes.py
+++ b/lib/sqlalchemy/sql/sqltypes.py
@@ -894,7 +894,7 @@ class LargeBinary(_Binary):
:param length: optional, a length for the column for use in
DDL statements, for those BLOB types that accept a length
- (i.e. MySQL). It does *not* produce a small BINARY/VARBINARY
+      (i.e. MySQL). It does *not* produce a length-limited BINARY/VARBINARY
type - use the BINARY/VARBINARY types specifically for those.
May be safely omitted if no ``CREATE
TABLE`` will be issued. Certain databases may require a
@@ -998,13 +998,11 @@ class SchemaType(SchemaEventTarget):
def adapt(self, impltype, **kw):
schema = kw.pop('schema', self.schema)
- # don't associate with MetaData as the hosting type
+ # don't associate with self.metadata as the hosting type
# is already associated with it, avoid creating event
# listeners
- metadata = kw.pop('metadata', None)
return impltype(name=self.name,
schema=schema,
- metadata=metadata,
inherit_schema=self.inherit_schema,
**kw)
@@ -1165,7 +1163,8 @@ class Enum(String, SchemaType):
type_coerce(column, self).in_(self.enums),
name=_defer_name(self.name),
_create_rule=util.portable_instancemethod(
- self._should_create_constraint)
+ self._should_create_constraint),
+ _type_bound=True
)
assert e.table is table
@@ -1303,7 +1302,8 @@ class Boolean(TypeEngine, SchemaType):
type_coerce(column, self).in_([0, 1]),
name=_defer_name(self.name),
_create_rule=util.portable_instancemethod(
- self._should_create_constraint)
+ self._should_create_constraint),
+ _type_bound=True
)
assert e.table is table
@@ -1654,10 +1654,26 @@ class NullType(TypeEngine):
comparator_factory = Comparator
+class MatchType(Boolean):
+ """Refers to the return type of the MATCH operator.
+
+    Since :meth:`.Operators.match` is probably the most open-ended
+    operator in generic SQLAlchemy Core, we can't assume the return type
+    at SQL evaluation time; MySQL returns a floating point, not a boolean,
+    and other backends might do something different. So this type
+    acts as a placeholder, currently subclassing :class:`.Boolean`.
+ The type allows dialects to inject result-processing functionality
+ if needed, and on MySQL will return floating-point values.
+
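+    E.g., a sketch of where the type appears; the table and column names
+    are hypothetical::
+
+        expr = documents.c.body.match('sqlalchemy')
+        # expr.type is a MatchType instance; negating with ~expr
+        # renders NOT MATCH on backends that support it
+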
+ .. versionadded:: 1.0.0
+
+ """
+
NULLTYPE = NullType()
BOOLEANTYPE = Boolean()
STRINGTYPE = String()
INTEGERTYPE = Integer()
+MATCHTYPE = MatchType()
_type_map = {
int: Integer(),
@@ -1685,6 +1701,7 @@ type_api.BOOLEANTYPE = BOOLEANTYPE
type_api.STRINGTYPE = STRINGTYPE
type_api.INTEGERTYPE = INTEGERTYPE
type_api.NULLTYPE = NULLTYPE
+type_api.MATCHTYPE = MATCHTYPE
type_api._type_map = _type_map
# this one, there's all kinds of ways to play it, but at the EOD
diff --git a/lib/sqlalchemy/sql/type_api.py b/lib/sqlalchemy/sql/type_api.py
index 77c6e1b1e..d3e0a008e 100644
--- a/lib/sqlalchemy/sql/type_api.py
+++ b/lib/sqlalchemy/sql/type_api.py
@@ -19,7 +19,7 @@ BOOLEANTYPE = None
INTEGERTYPE = None
NULLTYPE = None
STRINGTYPE = None
-
+MATCHTYPE = None
class TypeEngine(Visitable):
"""The ultimate base class for all SQL datatypes.
diff --git a/lib/sqlalchemy/testing/engines.py b/lib/sqlalchemy/testing/engines.py
index 1284f9c2a..0f6f59401 100644
--- a/lib/sqlalchemy/testing/engines.py
+++ b/lib/sqlalchemy/testing/engines.py
@@ -280,10 +280,10 @@ class DBAPIProxyCursor(object):
"""
- def __init__(self, engine, conn):
+ def __init__(self, engine, conn, *args, **kwargs):
self.engine = engine
self.connection = conn
- self.cursor = conn.cursor()
+ self.cursor = conn.cursor(*args, **kwargs)
def execute(self, stmt, parameters=None, **kw):
if parameters:
@@ -311,8 +311,8 @@ class DBAPIProxyConnection(object):
self.engine = engine
self.cursor_cls = cursor_cls
- def cursor(self):
- return self.cursor_cls(self.engine, self.conn)
+ def cursor(self, *args, **kwargs):
+ return self.cursor_cls(self.engine, self.conn, *args, **kwargs)
def close(self):
self.conn.close()
diff --git a/lib/sqlalchemy/testing/exclusions.py b/lib/sqlalchemy/testing/exclusions.py
index f94724608..0aff43ae1 100644
--- a/lib/sqlalchemy/testing/exclusions.py
+++ b/lib/sqlalchemy/testing/exclusions.py
@@ -425,7 +425,7 @@ def skip(db, reason=None):
def only_on(dbs, reason=None):
return only_if(
- OrPredicate([SpecPredicate(db) for db in util.to_list(dbs)])
+ OrPredicate([Predicate.as_predicate(db) for db in util.to_list(dbs)])
)
diff --git a/lib/sqlalchemy/testing/plugin/plugin_base.py b/lib/sqlalchemy/testing/plugin/plugin_base.py
index 6696427dc..614a12133 100644
--- a/lib/sqlalchemy/testing/plugin/plugin_base.py
+++ b/lib/sqlalchemy/testing/plugin/plugin_base.py
@@ -93,7 +93,10 @@ def setup_options(make_option):
help="Exclude tests with tag <tag>")
make_option("--write-profiles", action="store_true",
dest="write_profiles", default=False,
- help="Write/update profiling data.")
+ help="Write/update failing profiling data.")
+ make_option("--force-write-profiles", action="store_true",
+ dest="force_write_profiles", default=False,
+ help="Unconditionally write/update profiling data.")
def configure_follower(follower_ident):
diff --git a/lib/sqlalchemy/testing/profiling.py b/lib/sqlalchemy/testing/profiling.py
index fcb888f86..671bbe32d 100644
--- a/lib/sqlalchemy/testing/profiling.py
+++ b/lib/sqlalchemy/testing/profiling.py
@@ -42,7 +42,11 @@ class ProfileStatsFile(object):
"""
def __init__(self, filename):
- self.write = (
+ self.force_write = (
+ config.options is not None and
+ config.options.force_write_profiles
+ )
+ self.write = self.force_write or (
config.options is not None and
config.options.write_profiles
)
@@ -115,7 +119,11 @@ class ProfileStatsFile(object):
per_fn = self.data[test_key]
per_platform = per_fn[self.platform_key]
counts = per_platform['counts']
- counts[-1] = callcount
+ current_count = per_platform['current_count']
+ if current_count < len(counts):
+ counts[current_count - 1] = callcount
+ else:
+ counts[-1] = callcount
if self.write:
self._write()
@@ -235,7 +243,7 @@ def count_functions(variance=0.05):
deviance = int(callcount * variance)
failed = abs(callcount - expected_count) > deviance
- if failed:
+ if failed or _profile_stats.force_write:
if _profile_stats.write:
_profile_stats.replace(callcount)
else:
diff --git a/lib/sqlalchemy/testing/requirements.py b/lib/sqlalchemy/testing/requirements.py
index da3e3128a..5744431cb 100644
--- a/lib/sqlalchemy/testing/requirements.py
+++ b/lib/sqlalchemy/testing/requirements.py
@@ -323,6 +323,11 @@ class SuiteRequirements(Requirements):
return exclusions.closed()
@property
+ def temporary_tables(self):
+ """target database supports temporary tables"""
+ return exclusions.open()
+
+ @property
def temporary_views(self):
"""target database supports temporary views"""
return exclusions.closed()
diff --git a/lib/sqlalchemy/testing/suite/test_reflection.py b/lib/sqlalchemy/testing/suite/test_reflection.py
index 08b858b47..3edbdeb8c 100644
--- a/lib/sqlalchemy/testing/suite/test_reflection.py
+++ b/lib/sqlalchemy/testing/suite/test_reflection.py
@@ -128,6 +128,10 @@ class ComponentReflectionTest(fixtures.TablesTest):
DDL("create temporary view user_tmp_v as "
"select * from user_tmp")
)
+ event.listen(
+ user_tmp, "before_drop",
+ DDL("drop view user_tmp_v")
+ )
@classmethod
def define_index(cls, metadata, users):
@@ -511,6 +515,8 @@ class ComponentReflectionTest(fixtures.TablesTest):
def test_get_temp_table_indexes(self):
insp = inspect(self.metadata.bind)
indexes = insp.get_indexes('user_tmp')
+ for ind in indexes:
+ ind.pop('dialect_options', None)
eq_(
# TODO: we need to add better filtering for indexes/uq constraints
# that are doubled up
diff --git a/lib/sqlalchemy/types.py b/lib/sqlalchemy/types.py
index b49e389ac..1215bd790 100644
--- a/lib/sqlalchemy/types.py
+++ b/lib/sqlalchemy/types.py
@@ -51,6 +51,7 @@ from .sql.sqltypes import (
Integer,
Interval,
LargeBinary,
+ MatchType,
NCHAR,
NVARCHAR,
NullType,
diff --git a/lib/sqlalchemy/util/_collections.py b/lib/sqlalchemy/util/_collections.py
index a1fbc0fa0..d36852698 100644
--- a/lib/sqlalchemy/util/_collections.py
+++ b/lib/sqlalchemy/util/_collections.py
@@ -10,9 +10,10 @@
from __future__ import absolute_import
import weakref
import operator
-from .compat import threading, itertools_filterfalse
+from .compat import threading, itertools_filterfalse, string_types
from . import py2k
import types
+import collections
EMPTY_SET = frozenset()
@@ -779,10 +780,12 @@ def coerce_generator_arg(arg):
def to_list(x, default=None):
if x is None:
return default
- if not isinstance(x, (list, tuple)):
+ if not isinstance(x, collections.Iterable) or isinstance(x, string_types):
return [x]
- else:
+ elif isinstance(x, list):
return x
+ else:
+ return list(x)
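The revised coercion behavior, illustrated; these outcomes correspond to the
new ToListTest cases added later in this patch:

    from sqlalchemy.util import to_list

    to_list(None, default=[])     # [] - default returned for None
    to_list("xyz")                # ['xyz'] - strings are not iterated
    to_list([1, 2, 3])            # [1, 2, 3] - lists pass through as-is
    to_list((1, 2, 3))            # [1, 2, 3] - tuples become lists
    sorted(to_list(set([1, 2])))  # [1, 2] - other iterables are listified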
def to_set(x):
diff --git a/test/aaa_profiling/test_memusage.py b/test/aaa_profiling/test_memusage.py
index f4bce6b01..63883daac 100644
--- a/test/aaa_profiling/test_memusage.py
+++ b/test/aaa_profiling/test_memusage.py
@@ -658,6 +658,32 @@ class MemUsageTest(EnsureZeroed):
row[t.c.x]
go()
+ def test_many_discarded_relationships(self):
+ """a use case that really isn't supported, nonetheless we can
+ guard against memleaks here so why not"""
+
+ m1 = MetaData()
+ t1 = Table('t1', m1, Column('id', Integer, primary_key=True))
+ t2 = Table(
+ 't2', m1, Column('id', Integer, primary_key=True),
+ Column('t1id', ForeignKey('t1.id')))
+
+ class T1(object):
+ pass
+ t1_mapper = mapper(T1, t1)
+
+ @testing.emits_warning()
+ @profile_memory()
+ def go():
+ class T2(object):
+ pass
+ t2_mapper = mapper(T2, t2)
+ t1_mapper.add_property("bar", relationship(t2_mapper))
+ s1 = Session()
+ # this causes the path_registry to be invoked
+ s1.query(t1_mapper)._compile_context()
+ go()
+
# fails on newer versions of pysqlite due to unusual memory behavior
# in pysqlite itself. background at:
# http://thread.gmane.org/gmane.comp.python.db.pysqlite.user/2290
diff --git a/test/base/test_utils.py b/test/base/test_utils.py
index f75c5cbe9..df61d7874 100644
--- a/test/base/test_utils.py
+++ b/test/base/test_utils.py
@@ -8,6 +8,7 @@ from sqlalchemy.util import classproperty, WeakSequence, get_callable_argspec
from sqlalchemy.sql import column
from sqlalchemy.util import langhelpers
+
class _KeyedTupleTest(object):
def _fixture(self, values, labels):
@@ -283,6 +284,35 @@ class MemoizedAttrTest(fixtures.TestBase):
eq_(val[0], 21)
+class ToListTest(fixtures.TestBase):
+ def test_from_string(self):
+ eq_(
+ util.to_list("xyz"),
+ ["xyz"]
+ )
+
+ def test_from_set(self):
+ spec = util.to_list(set([1, 2, 3]))
+ assert isinstance(spec, list)
+ eq_(
+ sorted(spec),
+ [1, 2, 3]
+ )
+
+ def test_from_dict(self):
+ spec = util.to_list({1: "a", 2: "b", 3: "c"})
+ assert isinstance(spec, list)
+ eq_(
+ sorted(spec),
+ [1, 2, 3]
+ )
+
+ def test_from_tuple(self):
+ eq_(
+ util.to_list((1, 2, 3)),
+ [1, 2, 3]
+ )
+
class ColumnCollectionTest(fixtures.TestBase):
def test_in(self):
diff --git a/test/dialect/mssql/test_engine.py b/test/dialect/mssql/test_engine.py
index 4b4780d43..a994b1787 100644
--- a/test/dialect/mssql/test_engine.py
+++ b/test/dialect/mssql/test_engine.py
@@ -157,8 +157,7 @@ class ParseConnectTest(fixtures.TestBase):
eq_(dialect.is_disconnect("not an error", None, None), False)
- @testing.only_on(['mssql+pyodbc', 'mssql+pymssql'],
- "FreeTDS specific test")
+ @testing.requires.mssql_freetds
def test_bad_freetds_warning(self):
engine = engines.testing_engine()
diff --git a/test/dialect/mssql/test_reflection.py b/test/dialect/mssql/test_reflection.py
index e93162a8e..bee441586 100644
--- a/test/dialect/mssql/test_reflection.py
+++ b/test/dialect/mssql/test_reflection.py
@@ -24,14 +24,14 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
Column('user_name', types.VARCHAR(20), nullable=False),
Column('test1', types.CHAR(5), nullable=False),
Column('test2', types.Float(5), nullable=False),
- Column('test3', types.Text),
+ Column('test3', types.Text('max')),
Column('test4', types.Numeric, nullable=False),
Column('test5', types.DateTime),
Column('parent_user_id', types.Integer,
ForeignKey('engine_users.user_id')),
Column('test6', types.DateTime, nullable=False),
- Column('test7', types.Text),
- Column('test8', types.LargeBinary),
+ Column('test7', types.Text('max')),
+ Column('test8', types.LargeBinary('max')),
Column('test_passivedefault2', types.Integer,
server_default='5'),
Column('test9', types.BINARY(100)),
@@ -187,7 +187,7 @@ class InfoCoerceUnicodeTest(fixtures.TestBase, AssertsCompiledSQL):
stmt = tables.c.table_name == 'somename'
self.assert_compile(
stmt,
- "[TABLES_1].[TABLE_NAME] = :TABLE_NAME_1",
+ "[TABLES_1].[TABLE_NAME] = :table_name_1",
dialect=dialect
)
@@ -197,13 +197,18 @@ class InfoCoerceUnicodeTest(fixtures.TestBase, AssertsCompiledSQL):
stmt = tables.c.table_name == 'somename'
self.assert_compile(
stmt,
- "[TABLES_1].[TABLE_NAME] = CAST(:TABLE_NAME_1 AS NVARCHAR(max))",
+ "[TABLES_1].[TABLE_NAME] = CAST(:table_name_1 AS NVARCHAR(max))",
dialect=dialect
)
class ReflectHugeViewTest(fixtures.TestBase):
__only_on__ = 'mssql'
+ # crashes on freetds 0.91, not worth it
+ __skip_if__ = (
+ lambda: testing.requires.mssql_freetds.enabled,
+ )
+
def setup(self):
self.col_num = 150
diff --git a/test/dialect/mssql/test_types.py b/test/dialect/mssql/test_types.py
index 9dc1983ae..5c9157379 100644
--- a/test/dialect/mssql/test_types.py
+++ b/test/dialect/mssql/test_types.py
@@ -2,12 +2,15 @@
from sqlalchemy.testing import eq_, engines, pickleable
import datetime
import os
-from sqlalchemy import *
+from sqlalchemy import Table, Column, MetaData, Float, \
+ Integer, String, Boolean, TIMESTAMP, Sequence, Numeric, select, \
+ Date, Time, DateTime, DefaultClause, PickleType, text, Text, \
+ UnicodeText, LargeBinary
from sqlalchemy import types, schema
from sqlalchemy.databases import mssql
from sqlalchemy.dialects.mssql.base import TIME
from sqlalchemy.testing import fixtures, \
- AssertsExecutionResults, ComparesTables
+ AssertsExecutionResults, ComparesTables
from sqlalchemy import testing
from sqlalchemy.testing import emits_warning_on
import decimal
@@ -32,6 +35,7 @@ class TimeTypeTest(fixtures.TestBase):
class TypeDDLTest(fixtures.TestBase):
+
def test_boolean(self):
"Exercise type specification for boolean type."
@@ -39,7 +43,7 @@ class TypeDDLTest(fixtures.TestBase):
# column type, args, kwargs, expected ddl
(Boolean, [], {},
'BIT'),
- ]
+ ]
metadata = MetaData()
table_args = ['test_mssql_boolean', metadata]
@@ -54,11 +58,11 @@ class TypeDDLTest(fixtures.TestBase):
for col in boolean_table.c:
index = int(col.name[1:])
- testing.eq_(gen.get_column_specification(col),
- "%s %s" % (col.name, columns[index][3]))
+ testing.eq_(
+ gen.get_column_specification(col),
+ "%s %s" % (col.name, columns[index][3]))
self.assert_(repr(col))
-
def test_numeric(self):
"Exercise type specification and options for numeric types."
@@ -88,7 +92,7 @@ class TypeDDLTest(fixtures.TestBase):
'TINYINT'),
(types.SmallInteger, [], {},
'SMALLINT'),
- ]
+ ]
metadata = MetaData()
table_args = ['test_mssql_numeric', metadata]
@@ -103,11 +107,11 @@ class TypeDDLTest(fixtures.TestBase):
for col in numeric_table.c:
index = int(col.name[1:])
- testing.eq_(gen.get_column_specification(col),
- "%s %s" % (col.name, columns[index][3]))
+ testing.eq_(
+ gen.get_column_specification(col),
+ "%s %s" % (col.name, columns[index][3]))
self.assert_(repr(col))
-
def test_char(self):
"""Exercise COLLATE-ish options on string types."""
@@ -149,7 +153,7 @@ class TypeDDLTest(fixtures.TestBase):
'NTEXT'),
(mssql.MSNText, [], {'collation': 'Latin1_General_CI_AS'},
'NTEXT COLLATE Latin1_General_CI_AS'),
- ]
+ ]
metadata = MetaData()
table_args = ['test_mssql_charset', metadata]
@@ -164,10 +168,48 @@ class TypeDDLTest(fixtures.TestBase):
for col in charset_table.c:
index = int(col.name[1:])
- testing.eq_(gen.get_column_specification(col),
- "%s %s" % (col.name, columns[index][3]))
+ testing.eq_(
+ gen.get_column_specification(col),
+ "%s %s" % (col.name, columns[index][3]))
self.assert_(repr(col))
+ def test_large_type_deprecation(self):
+ d1 = mssql.dialect(deprecate_large_types=True)
+ d2 = mssql.dialect(deprecate_large_types=False)
+ d3 = mssql.dialect()
+ d3.server_version_info = (11, 0)
+ d3._setup_version_attributes()
+ d4 = mssql.dialect()
+ d4.server_version_info = (10, 0)
+ d4._setup_version_attributes()
+
+ for dialect in (d1, d3):
+ eq_(
+ str(Text().compile(dialect=dialect)),
+ "VARCHAR(max)"
+ )
+ eq_(
+ str(UnicodeText().compile(dialect=dialect)),
+ "NVARCHAR(max)"
+ )
+ eq_(
+ str(LargeBinary().compile(dialect=dialect)),
+ "VARBINARY(max)"
+ )
+
+ for dialect in (d2, d4):
+ eq_(
+ str(Text().compile(dialect=dialect)),
+ "TEXT"
+ )
+ eq_(
+ str(UnicodeText().compile(dialect=dialect)),
+ "NTEXT"
+ )
+ eq_(
+ str(LargeBinary().compile(dialect=dialect)),
+ "IMAGE"
+ )
def test_timestamp(self):
"""Exercise TIMESTAMP column."""
@@ -176,9 +218,10 @@ class TypeDDLTest(fixtures.TestBase):
metadata = MetaData()
spec, expected = (TIMESTAMP, 'TIMESTAMP')
- t = Table('mssql_ts', metadata,
- Column('id', Integer, primary_key=True),
- Column('t', spec, nullable=None))
+ t = Table(
+ 'mssql_ts', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('t', spec, nullable=None))
gen = dialect.ddl_compiler(dialect, schema.CreateTable(t))
testing.eq_(gen.get_column_specification(t.c.t), "t %s" % expected)
self.assert_(repr(t.c.t))
@@ -255,7 +298,11 @@ class TypeDDLTest(fixtures.TestBase):
% (col.name, columns[index][3]))
self.assert_(repr(col))
-class TypeRoundTripTest(fixtures.TestBase, AssertsExecutionResults, ComparesTables):
+metadata = None
+
+
+class TypeRoundTripTest(
+ fixtures.TestBase, AssertsExecutionResults, ComparesTables):
__only_on__ = 'mssql'
@classmethod
@@ -266,15 +313,18 @@ class TypeRoundTripTest(fixtures.TestBase, AssertsExecutionResults, ComparesTabl
def teardown(self):
metadata.drop_all()
- @testing.fails_on_everything_except('mssql+pyodbc',
- 'this is some pyodbc-specific feature')
+ @testing.fails_on_everything_except(
+ 'mssql+pyodbc',
+ 'this is some pyodbc-specific feature')
def test_decimal_notation(self):
- numeric_table = Table('numeric_table', metadata, Column('id',
- Integer, Sequence('numeric_id_seq',
- optional=True), primary_key=True),
- Column('numericcol',
- Numeric(precision=38, scale=20,
- asdecimal=True)))
+ numeric_table = Table(
+ 'numeric_table', metadata,
+ Column(
+ 'id', Integer,
+ Sequence('numeric_id_seq', optional=True), primary_key=True),
+ Column(
+ 'numericcol',
+ Numeric(precision=38, scale=20, asdecimal=True)))
metadata.create_all()
test_items = [decimal.Decimal(d) for d in (
'1500000.00000000000000000000',
@@ -323,7 +373,7 @@ class TypeRoundTripTest(fixtures.TestBase, AssertsExecutionResults, ComparesTabl
'000000000000.32E12',
'00000000000000.1E+12',
'000000000000.2E-32',
- )]
+ )]
for value in test_items:
numeric_table.insert().execute(numericcol=value)
@@ -332,10 +382,13 @@ class TypeRoundTripTest(fixtures.TestBase, AssertsExecutionResults, ComparesTabl
assert value[0] in test_items, "%r not in test_items" % value[0]
def test_float(self):
- float_table = Table('float_table', metadata, Column('id',
- Integer, Sequence('numeric_id_seq',
- optional=True), primary_key=True),
- Column('floatcol', Float()))
+ float_table = Table(
+ 'float_table', metadata,
+ Column(
+ 'id', Integer,
+ Sequence('numeric_id_seq', optional=True), primary_key=True),
+ Column('floatcol', Float()))
+
metadata.create_all()
try:
test_items = [float(d) for d in (
@@ -363,13 +416,12 @@ class TypeRoundTripTest(fixtures.TestBase, AssertsExecutionResults, ComparesTabl
'1E-6',
'1E-7',
'1E-8',
- )]
+ )]
for value in test_items:
float_table.insert().execute(floatcol=value)
except Exception as e:
raise e
-
# TODO: this should suppress warnings, but it does not
@emits_warning_on('mssql+mxodbc', r'.*does not have any indexes.*')
def test_dates(self):
@@ -417,20 +469,20 @@ class TypeRoundTripTest(fixtures.TestBase, AssertsExecutionResults, ComparesTabl
(mssql.MSDateTime2, [1], {},
'DATETIME2(1)', ['>=', (10,)]),
- ]
+ ]
table_args = ['test_mssql_dates', metadata]
for index, spec in enumerate(columns):
type_, args, kw, res, requires = spec[0:5]
- if requires and testing._is_excluded('mssql', *requires) \
- or not requires:
- c = Column('c%s' % index, type_(*args,
- **kw), nullable=None)
+ if requires and \
+ testing._is_excluded('mssql', *requires) or not requires:
+ c = Column('c%s' % index, type_(*args, **kw), nullable=None)
testing.db.dialect.type_descriptor(c.type)
table_args.append(c)
dates_table = Table(*table_args)
- gen = testing.db.dialect.ddl_compiler(testing.db.dialect,
- schema.CreateTable(dates_table))
+ gen = testing.db.dialect.ddl_compiler(
+ testing.db.dialect,
+ schema.CreateTable(dates_table))
for col in dates_table.c:
index = int(col.name[1:])
testing.eq_(gen.get_column_specification(col), '%s %s'
@@ -443,13 +495,14 @@ class TypeRoundTripTest(fixtures.TestBase, AssertsExecutionResults, ComparesTabl
self.assert_types_base(col, dates_table.c[col.key])
def test_date_roundtrip(self):
- t = Table('test_dates', metadata,
- Column('id', Integer,
- Sequence('datetest_id_seq', optional=True),
- primary_key=True),
- Column('adate', Date),
- Column('atime', Time),
- Column('adatetime', DateTime))
+ t = Table(
+ 'test_dates', metadata,
+ Column('id', Integer,
+ Sequence('datetest_id_seq', optional=True),
+ primary_key=True),
+ Column('adate', Date),
+ Column('atime', Time),
+ Column('adatetime', DateTime))
metadata.create_all()
d1 = datetime.date(2007, 10, 30)
t1 = datetime.time(11, 2, 32)
@@ -471,18 +524,18 @@ class TypeRoundTripTest(fixtures.TestBase, AssertsExecutionResults, ComparesTabl
@emits_warning_on('mssql+mxodbc', r'.*does not have any indexes.*')
@testing.provide_metadata
- def test_binary_reflection(self):
+ def _test_binary_reflection(self, deprecate_large_types):
"Exercise type specification for binary types."
columns = [
- # column type, args, kwargs, expected ddl
+ # column type, args, kwargs, expected ddl from reflected
(mssql.MSBinary, [], {},
- 'BINARY'),
+ 'BINARY(1)'),
(mssql.MSBinary, [10], {},
'BINARY(10)'),
(types.BINARY, [], {},
- 'BINARY'),
+ 'BINARY(1)'),
(types.BINARY, [10], {},
'BINARY(10)'),
@@ -503,10 +556,12 @@ class TypeRoundTripTest(fixtures.TestBase, AssertsExecutionResults, ComparesTabl
'IMAGE'),
(types.LargeBinary, [], {},
- 'IMAGE'),
+ 'IMAGE' if not deprecate_large_types else 'VARBINARY(max)'),
]
metadata = self.metadata
+ metadata.bind = engines.testing_engine(
+ options={"deprecate_large_types": deprecate_large_types})
table_args = ['test_mssql_binary', metadata]
for index, spec in enumerate(columns):
type_, args, kw, res = spec
@@ -516,59 +571,80 @@ class TypeRoundTripTest(fixtures.TestBase, AssertsExecutionResults, ComparesTabl
metadata.create_all()
reflected_binary = Table('test_mssql_binary',
MetaData(testing.db), autoload=True)
- for col in reflected_binary.c:
+ for col, spec in zip(reflected_binary.c, columns):
+ eq_(
+ str(col.type), spec[3],
+ "column %s %s != %s" % (col.key, str(col.type), spec[3])
+ )
c1 = testing.db.dialect.type_descriptor(col.type).__class__
c2 = \
testing.db.dialect.type_descriptor(
binary_table.c[col.name].type).__class__
- assert issubclass(c1, c2), '%r is not a subclass of %r' \
- % (c1, c2)
+ assert issubclass(c1, c2), \
+ 'column %s: %r is not a subclass of %r' \
+ % (col.key, c1, c2)
if binary_table.c[col.name].type.length:
testing.eq_(col.type.length,
binary_table.c[col.name].type.length)
+ def test_binary_reflection_legacy_large_types(self):
+ self._test_binary_reflection(False)
+
+ @testing.only_on('mssql >= 11')
+ def test_binary_reflection_sql2012_large_types(self):
+ self._test_binary_reflection(True)
def test_autoincrement(self):
- Table('ai_1', metadata,
- Column('int_y', Integer, primary_key=True),
- Column('int_n', Integer, DefaultClause('0'),
- primary_key=True, autoincrement=False))
- Table('ai_2', metadata,
- Column('int_y', Integer, primary_key=True),
- Column('int_n', Integer, DefaultClause('0'),
- primary_key=True, autoincrement=False))
- Table('ai_3', metadata,
- Column('int_n', Integer, DefaultClause('0'),
- primary_key=True, autoincrement=False),
- Column('int_y', Integer, primary_key=True))
- Table('ai_4', metadata,
- Column('int_n', Integer, DefaultClause('0'),
- primary_key=True, autoincrement=False),
- Column('int_n2', Integer, DefaultClause('0'),
- primary_key=True, autoincrement=False))
- Table('ai_5', metadata,
- Column('int_y', Integer, primary_key=True),
- Column('int_n', Integer, DefaultClause('0'),
- primary_key=True, autoincrement=False))
- Table('ai_6', metadata,
- Column('o1', String(1), DefaultClause('x'),
- primary_key=True),
- Column('int_y', Integer, primary_key=True))
- Table('ai_7', metadata,
- Column('o1', String(1), DefaultClause('x'),
- primary_key=True),
- Column('o2', String(1), DefaultClause('x'),
- primary_key=True),
- Column('int_y', Integer, primary_key=True))
- Table('ai_8', metadata,
- Column('o1', String(1), DefaultClause('x'),
- primary_key=True),
- Column('o2', String(1), DefaultClause('x'),
- primary_key=True))
+ Table(
+ 'ai_1', metadata,
+ Column('int_y', Integer, primary_key=True),
+ Column(
+ 'int_n', Integer, DefaultClause('0'),
+ primary_key=True, autoincrement=False))
+ Table(
+ 'ai_2', metadata,
+ Column('int_y', Integer, primary_key=True),
+ Column('int_n', Integer, DefaultClause('0'),
+ primary_key=True, autoincrement=False))
+ Table(
+ 'ai_3', metadata,
+ Column('int_n', Integer, DefaultClause('0'),
+ primary_key=True, autoincrement=False),
+ Column('int_y', Integer, primary_key=True))
+
+ Table(
+ 'ai_4', metadata,
+ Column('int_n', Integer, DefaultClause('0'),
+ primary_key=True, autoincrement=False),
+ Column('int_n2', Integer, DefaultClause('0'),
+ primary_key=True, autoincrement=False))
+ Table(
+ 'ai_5', metadata,
+ Column('int_y', Integer, primary_key=True),
+ Column('int_n', Integer, DefaultClause('0'),
+ primary_key=True, autoincrement=False))
+ Table(
+ 'ai_6', metadata,
+ Column('o1', String(1), DefaultClause('x'),
+ primary_key=True),
+ Column('int_y', Integer, primary_key=True))
+ Table(
+ 'ai_7', metadata,
+ Column('o1', String(1), DefaultClause('x'),
+ primary_key=True),
+ Column('o2', String(1), DefaultClause('x'),
+ primary_key=True),
+ Column('int_y', Integer, primary_key=True))
+ Table(
+ 'ai_8', metadata,
+ Column('o1', String(1), DefaultClause('x'),
+ primary_key=True),
+ Column('o2', String(1), DefaultClause('x'),
+ primary_key=True))
metadata.create_all()
table_names = ['ai_1', 'ai_2', 'ai_3', 'ai_4',
- 'ai_5', 'ai_6', 'ai_7', 'ai_8']
+ 'ai_5', 'ai_6', 'ai_7', 'ai_8']
mr = MetaData(testing.db)
for name in table_names:
@@ -586,27 +662,29 @@ class TypeRoundTripTest(fixtures.TestBase, AssertsExecutionResults, ComparesTabl
if testing.db.driver == 'mxodbc':
eng = \
- [engines.testing_engine(options={'implicit_returning'
- : True})]
+ [engines.testing_engine(options={
+ 'implicit_returning': True})]
else:
eng = \
- [engines.testing_engine(options={'implicit_returning'
- : False}),
- engines.testing_engine(options={'implicit_returning'
- : True})]
+ [engines.testing_engine(options={
+ 'implicit_returning': False}),
+ engines.testing_engine(options={
+ 'implicit_returning': True})]
for counter, engine in enumerate(eng):
engine.execute(tbl.insert())
if 'int_y' in tbl.c:
assert engine.scalar(select([tbl.c.int_y])) \
== counter + 1
- assert list(engine.execute(tbl.select()).first()).\
- count(counter + 1) == 1
+ assert list(
+ engine.execute(tbl.select()).first()).\
+ count(counter + 1) == 1
else:
assert 1 \
not in list(engine.execute(tbl.select()).first())
engine.execute(tbl.delete())
+
class MonkeyPatchedBinaryTest(fixtures.TestBase):
__only_on__ = 'mssql+pymssql'
@@ -622,7 +700,12 @@ class MonkeyPatchedBinaryTest(fixtures.TestBase):
result = module.Binary(input)
eq_(result, expected_result)
+binary_table = None
+MyPickleType = None
+
+
class BinaryTest(fixtures.TestBase, AssertsExecutionResults):
+
"""Test the Binary and VarBinary types"""
__only_on__ = 'mssql'
@@ -655,7 +738,7 @@ class BinaryTest(fixtures.TestBase, AssertsExecutionResults):
Column('misc', String(30)),
Column('pickled', PickleType),
Column('mypickle', MyPickleType),
- )
+ )
binary_table.create()
def teardown(self):
@@ -679,7 +762,7 @@ class BinaryTest(fixtures.TestBase, AssertsExecutionResults):
data_slice=stream1[0:100],
pickled=testobj1,
mypickle=testobj3,
- )
+ )
binary_table.insert().execute(
primary_id=2,
misc='binary_data_two.dat',
@@ -687,7 +770,7 @@ class BinaryTest(fixtures.TestBase, AssertsExecutionResults):
data_image=stream2,
data_slice=stream2[0:99],
pickled=testobj2,
- )
+ )
# TODO: pyodbc does not seem to accept "None" for a VARBINARY
# column (data=None). error: [Microsoft][ODBC SQL Server
@@ -697,17 +780,21 @@ class BinaryTest(fixtures.TestBase, AssertsExecutionResults):
# misc='binary_data_two.dat', data=None, data_image=None,
# data_slice=stream2[0:99], pickled=None)
- binary_table.insert().execute(primary_id=3,
- misc='binary_data_two.dat', data_image=None,
- data_slice=stream2[0:99], pickled=None)
+ binary_table.insert().execute(
+ primary_id=3,
+ misc='binary_data_two.dat', data_image=None,
+ data_slice=stream2[0:99], pickled=None)
for stmt in \
binary_table.select(order_by=binary_table.c.primary_id), \
- text('select * from binary_table order by '
- 'binary_table.primary_id',
- typemap=dict(data=mssql.MSVarBinary(8000),
- data_image=mssql.MSImage,
- data_slice=types.BINARY(100), pickled=PickleType,
- mypickle=MyPickleType), bind=testing.db):
+ text(
+ 'select * from binary_table order by '
+ 'binary_table.primary_id',
+ typemap=dict(
+ data=mssql.MSVarBinary(8000),
+ data_image=mssql.MSImage,
+ data_slice=types.BINARY(100), pickled=PickleType,
+ mypickle=MyPickleType),
+ bind=testing.db):
l = stmt.execute().fetchall()
eq_(list(stream1), list(l[0]['data']))
paddedstream = list(stream1[0:100])
@@ -721,7 +808,8 @@ class BinaryTest(fixtures.TestBase, AssertsExecutionResults):
eq_(l[0]['mypickle'].stuff, 'this is the right stuff')
def load_stream(self, name, len=3000):
- fp = open(os.path.join(os.path.dirname(__file__), "..", "..", name), 'rb')
+ fp = open(
+ os.path.join(os.path.dirname(__file__), "..", "..", name), 'rb')
stream = fp.read(len)
fp.close()
return stream
diff --git a/test/dialect/mysql/test_query.py b/test/dialect/mysql/test_query.py
index e085d86c1..ccb501651 100644
--- a/test/dialect/mysql/test_query.py
+++ b/test/dialect/mysql/test_query.py
@@ -55,7 +55,7 @@ class MatchTest(fixtures.TestBase, AssertsCompiledSQL):
])
matchtable.insert().execute([
{'id': 1,
- 'title': 'Agile Web Development with Rails',
+ 'title': 'Agile Web Development with Ruby On Rails',
'category_id': 2},
{'id': 2,
'title': 'Dive Into Python',
@@ -76,7 +76,7 @@ class MatchTest(fixtures.TestBase, AssertsCompiledSQL):
metadata.drop_all()
@testing.fails_on('mysql+mysqlconnector', 'uses pyformat')
- def test_expression(self):
+ def test_expression_format(self):
format = testing.db.dialect.paramstyle == 'format' and '%s' or '?'
self.assert_compile(
matchtable.c.title.match('somstr'),
@@ -88,7 +88,7 @@ class MatchTest(fixtures.TestBase, AssertsCompiledSQL):
@testing.fails_on('mysql+oursql', 'uses format')
@testing.fails_on('mysql+pyodbc', 'uses format')
@testing.fails_on('mysql+zxjdbc', 'uses format')
- def test_expression(self):
+ def test_expression_pyformat(self):
format = '%(title_1)s'
self.assert_compile(
matchtable.c.title.match('somstr'),
@@ -102,6 +102,14 @@ class MatchTest(fixtures.TestBase, AssertsCompiledSQL):
fetchall())
eq_([2, 5], [r.id for r in results])
+ def test_not_match(self):
+ results = (matchtable.select().
+ where(~matchtable.c.title.match('python')).
+ order_by(matchtable.c.id).
+ execute().
+ fetchall())
+ eq_([1, 3, 4], [r.id for r in results])
+
def test_simple_match_with_apostrophe(self):
results = (matchtable.select().
where(matchtable.c.title.match("Matz's")).
@@ -109,6 +117,26 @@ class MatchTest(fixtures.TestBase, AssertsCompiledSQL):
fetchall())
eq_([3], [r.id for r in results])
+ def test_return_value(self):
+ # test [ticket:3263]
+ result = testing.db.execute(
+ select([
+ matchtable.c.title.match('Agile Ruby Programming').label('ruby'),
+ matchtable.c.title.match('Dive Python').label('python'),
+ matchtable.c.title
+ ]).order_by(matchtable.c.id)
+ ).fetchall()
+ eq_(
+ result,
+ [
+ (2.0, 0.0, 'Agile Web Development with Ruby On Rails'),
+ (0.0, 2.0, 'Dive Into Python'),
+ (2.0, 0.0, "Programming Matz's Ruby"),
+ (0.0, 0.0, 'The Definitive Guide to Django'),
+ (0.0, 1.0, 'Python in a Nutshell')
+ ]
+ )
+
def test_or_match(self):
results1 = (matchtable.select().
where(or_(matchtable.c.title.match('nutshell'),
@@ -116,14 +144,13 @@ class MatchTest(fixtures.TestBase, AssertsCompiledSQL):
order_by(matchtable.c.id).
execute().
fetchall())
- eq_([3, 5], [r.id for r in results1])
+ eq_([1, 3, 5], [r.id for r in results1])
results2 = (matchtable.select().
where(matchtable.c.title.match('nutshell ruby')).
order_by(matchtable.c.id).
execute().
fetchall())
- eq_([3, 5], [r.id for r in results2])
-
+ eq_([1, 3, 5], [r.id for r in results2])
def test_and_match(self):
results1 = (matchtable.select().
diff --git a/test/dialect/postgresql/test_query.py b/test/dialect/postgresql/test_query.py
index a512b56fa..6841f397a 100644
--- a/test/dialect/postgresql/test_query.py
+++ b/test/dialect/postgresql/test_query.py
@@ -703,6 +703,12 @@ class MatchTest(fixtures.TestBase, AssertsCompiledSQL):
matchtable.c.id).execute().fetchall()
eq_([2, 5], [r.id for r in results])
+ def test_not_match(self):
+ results = matchtable.select().where(
+ ~matchtable.c.title.match('python')).order_by(
+ matchtable.c.id).execute().fetchall()
+ eq_([1, 3, 4], [r.id for r in results])
+
def test_simple_match_with_apostrophe(self):
results = matchtable.select().where(
matchtable.c.title.match("Matz's")).execute().fetchall()
diff --git a/test/dialect/postgresql/test_reflection.py b/test/dialect/postgresql/test_reflection.py
index 8de71216e..0dda1fa45 100644
--- a/test/dialect/postgresql/test_reflection.py
+++ b/test/dialect/postgresql/test_reflection.py
@@ -323,6 +323,18 @@ class ReflectionTest(fixtures.TestBase):
eq_([c.name for c in t2.primary_key], ['t_id'])
@testing.provide_metadata
+ def test_has_temporary_table(self):
+ assert not testing.db.has_table("some_temp_table")
+ user_tmp = Table(
+ "some_temp_table", self.metadata,
+ Column("id", Integer, primary_key=True),
+ Column('name', String(50)),
+ prefixes=['TEMPORARY']
+ )
+ user_tmp.create(testing.db)
+ assert testing.db.has_table("some_temp_table")
+
+ @testing.provide_metadata
def test_cross_schema_reflection_one(self):
meta1 = self.metadata
diff --git a/test/dialect/test_oracle.py b/test/dialect/test_oracle.py
index a771c5d80..1e50b9070 100644
--- a/test/dialect/test_oracle.py
+++ b/test/dialect/test_oracle.py
@@ -180,6 +180,51 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
t.update().values(plain=5), 'UPDATE s SET "plain"=:"plain"'
)
+ def test_cte(self):
+ part = table(
+ 'part',
+ column('part'),
+ column('sub_part'),
+ column('quantity')
+ )
+
+ included_parts = select([
+ part.c.sub_part, part.c.part, part.c.quantity
+ ]).where(part.c.part == "p1").\
+ cte(name="included_parts", recursive=True).\
+ suffix_with(
+ "search depth first by part set ord1",
+ "cycle part set y_cycle to 1 default 0", dialect='oracle')
+
+ incl_alias = included_parts.alias("pr1")
+ parts_alias = part.alias("p")
+ included_parts = included_parts.union_all(
+ select([
+ parts_alias.c.sub_part,
+ parts_alias.c.part, parts_alias.c.quantity
+ ]).where(parts_alias.c.part == incl_alias.c.sub_part)
+ )
+
+ q = select([
+ included_parts.c.sub_part,
+ func.sum(included_parts.c.quantity).label('total_quantity')]).\
+ group_by(included_parts.c.sub_part)
+
+ self.assert_compile(
+ q,
+ "WITH included_parts(sub_part, part, quantity) AS "
+ "(SELECT part.sub_part AS sub_part, part.part AS part, "
+ "part.quantity AS quantity FROM part WHERE part.part = :part_1 "
+ "UNION ALL SELECT p.sub_part AS sub_part, p.part AS part, "
+ "p.quantity AS quantity FROM part p, included_parts pr1 "
+ "WHERE p.part = pr1.sub_part) "
+ "search depth first by part set ord1 cycle part set "
+ "y_cycle to 1 default 0 "
+ "SELECT included_parts.sub_part, sum(included_parts.quantity) "
+ "AS total_quantity FROM included_parts "
+ "GROUP BY included_parts.sub_part"
+ )
+
def test_limit(self):
t = table('sometable', column('col1'), column('col2'))
s = select([t])
@@ -687,6 +732,34 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
)
+ def test_create_table_compress(self):
+ m = MetaData()
+ tbl1 = Table('testtbl1', m, Column('data', Integer),
+ oracle_compress=True)
+ tbl2 = Table('testtbl2', m, Column('data', Integer),
+ oracle_compress="OLTP")
+
+ self.assert_compile(schema.CreateTable(tbl1),
+ "CREATE TABLE testtbl1 (data INTEGER) COMPRESS")
+ self.assert_compile(schema.CreateTable(tbl2),
+ "CREATE TABLE testtbl2 (data INTEGER) "
+ "COMPRESS FOR OLTP")
+
+ def test_create_index_bitmap_compress(self):
+ m = MetaData()
+ tbl = Table('testtbl', m, Column('data', Integer))
+ idx1 = Index('idx1', tbl.c.data, oracle_compress=True)
+ idx2 = Index('idx2', tbl.c.data, oracle_compress=1)
+ idx3 = Index('idx3', tbl.c.data, oracle_bitmap=True)
+
+ self.assert_compile(schema.CreateIndex(idx1),
+ "CREATE INDEX idx1 ON testtbl (data) COMPRESS")
+ self.assert_compile(schema.CreateIndex(idx2),
+ "CREATE INDEX idx2 ON testtbl (data) COMPRESS 1")
+ self.assert_compile(schema.CreateIndex(idx3),
+ "CREATE BITMAP INDEX idx3 ON testtbl (data)")
+
+
class CompatFlagsTest(fixtures.TestBase, AssertsCompiledSQL):
def _dialect(self, server_version, **kw):
@@ -1727,6 +1800,58 @@ class UnsupportedIndexReflectTest(fixtures.TestBase):
m2 = MetaData(testing.db)
Table('test_index_reflect', m2, autoload=True)
+
+def all_tables_compression_missing():
+ try:
+ testing.db.execute('SELECT compression FROM all_tables')
+ return False
+ except Exception:
+ return True
+
+
+def all_tables_compress_for_missing():
+ try:
+ testing.db.execute('SELECT compress_for FROM all_tables')
+ return False
+ except:
+ return True
+
+
+class TableReflectionTest(fixtures.TestBase):
+ __only_on__ = 'oracle'
+
+ @testing.provide_metadata
+ @testing.fails_if(all_tables_compression_missing)
+ def test_reflect_basic_compression(self):
+ metadata = self.metadata
+
+ tbl = Table('test_compress', metadata,
+ Column('data', Integer, primary_key=True),
+ oracle_compress=True)
+ metadata.create_all()
+
+ m2 = MetaData(testing.db)
+
+ tbl = Table('test_compress', m2, autoload=True)
+ # Don't hardcode the exact value, but it must be non-empty
+ assert tbl.dialect_options['oracle']['compress']
+
+ @testing.provide_metadata
+ @testing.fails_if(all_tables_compress_for_missing)
+ def test_reflect_oltp_compression(self):
+ metadata = self.metadata
+
+ tbl = Table('test_compress', metadata,
+ Column('data', Integer, primary_key=True),
+ oracle_compress="OLTP")
+ metadata.create_all()
+
+ m2 = MetaData(testing.db)
+
+ tbl = Table('test_compress', m2, autoload=True)
+ assert tbl.dialect_options['oracle']['compress'] == "OLTP"
+
+
class RoundTripIndexTest(fixtures.TestBase):
__only_on__ = 'oracle'
@@ -1744,6 +1869,10 @@ class RoundTripIndexTest(fixtures.TestBase):
# "group" is a keyword, so lower case
normalind = Index('tableind', table.c.id_b, table.c.group)
+ compress1 = Index('compress1', table.c.id_a, table.c.id_b,
+ oracle_compress=True)
+ compress2 = Index('compress2', table.c.id_a, table.c.id_b, table.c.col,
+ oracle_compress=1)
metadata.create_all()
mirror = MetaData(testing.db)
@@ -1792,8 +1921,15 @@ class RoundTripIndexTest(fixtures.TestBase):
)
assert (Index, ('id_b', ), True) in reflected
assert (Index, ('col', 'group'), True) in reflected
+
+ idx = reflected[(Index, ('id_a', 'id_b', ), False)]
+ assert idx.dialect_options['oracle']['compress'] == 2
+
+ idx = reflected[(Index, ('id_a', 'id_b', 'col', ), False)]
+ assert idx.dialect_options['oracle']['compress'] == 1
+
eq_(len(reflectedtable.constraints), 1)
- eq_(len(reflectedtable.indexes), 3)
+ eq_(len(reflectedtable.indexes), 5)
class SequenceTest(fixtures.TestBase, AssertsCompiledSQL):
diff --git a/test/dialect/test_sqlite.py b/test/dialect/test_sqlite.py
index 124208dbe..22772d2fb 100644
--- a/test/dialect/test_sqlite.py
+++ b/test/dialect/test_sqlite.py
@@ -7,8 +7,8 @@ import datetime
from sqlalchemy.testing import eq_, assert_raises, \
assert_raises_message, is_
from sqlalchemy import Table, select, bindparam, Column,\
- MetaData, func, extract, ForeignKey, text, DefaultClause, and_, create_engine,\
- UniqueConstraint
+ MetaData, func, extract, ForeignKey, text, DefaultClause, and_, \
+ create_engine, UniqueConstraint
from sqlalchemy.types import Integer, String, Boolean, DateTime, Date, Time
from sqlalchemy import types as sqltypes
from sqlalchemy import event, inspect
@@ -21,6 +21,8 @@ from sqlalchemy.testing import fixtures, AssertsCompiledSQL, \
AssertsExecutionResults, engines
from sqlalchemy import testing
from sqlalchemy.schema import CreateTable
+from sqlalchemy.engine.reflection import Inspector
+
class TestTypes(fixtures.TestBase, AssertsExecutionResults):
@@ -32,9 +34,10 @@ class TestTypes(fixtures.TestBase, AssertsExecutionResults):
"""
meta = MetaData(testing.db)
- t = Table('bool_table', meta, Column('id', Integer,
- primary_key=True), Column('boo',
- Boolean(create_constraint=False)))
+ t = Table(
+ 'bool_table', meta,
+ Column('id', Integer, primary_key=True),
+ Column('boo', Boolean(create_constraint=False)))
try:
meta.create_all()
testing.db.execute("INSERT INTO bool_table (id, boo) "
@@ -69,28 +72,31 @@ class TestTypes(fixtures.TestBase, AssertsExecutionResults):
ValueError,
"Couldn't parse %s string." % disp,
lambda: testing.db.execute(
- text("select 'ASDF' as value", typemap={"value":typ})
+ text("select 'ASDF' as value", typemap={"value": typ})
).scalar()
)
def test_native_datetime(self):
dbapi = testing.db.dialect.dbapi
- connect_args = {'detect_types': dbapi.PARSE_DECLTYPES \
- | dbapi.PARSE_COLNAMES}
- engine = engines.testing_engine(options={'connect_args'
- : connect_args, 'native_datetime': True})
- t = Table('datetest', MetaData(),
- Column('id', Integer, primary_key=True),
- Column('d1', Date), Column('d2', sqltypes.TIMESTAMP))
+ connect_args = {
+ 'detect_types': dbapi.PARSE_DECLTYPES | dbapi.PARSE_COLNAMES}
+ engine = engines.testing_engine(
+ options={'connect_args': connect_args, 'native_datetime': True})
+ t = Table(
+ 'datetest', MetaData(),
+ Column('id', Integer, primary_key=True),
+ Column('d1', Date), Column('d2', sqltypes.TIMESTAMP))
t.create(engine)
try:
- engine.execute(t.insert(), {'d1': datetime.date(2010, 5,
- 10),
- 'd2': datetime.datetime( 2010, 5, 10, 12, 15, 25,
- )})
+ engine.execute(t.insert(), {
+ 'd1': datetime.date(2010, 5, 10),
+ 'd2': datetime.datetime(2010, 5, 10, 12, 15, 25)
+ })
row = engine.execute(t.select()).first()
- eq_(row, (1, datetime.date(2010, 5, 10),
- datetime.datetime( 2010, 5, 10, 12, 15, 25, )))
+ eq_(
+ row,
+ (1, datetime.date(2010, 5, 10),
+ datetime.datetime(2010, 5, 10, 12, 15, 25)))
r = engine.execute(func.current_date()).scalar()
assert isinstance(r, util.string_types)
finally:
@@ -100,15 +106,16 @@ class TestTypes(fixtures.TestBase, AssertsExecutionResults):
@testing.provide_metadata
def test_custom_datetime(self):
sqlite_date = sqlite.DATETIME(
- # 2004-05-21T00:00:00
- storage_format="%(year)04d-%(month)02d-%(day)02d"
- "T%(hour)02d:%(minute)02d:%(second)02d",
- regexp=r"(\d+)-(\d+)-(\d+)T(\d+):(\d+):(\d+)",
- )
+ # 2004-05-21T00:00:00
+ storage_format="%(year)04d-%(month)02d-%(day)02d"
+ "T%(hour)02d:%(minute)02d:%(second)02d",
+ regexp=r"(\d+)-(\d+)-(\d+)T(\d+):(\d+):(\d+)",
+ )
t = Table('t', self.metadata, Column('d', sqlite_date))
self.metadata.create_all(testing.db)
- testing.db.execute(t.insert().
- values(d=datetime.datetime(2010, 10, 15, 12, 37, 0)))
+ testing.db.execute(
+ t.insert().
+ values(d=datetime.datetime(2010, 10, 15, 12, 37, 0)))
testing.db.execute("insert into t (d) values ('2004-05-21T00:00:00')")
eq_(
testing.db.execute("select * from t order by d").fetchall(),
@@ -116,21 +123,70 @@ class TestTypes(fixtures.TestBase, AssertsExecutionResults):
)
eq_(
testing.db.execute(select([t.c.d]).order_by(t.c.d)).fetchall(),
- [(datetime.datetime(2004, 5, 21, 0, 0),),
- (datetime.datetime(2010, 10, 15, 12, 37),)]
+ [
+ (datetime.datetime(2004, 5, 21, 0, 0),),
+ (datetime.datetime(2010, 10, 15, 12, 37),)]
+ )
+
+ @testing.provide_metadata
+ def test_custom_datetime_text_affinity(self):
+ sqlite_date = sqlite.DATETIME(
+ storage_format="%(year)04d%(month)02d%(day)02d"
+ "%(hour)02d%(minute)02d%(second)02d",
+ regexp=r"(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})",
+ )
+ t = Table('t', self.metadata, Column('d', sqlite_date))
+ self.metadata.create_all(testing.db)
+ testing.db.execute(
+ t.insert().
+ values(d=datetime.datetime(2010, 10, 15, 12, 37, 0)))
+ testing.db.execute("insert into t (d) values ('20040521000000')")
+ eq_(
+ testing.db.execute("select * from t order by d").fetchall(),
+ [('20040521000000',), ('20101015123700',)]
+ )
+ eq_(
+ testing.db.execute(select([t.c.d]).order_by(t.c.d)).fetchall(),
+ [
+ (datetime.datetime(2004, 5, 21, 0, 0),),
+ (datetime.datetime(2010, 10, 15, 12, 37),)]
+ )
+
+ @testing.provide_metadata
+ def test_custom_date_text_affinity(self):
+ sqlite_date = sqlite.DATE(
+ storage_format="%(year)04d%(month)02d%(day)02d",
+ regexp=r"(\d{4})(\d{2})(\d{2})",
+ )
+ t = Table('t', self.metadata, Column('d', sqlite_date))
+ self.metadata.create_all(testing.db)
+ testing.db.execute(
+ t.insert().
+ values(d=datetime.date(2010, 10, 15)))
+ testing.db.execute("insert into t (d) values ('20040521')")
+ eq_(
+ testing.db.execute("select * from t order by d").fetchall(),
+ [('20040521',), ('20101015',)]
+ )
+ eq_(
+ testing.db.execute(select([t.c.d]).order_by(t.c.d)).fetchall(),
+ [
+ (datetime.date(2004, 5, 21),),
+ (datetime.date(2010, 10, 15),)]
)
@testing.provide_metadata
def test_custom_date(self):
sqlite_date = sqlite.DATE(
- # 2004-05-21T00:00:00
- storage_format="%(year)04d|%(month)02d|%(day)02d",
- regexp=r"(\d+)\|(\d+)\|(\d+)",
- )
+ # 2004-05-21T00:00:00
+ storage_format="%(year)04d|%(month)02d|%(day)02d",
+ regexp=r"(\d+)\|(\d+)\|(\d+)",
+ )
t = Table('t', self.metadata, Column('d', sqlite_date))
self.metadata.create_all(testing.db)
- testing.db.execute(t.insert().
- values(d=datetime.date(2010, 10, 15)))
+ testing.db.execute(
+ t.insert().
+ values(d=datetime.date(2010, 10, 15)))
testing.db.execute("insert into t (d) values ('2004|05|21')")
eq_(
testing.db.execute("select * from t order by d").fetchall(),
@@ -138,11 +194,11 @@ class TestTypes(fixtures.TestBase, AssertsExecutionResults):
)
eq_(
testing.db.execute(select([t.c.d]).order_by(t.c.d)).fetchall(),
- [(datetime.date(2004, 5, 21),),
- (datetime.date(2010, 10, 15),)]
+ [
+ (datetime.date(2004, 5, 21),),
+ (datetime.date(2010, 10, 15),)]
)
-
def test_no_convert_unicode(self):
"""test no utf-8 encoding occurs"""
@@ -156,7 +212,7 @@ class TestTypes(fixtures.TestBase, AssertsExecutionResults):
sqltypes.CHAR(convert_unicode=True),
sqltypes.Unicode(),
sqltypes.UnicodeText(),
- ):
+ ):
bindproc = t.dialect_impl(dialect).bind_processor(dialect)
assert not bindproc or \
isinstance(bindproc(util.u('some string')), util.text_type)
@@ -198,6 +254,7 @@ class DateTimeTest(fixtures.TestBase, AssertsCompiledSQL):
rp = sldt.result_processor(None, None)
eq_(rp(bp(dt)), dt)
+
class DateTest(fixtures.TestBase, AssertsCompiledSQL):
def test_default(self):
@@ -221,6 +278,7 @@ class DateTest(fixtures.TestBase, AssertsCompiledSQL):
rp = sldt.result_processor(None, None)
eq_(rp(bp(dt)), dt)
+
class TimeTest(fixtures.TestBase, AssertsCompiledSQL):
def test_default(self):
@@ -333,8 +391,9 @@ class DefaultsTest(fixtures.TestBase, AssertsCompiledSQL):
@testing.provide_metadata
def test_boolean_default(self):
- t = Table("t", self.metadata,
- Column("x", Boolean, server_default=sql.false()))
+ t = Table(
+ "t", self.metadata,
+ Column("x", Boolean, server_default=sql.false()))
t.create(testing.db)
testing.db.execute(t.insert())
testing.db.execute(t.insert().values(x=True))
@@ -351,7 +410,6 @@ class DefaultsTest(fixtures.TestBase, AssertsCompiledSQL):
eq_(info['default'], '3')
-
class DialectTest(fixtures.TestBase, AssertsExecutionResults):
__only_on__ = 'sqlite'
@@ -372,7 +430,7 @@ class DialectTest(fixtures.TestBase, AssertsExecutionResults):
Column('true', Integer),
Column('false', Integer),
Column('column', Integer),
- )
+ )
try:
meta.create_all()
t.insert().execute(safe=1)
@@ -403,8 +461,8 @@ class DialectTest(fixtures.TestBase, AssertsExecutionResults):
table1 = Table('django_admin_log', metadata, autoload=True)
table2 = Table('django_content_type', metadata, autoload=True)
j = table1.join(table2)
- assert j.onclause.compare(table1.c.content_type_id
- == table2.c.id)
+ assert j.onclause.compare(
+ table1.c.content_type_id == table2.c.id)
@testing.provide_metadata
def test_quoted_identifiers_functional_two(self):
@@ -426,8 +484,8 @@ class DialectTest(fixtures.TestBase, AssertsExecutionResults):
# unfortunately, still can't do this; sqlite quadruples
# up the quotes on the table name here for pragma foreign_key_list
- #testing.db.execute(r'''
- #CREATE TABLE """b""" (
+ # testing.db.execute(r'''
+ # CREATE TABLE """b""" (
# """id""" integer NOT NULL PRIMARY KEY,
# """aid""" integer NULL
# REFERENCES """a""" ("""id""")
@@ -439,14 +497,13 @@ class DialectTest(fixtures.TestBase, AssertsExecutionResults):
#table2 = Table(r'"b"', metadata, autoload=True)
#j = table1.join(table2)
- #assert j.onclause.compare(table1.c['"id"']
+ # assert j.onclause.compare(table1.c['"id"']
# == table2.c['"aid"'])
def test_legacy_quoted_identifiers_unit(self):
dialect = sqlite.dialect()
dialect._broken_fk_pragma_quotes = True
-
for row in [
(0, 'target', 'tid', 'id'),
(0, '"target"', 'tid', 'id'),
@@ -457,7 +514,9 @@ class DialectTest(fixtures.TestBase, AssertsExecutionResults):
fks = {}
fkeys = []
dialect._parse_fk(fks, fkeys, *row)
- eq_(fkeys, [{
+ eq_(
+ fkeys,
+ [{
'referred_table': 'target',
'referred_columns': ['id'],
'referred_schema': None,
@@ -470,17 +529,17 @@ class DialectTest(fixtures.TestBase, AssertsExecutionResults):
# amazingly, pysqlite seems to still deliver cursor.description
# as encoded bytes in py2k
- t = Table('x', self.metadata,
- Column(u('méil'), Integer, primary_key=True),
- Column(ue('\u6e2c\u8a66'), Integer),
- )
+ t = Table(
+ 'x', self.metadata,
+ Column(u('méil'), Integer, primary_key=True),
+ Column(ue('\u6e2c\u8a66'), Integer),
+ )
self.metadata.create_all(testing.db)
result = testing.db.execute(t.select())
assert u('méil') in result.keys()
assert ue('\u6e2c\u8a66') in result.keys()
-
def test_file_path_is_absolute(self):
d = pysqlite_dialect.dialect()
eq_(
@@ -498,48 +557,51 @@ class DialectTest(fixtures.TestBase, AssertsExecutionResults):
e = create_engine('sqlite+pysqlite:///foo.db')
assert e.pool.__class__ is pool.NullPool
+ @testing.provide_metadata
def test_dont_reflect_autoindex(self):
- meta = MetaData(testing.db)
- t = Table('foo', meta, Column('bar', String, primary_key=True))
+ meta = self.metadata
+ Table('foo', meta, Column('bar', String, primary_key=True))
meta.create_all()
- from sqlalchemy.engine.reflection import Inspector
- try:
- inspector = Inspector(testing.db)
- eq_(inspector.get_indexes('foo'), [])
- eq_(inspector.get_indexes('foo',
- include_auto_indexes=True), [{'unique': 1, 'name'
- : 'sqlite_autoindex_foo_1', 'column_names': ['bar']}])
- finally:
- meta.drop_all()
+ inspector = Inspector(testing.db)
+ eq_(inspector.get_indexes('foo'), [])
+ eq_(
+ inspector.get_indexes('foo', include_auto_indexes=True),
+ [{
+ 'unique': 1,
+ 'name': 'sqlite_autoindex_foo_1',
+ 'column_names': ['bar']}])
+ @testing.provide_metadata
def test_create_index_with_schema(self):
"""Test creation of index with explicit schema"""
- meta = MetaData(testing.db)
- t = Table('foo', meta, Column('bar', String, index=True),
- schema='main')
- try:
- meta.create_all()
- finally:
- meta.drop_all()
+ meta = self.metadata
+ Table(
+ 'foo', meta, Column('bar', String, index=True),
+ schema='main')
+ meta.create_all()
+ inspector = Inspector(testing.db)
+ eq_(
+ inspector.get_indexes('foo', schema='main'),
+ [{'unique': 0, 'name': u'ix_main_foo_bar',
+ 'column_names': [u'bar']}])
+ @testing.provide_metadata
def test_get_unique_constraints(self):
- meta = MetaData(testing.db)
- t1 = Table('foo', meta, Column('f', Integer),
- UniqueConstraint('f', name='foo_f'))
- t2 = Table('bar', meta, Column('b', Integer),
- UniqueConstraint('b', name='bar_b'),
- prefixes=['TEMPORARY'])
+ meta = self.metadata
+ Table(
+ 'foo', meta, Column('f', Integer),
+ UniqueConstraint('f', name='foo_f'))
+ Table(
+ 'bar', meta, Column('b', Integer),
+ UniqueConstraint('b', name='bar_b'),
+ prefixes=['TEMPORARY'])
meta.create_all()
- from sqlalchemy.engine.reflection import Inspector
- try:
- inspector = Inspector(testing.db)
- eq_(inspector.get_unique_constraints('foo'),
- [{'column_names': [u'f'], 'name': u'foo_f'}])
- eq_(inspector.get_unique_constraints('bar'),
- [{'column_names': [u'b'], 'name': u'bar_b'}])
- finally:
- meta.drop_all()
+ inspector = Inspector(testing.db)
+ eq_(inspector.get_unique_constraints('foo'),
+ [{'column_names': [u'f'], 'name': u'foo_f'}])
+ eq_(inspector.get_unique_constraints('bar'),
+ [{'column_names': [u'b'], 'name': u'bar_b'}])
class AttachedMemoryDBTest(fixtures.TestBase):
@@ -662,7 +724,7 @@ class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
'epoch': '%s',
'dow': '%w',
'week': '%W',
- }
+ }
for field, subst in mapping.items():
self.assert_compile(select([extract(field, t.c.col1)]),
"SELECT CAST(STRFTIME('%s', t.col1) AS "
@@ -685,53 +747,57 @@ class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
def test_constraints_with_schemas(self):
metadata = MetaData()
- t1 = Table('t1', metadata,
- Column('id', Integer, primary_key=True),
- schema='master')
- t2 = Table('t2', metadata,
- Column('id', Integer, primary_key=True),
- Column('t1_id', Integer, ForeignKey('master.t1.id')),
- schema='master'
- )
- t3 = Table('t3', metadata,
- Column('id', Integer, primary_key=True),
- Column('t1_id', Integer, ForeignKey('master.t1.id')),
- schema='alternate'
- )
- t4 = Table('t4', metadata,
- Column('id', Integer, primary_key=True),
- Column('t1_id', Integer, ForeignKey('master.t1.id')),
- )
+ Table(
+ 't1', metadata,
+ Column('id', Integer, primary_key=True),
+ schema='master')
+ t2 = Table(
+ 't2', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('t1_id', Integer, ForeignKey('master.t1.id')),
+ schema='master'
+ )
+ t3 = Table(
+ 't3', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('t1_id', Integer, ForeignKey('master.t1.id')),
+ schema='alternate'
+ )
+ t4 = Table(
+ 't4', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('t1_id', Integer, ForeignKey('master.t1.id')),
+ )
# schema->schema, generate REFERENCES with no schema name
self.assert_compile(
schema.CreateTable(t2),
- "CREATE TABLE master.t2 ("
- "id INTEGER NOT NULL, "
- "t1_id INTEGER, "
- "PRIMARY KEY (id), "
- "FOREIGN KEY(t1_id) REFERENCES t1 (id)"
- ")"
+ "CREATE TABLE master.t2 ("
+ "id INTEGER NOT NULL, "
+ "t1_id INTEGER, "
+ "PRIMARY KEY (id), "
+ "FOREIGN KEY(t1_id) REFERENCES t1 (id)"
+ ")"
)
# schema->different schema, don't generate REFERENCES
self.assert_compile(
schema.CreateTable(t3),
- "CREATE TABLE alternate.t3 ("
- "id INTEGER NOT NULL, "
- "t1_id INTEGER, "
- "PRIMARY KEY (id)"
- ")"
+ "CREATE TABLE alternate.t3 ("
+ "id INTEGER NOT NULL, "
+ "t1_id INTEGER, "
+ "PRIMARY KEY (id)"
+ ")"
)
# same for local schema
self.assert_compile(
schema.CreateTable(t4),
- "CREATE TABLE t4 ("
- "id INTEGER NOT NULL, "
- "t1_id INTEGER, "
- "PRIMARY KEY (id)"
- ")"
+ "CREATE TABLE t4 ("
+ "id INTEGER NOT NULL, "
+ "t1_id INTEGER, "
+ "PRIMARY KEY (id)"
+ ")"
)
@@ -756,30 +822,37 @@ class InsertTest(fixtures.TestBase, AssertsExecutionResults):
@testing.exclude('sqlite', '<', (3, 3, 8), 'no database support')
def test_empty_insert_pk1(self):
- self._test_empty_insert(Table('a', MetaData(testing.db),
- Column('id', Integer,
- primary_key=True)))
+ self._test_empty_insert(
+ Table(
+ 'a', MetaData(testing.db),
+ Column('id', Integer, primary_key=True)))
@testing.exclude('sqlite', '<', (3, 3, 8), 'no database support')
def test_empty_insert_pk2(self):
- assert_raises(exc.DBAPIError, self._test_empty_insert, Table('b'
- , MetaData(testing.db), Column('x', Integer,
- primary_key=True), Column('y', Integer,
- primary_key=True)))
+ assert_raises(
+ exc.DBAPIError, self._test_empty_insert,
+ Table(
+ 'b', MetaData(testing.db),
+ Column('x', Integer, primary_key=True),
+ Column('y', Integer, primary_key=True)))
@testing.exclude('sqlite', '<', (3, 3, 8), 'no database support')
def test_empty_insert_pk3(self):
- assert_raises(exc.DBAPIError, self._test_empty_insert, Table('c'
- , MetaData(testing.db), Column('x', Integer,
- primary_key=True), Column('y', Integer,
- DefaultClause('123'), primary_key=True)))
+ assert_raises(
+ exc.DBAPIError, self._test_empty_insert,
+ Table(
+ 'c', MetaData(testing.db),
+ Column('x', Integer, primary_key=True),
+ Column('y', Integer, DefaultClause('123'), primary_key=True)))
@testing.exclude('sqlite', '<', (3, 3, 8), 'no database support')
def test_empty_insert_pk4(self):
- self._test_empty_insert(Table('d', MetaData(testing.db),
- Column('x', Integer, primary_key=True),
- Column('y', Integer, DefaultClause('123'
- ))))
+ self._test_empty_insert(
+ Table(
+ 'd', MetaData(testing.db),
+ Column('x', Integer, primary_key=True),
+ Column('y', Integer, DefaultClause('123'))
+ ))
@testing.exclude('sqlite', '<', (3, 3, 8), 'no database support')
def test_empty_insert_nopk1(self):
@@ -788,9 +861,10 @@ class InsertTest(fixtures.TestBase, AssertsExecutionResults):
@testing.exclude('sqlite', '<', (3, 3, 8), 'no database support')
def test_empty_insert_nopk2(self):
- self._test_empty_insert(Table('f', MetaData(testing.db),
- Column('x', Integer), Column('y',
- Integer)))
+ self._test_empty_insert(
+ Table(
+ 'f', MetaData(testing.db),
+ Column('x', Integer), Column('y', Integer)))
def test_inserts_with_spaces(self):
tbl = Table('tbl', MetaData('sqlite:///'), Column('with space',
@@ -800,8 +874,8 @@ class InsertTest(fixtures.TestBase, AssertsExecutionResults):
tbl.insert().execute({'without': 123})
assert list(tbl.select().execute()) == [(None, 123)]
tbl.insert().execute({'with space': 456})
- assert list(tbl.select().execute()) == [(None, 123), (456,
- None)]
+ assert list(tbl.select().execute()) == [
+ (None, 123), (456, None)]
finally:
tbl.drop()
@@ -817,6 +891,8 @@ def full_text_search_missing():
except:
return True
+metadata = cattable = matchtable = None
+
class MatchTest(fixtures.TestBase, AssertsCompiledSQL):
@@ -845,19 +921,20 @@ class MatchTest(fixtures.TestBase, AssertsCompiledSQL):
""")
matchtable = Table('matchtable', metadata, autoload=True)
metadata.create_all()
- cattable.insert().execute([{'id': 1, 'description': 'Python'},
- {'id': 2, 'description': 'Ruby'}])
- matchtable.insert().execute([{'id': 1, 'title'
- : 'Agile Web Development with Rails'
- , 'category_id': 2}, {'id': 2,
- 'title': 'Dive Into Python',
- 'category_id': 1}, {'id': 3, 'title'
- : "Programming Matz's Ruby",
- 'category_id': 2}, {'id': 4, 'title'
- : 'The Definitive Guide to Django',
- 'category_id': 1}, {'id': 5, 'title'
- : 'Python in a Nutshell',
- 'category_id': 1}])
+ cattable.insert().execute(
+ [{'id': 1, 'description': 'Python'},
+ {'id': 2, 'description': 'Ruby'}])
+ matchtable.insert().execute(
+ [
+ {'id': 1, 'title': 'Agile Web Development with Rails',
+ 'category_id': 2},
+ {'id': 2, 'title': 'Dive Into Python', 'category_id': 1},
+ {'id': 3, 'title': "Programming Matz's Ruby",
+ 'category_id': 2},
+ {'id': 4, 'title': 'The Definitive Guide to Django',
+ 'category_id': 1},
+ {'id': 5, 'title': 'Python in a Nutshell', 'category_id': 1}
+ ])
@classmethod
def teardown_class(cls):
@@ -869,35 +946,38 @@ class MatchTest(fixtures.TestBase, AssertsCompiledSQL):
def test_simple_match(self):
results = \
- matchtable.select().where(matchtable.c.title.match('python'
- )).order_by(matchtable.c.id).execute().fetchall()
+ matchtable.select().where(
+ matchtable.c.title.match('python')).\
+ order_by(matchtable.c.id).execute().fetchall()
eq_([2, 5], [r.id for r in results])
def test_simple_prefix_match(self):
results = \
- matchtable.select().where(matchtable.c.title.match('nut*'
- )).execute().fetchall()
+ matchtable.select().where(
+ matchtable.c.title.match('nut*')).execute().fetchall()
eq_([5], [r.id for r in results])
def test_or_match(self):
results2 = \
matchtable.select().where(
- matchtable.c.title.match('nutshell OR ruby'
- )).order_by(matchtable.c.id).execute().fetchall()
+ matchtable.c.title.match('nutshell OR ruby')).\
+ order_by(matchtable.c.id).execute().fetchall()
eq_([3, 5], [r.id for r in results2])
def test_and_match(self):
results2 = \
matchtable.select().where(
- matchtable.c.title.match('python nutshell'
- )).execute().fetchall()
+ matchtable.c.title.match('python nutshell')
+ ).execute().fetchall()
eq_([5], [r.id for r in results2])
def test_match_across_joins(self):
- results = matchtable.select().where(and_(cattable.c.id
- == matchtable.c.category_id,
- cattable.c.description.match('Ruby'
- ))).order_by(matchtable.c.id).execute().fetchall()
+ results = matchtable.select().where(
+ and_(
+ cattable.c.id == matchtable.c.category_id,
+ cattable.c.description.match('Ruby')
+ )
+ ).order_by(matchtable.c.id).execute().fetchall()
eq_([1, 3], [r.id for r in results])
@@ -907,10 +987,11 @@ class AutoIncrementTest(fixtures.TestBase, AssertsCompiledSQL):
table = Table('autoinctable', MetaData(), Column('id', Integer,
primary_key=True), Column('x', Integer,
default=None), sqlite_autoincrement=True)
- self.assert_compile(schema.CreateTable(table),
- 'CREATE TABLE autoinctable (id INTEGER NOT '
- 'NULL PRIMARY KEY AUTOINCREMENT, x INTEGER)'
- , dialect=sqlite.dialect())
+ self.assert_compile(
+ schema.CreateTable(table),
+ 'CREATE TABLE autoinctable (id INTEGER NOT '
+ 'NULL PRIMARY KEY AUTOINCREMENT, x INTEGER)',
+ dialect=sqlite.dialect())
def test_sqlite_autoincrement_constraint(self):
table = Table(
@@ -920,7 +1001,7 @@ class AutoIncrementTest(fixtures.TestBase, AssertsCompiledSQL):
Column('x', Integer, default=None),
UniqueConstraint('x'),
sqlite_autoincrement=True,
- )
+ )
self.assert_compile(schema.CreateTable(table),
'CREATE TABLE autoinctable (id INTEGER NOT '
'NULL PRIMARY KEY AUTOINCREMENT, x '
@@ -944,7 +1025,7 @@ class AutoIncrementTest(fixtures.TestBase, AssertsCompiledSQL):
MetaData(),
Column('id', MyInteger, primary_key=True),
sqlite_autoincrement=True,
- )
+ )
self.assert_compile(schema.CreateTable(table),
'CREATE TABLE autoinctable (id INTEGER NOT '
'NULL PRIMARY KEY AUTOINCREMENT)',
@@ -958,7 +1039,8 @@ class ReflectHeadlessFKsTest(fixtures.TestBase):
testing.db.execute("CREATE TABLE a (id INTEGER PRIMARY KEY)")
# this syntax actually works on other DBs; perhaps we'd want to add
# tests to test_reflection
- testing.db.execute("CREATE TABLE b (id INTEGER PRIMARY KEY REFERENCES a)")
+ testing.db.execute(
+ "CREATE TABLE b (id INTEGER PRIMARY KEY REFERENCES a)")
def teardown(self):
testing.db.execute("drop table b")
@@ -971,21 +1053,24 @@ class ReflectHeadlessFKsTest(fixtures.TestBase):
assert b.c.id.references(a.c.id)
+
class ReflectFKConstraintTest(fixtures.TestBase):
__only_on__ = 'sqlite'
def setup(self):
testing.db.execute("CREATE TABLE a1 (id INTEGER PRIMARY KEY)")
testing.db.execute("CREATE TABLE a2 (id INTEGER PRIMARY KEY)")
- testing.db.execute("CREATE TABLE b (id INTEGER PRIMARY KEY, "
- "FOREIGN KEY(id) REFERENCES a1(id),"
- "FOREIGN KEY(id) REFERENCES a2(id)"
- ")")
- testing.db.execute("CREATE TABLE c (id INTEGER, "
- "CONSTRAINT bar PRIMARY KEY(id),"
- "CONSTRAINT foo1 FOREIGN KEY(id) REFERENCES a1(id),"
- "CONSTRAINT foo2 FOREIGN KEY(id) REFERENCES a2(id)"
- ")")
+ testing.db.execute(
+ "CREATE TABLE b (id INTEGER PRIMARY KEY, "
+ "FOREIGN KEY(id) REFERENCES a1(id),"
+ "FOREIGN KEY(id) REFERENCES a2(id)"
+ ")")
+ testing.db.execute(
+ "CREATE TABLE c (id INTEGER, "
+ "CONSTRAINT bar PRIMARY KEY(id),"
+ "CONSTRAINT foo1 FOREIGN KEY(id) REFERENCES a1(id),"
+ "CONSTRAINT foo2 FOREIGN KEY(id) REFERENCES a2(id)"
+ ")")
def teardown(self):
testing.db.execute("drop table c")
@@ -1005,7 +1090,8 @@ class ReflectFKConstraintTest(fixtures.TestBase):
def test_name_not_none(self):
# we don't have names for PK constraints,
# it appears we get back None in the pragma for
- # FKs also (also it doesn't even appear to be documented on sqlite's docs
+ # FKs also (also it doesn't even appear to be documented on
+ # sqlite's docs
# at http://www.sqlite.org/pragma.html#pragma_foreign_key_list
# how did we ever know that's the "name" field ??)
@@ -1018,6 +1104,7 @@ class ReflectFKConstraintTest(fixtures.TestBase):
class SavepointTest(fixtures.TablesTest):
+
"""test that savepoints work when we use the correct event setup"""
__only_on__ = 'sqlite'
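The "correct event setup" the docstring refers to is the documented pysqlite workaround for SQLite's implicit transaction handling; a minimal sketch, assuming the standard connect/begin event hooks (not part of this diff):

from sqlalchemy import create_engine, event

engine = create_engine("sqlite://")

@event.listens_for(engine, "connect")
def do_connect(dbapi_connection, connection_record):
    # disable pysqlite's emitting of the BEGIN statement entirely,
    # so that SAVEPOINT / RELEASE behave as expected
    dbapi_connection.isolation_level = None

@event.listens_for(engine, "begin")
def do_begin(conn):
    # emit our own BEGIN at transaction start
    conn.execute("BEGIN")

With this setup, connection.begin_nested() issues a real SAVEPOINT rather than being defeated by pysqlite's default autocommit-style behavior.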
@@ -1081,7 +1168,7 @@ class SavepointTest(fixtures.TablesTest):
connection = self.bind.connect()
transaction = connection.begin()
connection.execute(users.insert(), user_id=1, user_name='user1')
- trans2 = connection.begin_nested()
+ connection.begin_nested()
connection.execute(users.insert(), user_id=2, user_name='user2')
trans3 = connection.begin()
connection.execute(users.insert(), user_id=3, user_name='user3')
@@ -1127,6 +1214,16 @@ class TypeReflectionTest(fixtures.TestBase):
(sqltypes.Time, sqltypes.TIME()),
(sqltypes.BOOLEAN, sqltypes.BOOLEAN()),
(sqltypes.Boolean, sqltypes.BOOLEAN()),
+ (sqlite.DATE(
+ storage_format="%(year)04d%(month)02d%(day)02d",
+ ), sqltypes.DATE()),
+ (sqlite.TIME(
+ storage_format="%(hour)02d%(minute)02d%(second)02d",
+ ), sqltypes.TIME()),
+ (sqlite.DATETIME(
+ storage_format="%(year)04d%(month)02d%(day)02d"
+ "%(hour)02d%(minute)02d%(second)02d",
+ ), sqltypes.DATETIME()),
]
def _unsupported_args_fixture(self):
@@ -1169,8 +1266,8 @@ class TypeReflectionTest(fixtures.TestBase):
if warnings:
def go():
return dialect._resolve_type_affinity(from_)
- final_type = testing.assert_warnings(go,
- ["Could not instantiate"], regex=True)
+ final_type = testing.assert_warnings(
+ go, ["Could not instantiate"], regex=True)
else:
final_type = dialect._resolve_type_affinity(from_)
expected_type = type(to_)
@@ -1186,8 +1283,8 @@ class TypeReflectionTest(fixtures.TestBase):
if warnings:
def go():
return inspector.get_columns("foo")[0]
- col_info = testing.assert_warnings(go,
- ["Could not instantiate"], regex=True)
+ col_info = testing.assert_warnings(
+ go, ["Could not instantiate"], regex=True)
else:
col_info = inspector.get_columns("foo")[0]
expected_type = type(to_)
@@ -1207,7 +1304,8 @@ class TypeReflectionTest(fixtures.TestBase):
self._test_lookup_direct(self._fixed_lookup_fixture())
def test_lookup_direct_unsupported_args(self):
- self._test_lookup_direct(self._unsupported_args_fixture(), warnings=True)
+ self._test_lookup_direct(
+ self._unsupported_args_fixture(), warnings=True)
def test_lookup_direct_type_affinity(self):
self._test_lookup_direct(self._type_affinity_fixture())
@@ -1216,8 +1314,8 @@ class TypeReflectionTest(fixtures.TestBase):
self._test_round_trip(self._fixed_lookup_fixture())
def test_round_trip_direct_unsupported_args(self):
- self._test_round_trip(self._unsupported_args_fixture(), warnings=True)
+ self._test_round_trip(
+ self._unsupported_args_fixture(), warnings=True)
def test_round_trip_direct_type_affinity(self):
self._test_round_trip(self._type_affinity_fixture())
-
diff --git a/test/engine/test_parseconnect.py b/test/engine/test_parseconnect.py
index 391b92144..4a3da7d1c 100644
--- a/test/engine/test_parseconnect.py
+++ b/test/engine/test_parseconnect.py
@@ -1,12 +1,15 @@
from sqlalchemy.testing import assert_raises, eq_, assert_raises_message
-from sqlalchemy.util.compat import configparser, StringIO
import sqlalchemy.engine.url as url
from sqlalchemy import create_engine, engine_from_config, exc, pool
from sqlalchemy.engine.default import DefaultDialect
import sqlalchemy as tsa
from sqlalchemy.testing import fixtures
from sqlalchemy import testing
-from sqlalchemy.testing.mock import Mock, MagicMock, patch
+from sqlalchemy.testing.mock import Mock, MagicMock
+from sqlalchemy import event
+from sqlalchemy import select
+
+dialect = None
class ParseConnectTest(fixtures.TestBase):
@@ -31,21 +34,25 @@ class ParseConnectTest(fixtures.TestBase):
'dbtype://username:password@/database',
'dbtype:////usr/local/_xtest@example.com/members.db',
'dbtype://username:apples%2Foranges@hostspec/database',
- 'dbtype://username:password@[2001:da8:2004:1000:202:116:160:90]/database?foo=bar',
- 'dbtype://username:password@[2001:da8:2004:1000:202:116:160:90]:80/database?foo=bar'
- ):
+ 'dbtype://username:password@[2001:da8:2004:1000:202:116:160:90]'
+ '/database?foo=bar',
+ 'dbtype://username:password@[2001:da8:2004:1000:202:116:160:90]:80'
+ '/database?foo=bar'
+ ):
u = url.make_url(text)
assert u.drivername in ('dbtype', 'dbtype+apitype')
assert u.username in ('username', None)
assert u.password in ('password', 'apples/oranges', None)
- assert u.host in ('hostspec', '127.0.0.1',
- '2001:da8:2004:1000:202:116:160:90', '', None), u.host
- assert u.database in ('database',
- '/usr/local/_xtest@example.com/members.db',
- '/usr/db_file.db', ':memory:', '',
- 'foo/bar/im/a/file',
- 'E:/work/src/LEM/db/hello.db', None), u.database
+ assert u.host in (
+ 'hostspec', '127.0.0.1',
+ '2001:da8:2004:1000:202:116:160:90', '', None), u.host
+ assert u.database in (
+ 'database',
+ '/usr/local/_xtest@example.com/members.db',
+ '/usr/db_file.db', ':memory:', '',
+ 'foo/bar/im/a/file',
+ 'E:/work/src/LEM/db/hello.db', None), u.database
eq_(str(u), text)
def test_rfc1738_password(self):
@@ -53,13 +60,17 @@ class ParseConnectTest(fixtures.TestBase):
eq_(u.password, "pass word + other:words")
eq_(str(u), "dbtype://user:pass word + other%3Awords@host/dbname")
- u = url.make_url('dbtype://username:apples%2Foranges@hostspec/database')
+ u = url.make_url(
+ 'dbtype://username:apples%2Foranges@hostspec/database')
eq_(u.password, "apples/oranges")
eq_(str(u), 'dbtype://username:apples%2Foranges@hostspec/database')
- u = url.make_url('dbtype://username:apples%40oranges%40%40@hostspec/database')
+ u = url.make_url(
+ 'dbtype://username:apples%40oranges%40%40@hostspec/database')
eq_(u.password, "apples@oranges@@")
- eq_(str(u), 'dbtype://username:apples%40oranges%40%40@hostspec/database')
+ eq_(
+ str(u),
+ 'dbtype://username:apples%40oranges%40%40@hostspec/database')
u = url.make_url('dbtype://username%40:@hostspec/database')
eq_(u.password, '')
@@ -70,23 +81,23 @@ class ParseConnectTest(fixtures.TestBase):
eq_(u.password, 'pass/word')
eq_(str(u), 'dbtype://username:pass%2Fword@hostspec/database')
+
class DialectImportTest(fixtures.TestBase):
def test_import_base_dialects(self):
-
# the globals() somehow makes it work for the exec() + nose3.
for name in (
- 'mysql',
- 'firebird',
- 'postgresql',
- 'sqlite',
- 'oracle',
- 'mssql',
- ):
+ 'mysql',
+ 'firebird',
+ 'postgresql',
+ 'sqlite',
+ 'oracle',
+ 'mssql'):
exec ('from sqlalchemy.dialects import %s\ndialect = '
'%s.dialect()' % (name, name), globals())
eq_(dialect.name, name)
+
class CreateEngineTest(fixtures.TestBase):
"""test that create_engine arguments of different types get
propagated properly"""
@@ -97,26 +108,28 @@ class CreateEngineTest(fixtures.TestBase):
create_engine('postgresql://scott:tiger@somehost/test?foobe'
'r=12&lala=18&fooz=somevalue', module=dbapi,
_initialize=False)
- c = e.connect()
+ e.connect()
def test_kwargs(self):
dbapi = MockDBAPI(foober=12, lala=18, hoho={'this': 'dict'},
fooz='somevalue')
e = \
- create_engine('postgresql://scott:tiger@somehost/test?fooz='
- 'somevalue', connect_args={'foober': 12,
- 'lala': 18, 'hoho': {'this': 'dict'}},
- module=dbapi, _initialize=False)
- c = e.connect()
-
+ create_engine(
+ 'postgresql://scott:tiger@somehost/test?fooz='
+ 'somevalue', connect_args={
+ 'foober': 12,
+ 'lala': 18, 'hoho': {'this': 'dict'}},
+ module=dbapi, _initialize=False)
+ e.connect()
def test_engine_from_config(self):
dbapi = mock_dbapi
- config = \
- {'sqlalchemy.url': 'postgresql://scott:tiger@somehost/test'\
- '?fooz=somevalue', 'sqlalchemy.pool_recycle': '50',
- 'sqlalchemy.echo': 'true'}
+ config = {
+ 'sqlalchemy.url': 'postgresql://scott:tiger@somehost/test'
+ '?fooz=somevalue',
+ 'sqlalchemy.pool_recycle': '50',
+ 'sqlalchemy.echo': 'true'}
e = engine_from_config(config, module=dbapi, _initialize=False)
assert e.pool._recycle == 50
@@ -125,7 +138,6 @@ class CreateEngineTest(fixtures.TestBase):
'z=somevalue')
assert e.echo is True
-
def test_engine_from_config_custom(self):
from sqlalchemy import util
from sqlalchemy.dialects import registry
@@ -143,8 +155,9 @@ class CreateEngineTest(fixtures.TestBase):
global dialect
dialect = MyDialect
- registry.register("mockdialect.barb",
- ".".join(tokens[0:-1]), tokens[-1])
+ registry.register(
+ "mockdialect.barb",
+ ".".join(tokens[0:-1]), tokens[-1])
config = {
"sqlalchemy.url": "mockdialect+barb://",
@@ -155,7 +168,6 @@ class CreateEngineTest(fixtures.TestBase):
eq_(e.dialect.foobar, 5)
eq_(e.dialect.bathoho, False)
-
def test_custom(self):
dbapi = MockDBAPI(foober=12, lala=18, hoho={'this': 'dict'},
fooz='somevalue')
@@ -169,7 +181,7 @@ class CreateEngineTest(fixtures.TestBase):
e = create_engine('postgresql://', creator=connect,
module=dbapi, _initialize=False)
- c = e.connect()
+ e.connect()
def test_recycle(self):
dbapi = MockDBAPI(foober=12, lala=18, hoho={'this': 'dict'},
@@ -188,8 +200,9 @@ class CreateEngineTest(fixtures.TestBase):
(True, pool.reset_rollback),
(False, pool.reset_none),
]:
- e = create_engine('postgresql://', pool_reset_on_return=value,
- module=dbapi, _initialize=False)
+ e = create_engine(
+ 'postgresql://', pool_reset_on_return=value,
+ module=dbapi, _initialize=False)
assert e.pool._reset_on_return is expected
assert_raises(
@@ -217,7 +230,7 @@ class CreateEngineTest(fixtures.TestBase):
lala=5,
use_ansi=True,
module=mock_dbapi,
- )
+ )
assert_raises(TypeError, create_engine, 'postgresql://',
lala=5, module=mock_dbapi)
assert_raises(TypeError, create_engine, 'sqlite://', lala=5,
@@ -229,17 +242,159 @@ class CreateEngineTest(fixtures.TestBase):
def test_wraps_connect_in_dbapi(self):
e = create_engine('sqlite://')
sqlite3 = e.dialect.dbapi
-
dbapi = MockDBAPI()
dbapi.Error = sqlite3.Error,
dbapi.ProgrammingError = sqlite3.ProgrammingError
- dbapi.connect = Mock(side_effect=sqlite3.ProgrammingError("random error"))
+ dbapi.connect = Mock(
+ side_effect=sqlite3.ProgrammingError("random error"))
try:
create_engine('sqlite://', module=dbapi).connect()
assert False
except tsa.exc.DBAPIError as de:
assert not de.connection_invalidated
+ @testing.requires.sqlite
+ def test_handle_error_event_connect(self):
+ e = create_engine('sqlite://')
+ dbapi = MockDBAPI()
+ sqlite3 = e.dialect.dbapi
+ dbapi.Error = sqlite3.Error,
+ dbapi.ProgrammingError = sqlite3.ProgrammingError
+ dbapi.connect = Mock(
+ side_effect=sqlite3.ProgrammingError("random error"))
+
+ class MySpecialException(Exception):
+ pass
+
+ eng = create_engine('sqlite://', module=dbapi)
+
+ @event.listens_for(eng, "handle_error")
+ def handle_error(ctx):
+ assert ctx.engine is eng
+ assert ctx.connection is None
+ raise MySpecialException("failed operation")
+
+ assert_raises(
+ MySpecialException,
+ eng.connect
+ )
+
+ @testing.requires.sqlite
+ def test_handle_error_event_revalidate(self):
+ e = create_engine('sqlite://')
+ dbapi = MockDBAPI()
+ sqlite3 = e.dialect.dbapi
+ dbapi.Error = sqlite3.Error,
+ dbapi.ProgrammingError = sqlite3.ProgrammingError
+
+ class MySpecialException(Exception):
+ pass
+
+ eng = create_engine('sqlite://', module=dbapi, _initialize=False)
+
+ @event.listens_for(eng, "handle_error")
+ def handle_error(ctx):
+ assert ctx.engine is eng
+ assert ctx.connection is conn
+ assert isinstance(ctx.sqlalchemy_exception, exc.ProgrammingError)
+ raise MySpecialException("failed operation")
+
+ conn = eng.connect()
+ conn.invalidate()
+
+ dbapi.connect = Mock(
+ side_effect=sqlite3.ProgrammingError("random error"))
+
+ assert_raises(
+ MySpecialException,
+ getattr, conn, 'connection'
+ )
+
+ @testing.requires.sqlite
+ def test_handle_error_event_implicit_revalidate(self):
+ e = create_engine('sqlite://')
+ dbapi = MockDBAPI()
+ sqlite3 = e.dialect.dbapi
+ dbapi.Error = sqlite3.Error,
+ dbapi.ProgrammingError = sqlite3.ProgrammingError
+
+ class MySpecialException(Exception):
+ pass
+
+ eng = create_engine('sqlite://', module=dbapi, _initialize=False)
+
+ @event.listens_for(eng, "handle_error")
+ def handle_error(ctx):
+ assert ctx.engine is eng
+ assert ctx.connection is conn
+ assert isinstance(ctx.sqlalchemy_exception, exc.ProgrammingError)
+ raise MySpecialException("failed operation")
+
+ conn = eng.connect()
+ conn.invalidate()
+
+ dbapi.connect = Mock(
+ side_effect=sqlite3.ProgrammingError("random error"))
+
+ assert_raises(
+ MySpecialException,
+ conn.execute, select([1])
+ )
+
+ @testing.requires.sqlite
+ def test_handle_error_custom_connect(self):
+ e = create_engine('sqlite://')
+
+ dbapi = MockDBAPI()
+ sqlite3 = e.dialect.dbapi
+ dbapi.Error = sqlite3.Error,
+ dbapi.ProgrammingError = sqlite3.ProgrammingError
+
+ class MySpecialException(Exception):
+ pass
+
+ def custom_connect():
+ raise sqlite3.ProgrammingError("random error")
+
+ eng = create_engine('sqlite://', module=dbapi, creator=custom_connect)
+
+ @event.listens_for(eng, "handle_error")
+ def handle_error(ctx):
+ assert ctx.engine is eng
+ assert ctx.connection is None
+ raise MySpecialException("failed operation")
+
+ assert_raises(
+ MySpecialException,
+ eng.connect
+ )
+
+ @testing.requires.sqlite
+ def test_handle_error_event_connect_invalidate_flag(self):
+ e = create_engine('sqlite://')
+ dbapi = MockDBAPI()
+ sqlite3 = e.dialect.dbapi
+ dbapi.Error = sqlite3.Error,
+ dbapi.ProgrammingError = sqlite3.ProgrammingError
+ dbapi.connect = Mock(
+ side_effect=sqlite3.ProgrammingError(
+ "Cannot operate on a closed database."))
+
+ class MySpecialException(Exception):
+ pass
+
+ eng = create_engine('sqlite://', module=dbapi)
+
+ @event.listens_for(eng, "handle_error")
+ def handle_error(ctx):
+ assert ctx.is_disconnect
+ ctx.is_disconnect = False
+
+ try:
+ eng.connect()
+ assert False
+ except tsa.exc.DBAPIError as de:
+ assert not de.connection_invalidated
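Outside the test harness, the handle_error hook exercised by these new tests attaches to any engine in the same way; a minimal sketch, assuming application code that wants to wrap DBAPI errors (the RuntimeError wrapping is an illustration, not part of this diff):

from sqlalchemy import create_engine, event

eng = create_engine("sqlite://")

@event.listens_for(eng, "handle_error")
def handle_error(context):
    # context carries .engine, .connection, .sqlalchemy_exception,
    # .original_exception and .is_disconnect, as the tests above assert;
    # raising here replaces the exception SQLAlchemy would propagate
    raise RuntimeError("wrapped: %s" % context.original_exception)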
@testing.requires.sqlite
def test_dont_touch_non_dbapi_exception_on_connect(self):
@@ -260,10 +415,12 @@ class CreateEngineTest(fixtures.TestBase):
eq_(is_disconnect.call_count, 0)
def test_ensure_dialect_does_is_disconnect_no_conn(self):
- """test that is_disconnect() doesn't choke if no connection, cursor given."""
+ """test that is_disconnect() doesn't choke if no connection,
+ cursor given."""
dialect = testing.db.dialect
dbapi = dialect.dbapi
- assert not dialect.is_disconnect(dbapi.OperationalError("test"), None, None)
+ assert not dialect.is_disconnect(
+ dbapi.OperationalError("test"), None, None)
@testing.requires.sqlite
def test_invalidate_on_connect(self):
@@ -280,8 +437,9 @@ class CreateEngineTest(fixtures.TestBase):
dbapi = MockDBAPI()
dbapi.Error = sqlite3.Error,
dbapi.ProgrammingError = sqlite3.ProgrammingError
- dbapi.connect = Mock(side_effect=sqlite3.ProgrammingError(
- "Cannot operate on a closed database."))
+ dbapi.connect = Mock(
+ side_effect=sqlite3.ProgrammingError(
+ "Cannot operate on a closed database."))
try:
create_engine('sqlite://', module=dbapi).connect()
assert False
@@ -313,7 +471,7 @@ class CreateEngineTest(fixtures.TestBase):
echo_pool=None,
module=mock_dbapi,
_initialize=False,
- )
+ )
assert e.pool._recycle == 50
# these args work for QueuePool
@@ -325,7 +483,7 @@ class CreateEngineTest(fixtures.TestBase):
poolclass=tsa.pool.QueuePool,
module=mock_dbapi,
_initialize=False,
- )
+ )
# but not SingletonThreadPool
@@ -338,7 +496,8 @@ class CreateEngineTest(fixtures.TestBase):
poolclass=tsa.pool.SingletonThreadPool,
module=mock_sqlite_dbapi,
_initialize=False,
- )
+ )
+
class TestRegNewDBAPI(fixtures.TestBase):
def test_register_base(self):
@@ -361,7 +520,8 @@ class TestRegNewDBAPI(fixtures.TestBase):
global dialect
dialect = MockDialect
- registry.register("mockdialect.foob", ".".join(tokens[0:-1]), tokens[-1])
+ registry.register(
+ "mockdialect.foob", ".".join(tokens[0:-1]), tokens[-1])
e = create_engine("mockdialect+foob://")
assert isinstance(e.dialect, MockDialect)
@@ -373,13 +533,16 @@ class TestRegNewDBAPI(fixtures.TestBase):
e = create_engine("mysql+my_mock_dialect://")
assert isinstance(e.dialect, MockDialect)
+
class MockDialect(DefaultDialect):
@classmethod
def dbapi(cls, **kw):
return MockDBAPI()
+
def MockDBAPI(**assert_kwargs):
connection = Mock(get_server_version_info=Mock(return_value='5.0'))
+
def connect(*args, **kwargs):
for k in assert_kwargs:
assert k in kwargs, 'key %s not present in dictionary' % k
@@ -389,12 +552,12 @@ def MockDBAPI(**assert_kwargs):
return connection
return MagicMock(
- sqlite_version_info=(99, 9, 9,),
- version_info=(99, 9, 9,),
- sqlite_version='99.9.9',
- paramstyle='named',
- connect=Mock(side_effect=connect)
- )
+ sqlite_version_info=(99, 9, 9,),
+ version_info=(99, 9, 9,),
+ sqlite_version='99.9.9',
+ paramstyle='named',
+ connect=Mock(side_effect=connect)
+ )
mock_dbapi = MockDBAPI()
mock_sqlite_dbapi = msd = MockDBAPI()
diff --git a/test/engine/test_reconnect.py b/test/engine/test_reconnect.py
index 4500ada6a..0efce87ce 100644
--- a/test/engine/test_reconnect.py
+++ b/test/engine/test_reconnect.py
@@ -517,7 +517,7 @@ class RealReconnectTest(fixtures.TestBase):
assert c1.invalidated
assert c1_branch.invalidated
- c1_branch._revalidate_connection()
+ c1_branch._revalidate_connection(_wrap=True)
assert not c1.invalidated
assert not c1_branch.invalidated
@@ -535,7 +535,7 @@ class RealReconnectTest(fixtures.TestBase):
assert c1.invalidated
assert c1_branch.invalidated
- c1._revalidate_connection()
+ c1._revalidate_connection(_wrap=True)
assert not c1.invalidated
assert not c1_branch.invalidated
diff --git a/test/engine/test_reflection.py b/test/engine/test_reflection.py
index c18b8b944..087610333 100644
--- a/test/engine/test_reflection.py
+++ b/test/engine/test_reflection.py
@@ -799,6 +799,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
@testing.crashes('oracle', 'FIXME: unknown, confirm not fails_on')
+ @testing.requires.check_constraints
@testing.provide_metadata
def test_reserved(self):
diff --git a/test/orm/test_joins.py b/test/orm/test_joins.py
index eba47dbec..c519032b3 100644
--- a/test/orm/test_joins.py
+++ b/test/orm/test_joins.py
@@ -362,6 +362,8 @@ class InheritedJoinTest(fixtures.MappedTest, AssertsCompiledSQL):
class JoinOnSynonymTest(_fixtures.FixtureTest, AssertsCompiledSQL):
+ __dialect__ = 'default'
+
@classmethod
def setup_mappers(cls):
User = cls.classes.User
@@ -430,6 +432,16 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
sess.query(literal_column('x'), User).join, Address
)
+ def test_isouter_flag(self):
+ User = self.classes.User
+
+ self.assert_compile(
+ create_session().query(User).join('orders', isouter=True),
+ "SELECT users.id AS users_id, users.name AS users_name "
+ "FROM users LEFT OUTER JOIN orders ON users.id = orders.user_id"
+ )
+
+
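As a usage note, the join(..., isouter=True) flag this test covers renders the same LEFT OUTER JOIN as the existing outerjoin() method; a minimal sketch against the fixture mappings used here (session setup assumed):

# both compile to "users LEFT OUTER JOIN orders"
q1 = create_session().query(User).join('orders', isouter=True)
q2 = create_session().query(User).outerjoin('orders')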
def test_multi_tuple_form(self):
"""test the 'tuple' form of join, now superseded
by the two-element join() form.
@@ -1091,7 +1103,6 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
[User(name='fred')]
)
-
def test_aliased_classes(self):
User, Address = self.classes.User, self.classes.Address
@@ -1240,7 +1251,6 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
def test_joins_from_adapted_entities(self):
User = self.classes.User
-
# test for #1853
session = create_session()
@@ -1277,6 +1287,45 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
'anon_2 ON anon_2.id = anon_1.users_id',
use_default_dialect=True)
+ def test_joins_from_adapted_entities_isouter(self):
+ User = self.classes.User
+
+ # test for #1853
+
+ session = create_session()
+ first = session.query(User)
+ second = session.query(User)
+ unioned = first.union(second)
+ subquery = session.query(User.id).subquery()
+ join = subquery, subquery.c.id == User.id
+ joined = unioned.join(*join, isouter=True)
+ self.assert_compile(joined,
+ 'SELECT anon_1.users_id AS '
+ 'anon_1_users_id, anon_1.users_name AS '
+ 'anon_1_users_name FROM (SELECT users.id '
+ 'AS users_id, users.name AS users_name '
+ 'FROM users UNION SELECT users.id AS '
+ 'users_id, users.name AS users_name FROM '
+ 'users) AS anon_1 LEFT OUTER JOIN (SELECT '
+ 'users.id AS id FROM users) AS anon_2 ON '
+ 'anon_2.id = anon_1.users_id',
+ use_default_dialect=True)
+
+ first = session.query(User.id)
+ second = session.query(User.id)
+ unioned = first.union(second)
+ subquery = session.query(User.id).subquery()
+ join = subquery, subquery.c.id == User.id
+ joined = unioned.join(*join, isouter=True)
+ self.assert_compile(joined,
+ 'SELECT anon_1.users_id AS anon_1_users_id '
+ 'FROM (SELECT users.id AS users_id FROM '
+ 'users UNION SELECT users.id AS users_id '
+ 'FROM users) AS anon_1 LEFT OUTER JOIN '
+ '(SELECT users.id AS id FROM users) AS '
+ 'anon_2 ON anon_2.id = anon_1.users_id',
+ use_default_dialect=True)
+
def test_reset_joinpoint(self):
User = self.classes.User
@@ -1285,6 +1334,9 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
result = create_session().query(User).join('orders', 'items', aliased=aliased).filter_by(id=3).reset_joinpoint().join('orders','address', aliased=aliased).filter_by(id=1).all()
assert [User(id=7, name='jack')] == result
+ result = create_session().query(User).join('orders', 'items', aliased=aliased, isouter=True).filter_by(id=3).reset_joinpoint().join('orders','address', aliased=aliased, isouter=True).filter_by(id=1).all()
+ assert [User(id=7, name='jack')] == result
+
result = create_session().query(User).outerjoin('orders', 'items', aliased=aliased).filter_by(id=3).reset_joinpoint().outerjoin('orders','address', aliased=aliased).filter_by(id=1).all()
assert [User(id=7, name='jack')] == result
diff --git a/test/orm/test_mapper.py b/test/orm/test_mapper.py
index 0a9cbfc71..63ba1a207 100644
--- a/test/orm/test_mapper.py
+++ b/test/orm/test_mapper.py
@@ -222,7 +222,8 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
mapper(Address, addresses)
s = create_session()
a = s.query(Address).from_statement(
- sa.select([addresses.c.id, addresses.c.user_id])).first()
+ sa.select([addresses.c.id, addresses.c.user_id]).
+ order_by(addresses.c.id)).first()
eq_(a.user_id, 7)
eq_(a.id, 1)
# email address auto-defers
diff --git a/test/orm/test_of_type.py b/test/orm/test_of_type.py
index 836d85cc7..b9ebc2daf 100644
--- a/test/orm/test_of_type.py
+++ b/test/orm/test_of_type.py
@@ -14,6 +14,7 @@ from .inheritance._poly_fixtures import Company, Person, Engineer, Manager, Boss
_PolymorphicPolymorphic, _PolymorphicUnions, _PolymorphicJoins,\
_PolymorphicAliasedJoins
+
class _PolymorphicTestBase(object):
__dialect__ = 'default'
@@ -191,6 +192,21 @@ class _PolymorphicTestBase(object):
)
self.assert_sql_count(testing.db, go, 3)
+ def test_joinedload_stacked_of_type(self):
+ sess = Session()
+
+ def go():
+ eq_(
+ sess.query(Company).
+ filter_by(company_id=1).
+ options(
+ joinedload(Company.employees.of_type(Manager)),
+ joinedload(Company.employees.of_type(Engineer))
+ ).all(),
+ [self._company_with_emps_fixture()[0]]
+ )
+ self.assert_sql_count(testing.db, go, 2)
+
class PolymorphicPolymorphicTest(_PolymorphicTestBase, _PolymorphicPolymorphic):
def _polymorphic_join_target(self, cls):
diff --git a/test/orm/test_query.py b/test/orm/test_query.py
index f14ad7864..354bbe5b1 100644
--- a/test/orm/test_query.py
+++ b/test/orm/test_query.py
@@ -1569,6 +1569,7 @@ class FilterTest(QueryTest, AssertsCompiledSQL):
assert [] == sess.query(User).order_by(User.id)[3:3]
assert [] == sess.query(User).order_by(User.id)[0:0]
+ @testing.requires.bound_limit_offset
def test_select_with_bindparam_offset_limit(self):
"""Does a query allow bindparam for the limit?"""
User = self.classes.User
diff --git a/test/profiles.txt b/test/profiles.txt
index dc4d05264..97ef13873 100644
--- a/test/profiles.txt
+++ b/test/profiles.txt
@@ -1,15 +1,15 @@
# /Users/classic/dev/sqlalchemy/test/profiles.txt
# This file is written out on a per-environment basis.
-# For each test in aaa_profiling, the corresponding function and
+# For each test in aaa_profiling, the corresponding function and
# environment is located within this file. If it doesn't exist,
# the test is skipped.
-# If a callcount does exist, it is compared to what we received.
+# If a callcount does exist, it is compared to what we received.
# assertions are raised if the counts do not match.
-#
-# To add a new callcount test, apply the function_call_count
-# decorator and re-run the tests using the --write-profiles
+#
+# To add a new callcount test, apply the function_call_count
+# decorator and re-run the tests using the --write-profiles
# option - this file will be rewritten including the new count.
-#
+#
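The rewrapped header describes the callcount workflow; a hedged sketch of the kind of test that produces an entry in this file (the class name and workload are hypothetical; the decorator lives in sqlalchemy.testing.profiling):

from sqlalchemy.testing import fixtures, profiling

class MyCallCountTest(fixtures.TestBase):

    @profiling.function_call_count()
    def test_something(self):
        # exercise the code path whose call count should stay stable;
        # with --write-profiles the measured count is recorded into
        # this file for the current environment
        run_the_operation()  # hypothetical workload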
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_insert
@@ -132,8 +132,6 @@ test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_postgresql_psycop
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_postgresql_psycopg2_nocextensions 40149
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_sqlite_pysqlite_cextensions 19280
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_sqlite_pysqlite_nocextensions 28297
-
-
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_postgresql_psycopg2_nocextensions 29138
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_sqlite_pysqlite_cextensions 32398
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_sqlite_pysqlite_nocextensions 37327
@@ -148,8 +146,6 @@ test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_postgresql
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_postgresql_psycopg2_nocextensions 30054
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_sqlite_pysqlite_cextensions 27144
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_sqlite_pysqlite_nocextensions 30149
-
-
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_postgresql_psycopg2_nocextensions 29068
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_sqlite_pysqlite_cextensions 32197
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_sqlite_pysqlite_nocextensions 31179
@@ -164,8 +160,6 @@ test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_postgresql_psycopg2_nocextensions 17988
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_sqlite_pysqlite_cextensions 17988
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_sqlite_pysqlite_nocextensions 17988
-
-
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_postgresql_psycopg2_nocextensions 18988
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_sqlite_pysqlite_cextensions 18988
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_sqlite_pysqlite_nocextensions 18988
@@ -180,8 +174,6 @@ test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_postgresql_psycopg2_nocextensions 122553
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_sqlite_pysqlite_cextensions 162315
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_sqlite_pysqlite_nocextensions 165111
-
-
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_postgresql_psycopg2_nocextensions 125352
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_sqlite_pysqlite_cextensions 169566
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_sqlite_pysqlite_nocextensions 171364
@@ -196,8 +188,6 @@ test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_postgresql_psycopg2_nocextensions 19219
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_cextensions 22288
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_nocextensions 22530
-
-
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_postgresql_psycopg2_nocextensions 19492
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_sqlite_pysqlite_cextensions 23067
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_sqlite_pysqlite_nocextensions 23271
@@ -212,8 +202,6 @@ test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_postgresql_psycopg2_ce
test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_postgresql_psycopg2_nocextensions 1348
test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_cextensions 1601
test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_nocextensions 1626
-
-
test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_postgresql_psycopg2_nocextensions 1355
test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_sqlite_pysqlite_cextensions 1656
test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_sqlite_pysqlite_nocextensions 1671
@@ -228,8 +216,6 @@ test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_postgresql_psycopg2
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_postgresql_psycopg2_nocextensions 117,18
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_sqlite_pysqlite_cextensions 117,18
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_sqlite_pysqlite_nocextensions 117,18
-
-
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_postgresql_psycopg2_nocextensions 122,19
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_sqlite_pysqlite_cextensions 122,19
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_sqlite_pysqlite_nocextensions 122,19
@@ -244,8 +230,6 @@ test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_postgresql_psy
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_postgresql_psycopg2_nocextensions 91
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_sqlite_pysqlite_cextensions 91
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_sqlite_pysqlite_nocextensions 91
-
-
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_postgresql_psycopg2_nocextensions 78
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_sqlite_pysqlite_cextensions 78
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_sqlite_pysqlite_nocextensions 78
@@ -260,8 +244,6 @@ test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_postgresql_ps
test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_postgresql_psycopg2_nocextensions 31
test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_sqlite_pysqlite_cextensions 31
test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_sqlite_pysqlite_nocextensions 31
-
-
test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_postgresql_psycopg2_nocextensions 24
test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_sqlite_pysqlite_cextensions 24
test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_sqlite_pysqlite_nocextensions 24
@@ -276,8 +258,6 @@ test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_po
test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_postgresql_psycopg2_nocextensions 8
test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_sqlite_pysqlite_cextensions 8
test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_sqlite_pysqlite_nocextensions 8
-
-
test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_postgresql_psycopg2_nocextensions 9
test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_sqlite_pysqlite_cextensions 9
test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_sqlite_pysqlite_nocextensions 9
@@ -286,22 +266,16 @@ test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.4_po
# TEST: test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute
-
-
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqldb_cextensions 43
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqldb_nocextensions 45
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_postgresql_psycopg2_cextensions 43
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_postgresql_psycopg2_nocextensions 45
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_sqlite_pysqlite_cextensions 43
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_sqlite_pysqlite_nocextensions 45
-
-
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_postgresql_psycopg2_cextensions 43
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_postgresql_psycopg2_nocextensions 43
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_sqlite_pysqlite_cextensions 43
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_sqlite_pysqlite_nocextensions 43
-
-
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_postgresql_psycopg2_cextensions 43
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_postgresql_psycopg2_nocextensions 43
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_sqlite_pysqlite_cextensions 43
@@ -309,22 +283,16 @@ test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute
# TEST: test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute
-
-
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqldb_cextensions 78
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqldb_nocextensions 80
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_postgresql_psycopg2_cextensions 78
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_postgresql_psycopg2_nocextensions 80
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_sqlite_pysqlite_cextensions 78
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_sqlite_pysqlite_nocextensions 80
-
-
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_postgresql_psycopg2_cextensions 78
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_postgresql_psycopg2_nocextensions 78
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_sqlite_pysqlite_cextensions 78
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_sqlite_pysqlite_nocextensions 78
-
-
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_postgresql_psycopg2_cextensions 78
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_postgresql_psycopg2_nocextensions 78
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_sqlite_pysqlite_cextensions 78
@@ -332,22 +300,16 @@ test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_
# TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile
-
-
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqldb_cextensions 15
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqldb_nocextensions 15
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_postgresql_psycopg2_cextensions 15
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_postgresql_psycopg2_nocextensions 15
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_sqlite_pysqlite_cextensions 15
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_sqlite_pysqlite_nocextensions 15
-
-
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_postgresql_psycopg2_cextensions 16
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_postgresql_psycopg2_nocextensions 16
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_sqlite_pysqlite_cextensions 16
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_sqlite_pysqlite_nocextensions 16
-
-
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_postgresql_psycopg2_cextensions 16
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_postgresql_psycopg2_nocextensions 16
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_sqlite_pysqlite_cextensions 16
@@ -355,22 +317,16 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4
# TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_string
-
-
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_cextensions 514
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_nocextensions 15534
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_cextensions 20501
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_nocextensions 35521
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_cextensions 457
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_nocextensions 15477
-
-
test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_postgresql_psycopg2_cextensions 489
test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_postgresql_psycopg2_nocextensions 14489
test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_sqlite_pysqlite_cextensions 462
test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_sqlite_pysqlite_nocextensions 14462
-
-
test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_postgresql_psycopg2_cextensions 489
test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_postgresql_psycopg2_nocextensions 14489
test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_sqlite_pysqlite_cextensions 462
@@ -378,22 +334,16 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_sqlite_pysqlite_
# TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_unicode
-
-
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_cextensions 514
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_nocextensions 45534
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_cextensions 20501
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_nocextensions 35521
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_cextensions 457
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_nocextensions 15477
-
-
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_postgresql_psycopg2_cextensions 489
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_postgresql_psycopg2_nocextensions 14489
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_sqlite_pysqlite_cextensions 462
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_sqlite_pysqlite_nocextensions 14462
-
-
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_postgresql_psycopg2_cextensions 489
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_postgresql_psycopg2_nocextensions 14489
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_sqlite_pysqlite_cextensions 462
@@ -403,16 +353,16 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_sqlite_pysqlite
test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_cextensions 5562,277,3697,11893,1106,1968,2433
test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_nocextensions 5606,277,3929,13595,1223,2011,2692
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_cextensions 5238,259,3577,11529,1077,1886,2439
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_nocextensions 5260,259,3673,12701,1171,1893,2631
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_cextensions 5221,259,3577,11529,1077,1883,2439
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_nocextensions 5243,259,3673,12701,1171,1890,2631
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_cextensions 5238,273,3577,11529,1077,1886,2439
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_nocextensions 5260,273,3673,12701,1171,1893,2631
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_cextensions 5221,273,3577,11529,1077,1883,2439
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_nocextensions 5243,273,3697,12796,1187,1923,2653
# TEST: test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_cextensions 6098,399,6666,18183,1118,2606
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_nocextensions 6169,404,6898,19614,1226,2671
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_cextensions 6008,386,6716,18339,1091,2630
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_nocextensions 6093,391,6820,19366,1177,2659
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_cextensions 6007,386,6716,18339,1091,2630
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_nocextensions 6087,391,6820,19366,1177,2659
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_cextensions 6389,407,6826,18499,1134,2661
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_nocextensions 6480,412,7058,19930,1242,2726
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_cextensions 6268,394,6860,18613,1107,2679
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_nocextensions 6361,399,6964,19640,1193,2708
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_cextensions 6275,394,6860,18613,1107,2679
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_nocextensions 6360,399,6964,19640,1193,2708
diff --git a/test/requirements.py b/test/requirements.py
index 05ca8d717..ffbdfba23 100644
--- a/test/requirements.py
+++ b/test/requirements.py
@@ -30,7 +30,7 @@ def exclude(db, op, spec, description=None):
class DefaultRequirements(SuiteRequirements):
@property
def deferrable_or_no_constraints(self):
- """Target database must support derferable constraints."""
+ """Target database must support deferrable constraints."""
return skip_if([
no_support('firebird', 'not supported by database'),
@@ -39,6 +39,12 @@ class DefaultRequirements(SuiteRequirements):
])
@property
+ def check_constraints(self):
+ """Target database must support check constraints."""
+
+ return exclusions.open()
+
+ @property
def named_constraints(self):
"""target database must support names for constraints."""
@@ -121,6 +127,17 @@ class DefaultRequirements(SuiteRequirements):
)
@property
+ def temporary_tables(self):
+ """target database supports temporary tables"""
+ return skip_if(
+ ["mssql"], "sql server has some other syntax?"
+ )
+
+ @property
+ def temp_table_reflection(self):
+ return self.temporary_tables
+
+ @property
def reflectable_autoincrement(self):
"""Target database must support tables that can automatically generate
PKs assuming they were reflected.
@@ -443,6 +460,7 @@ class DefaultRequirements(SuiteRequirements):
)
+
@property
def emulated_lastrowid(self):
""""target dialect retrieves cursor.lastrowid or an equivalent
@@ -760,6 +778,17 @@ class DefaultRequirements(SuiteRequirements):
"Not supported on MySQL + Windows"
)
+ @property
+ def mssql_freetds(self):
+ return only_on(
+ LambdaPredicate(
+ lambda config: (
+ (against(config, 'mssql+pyodbc') and
+ config.db.dialect.freetds)
+ or against(config, 'mssql+pymssql')
+ )
+ )
+ )
@property
def selectone(self):
diff --git a/test/sql/test_compiler.py b/test/sql/test_compiler.py
index bfafed599..428fc8986 100644
--- a/test/sql/test_compiler.py
+++ b/test/sql/test_compiler.py
@@ -435,6 +435,19 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
dialect=default.DefaultDialect(paramstyle='pyformat')
)
+ def test_anon_param_name_on_keys(self):
+ self.assert_compile(
+ keyed.insert(),
+ "INSERT INTO keyed (x, y, z) VALUES (%(colx)s, %(coly)s, %(z)s)",
+ dialect=default.DefaultDialect(paramstyle='pyformat')
+ )
+ self.assert_compile(
+ keyed.c.coly == 5,
+ "keyed.y = %(coly_1)s",
+ checkparams={'coly_1': 5},
+ dialect=default.DefaultDialect(paramstyle='pyformat')
+ )
+
def test_dupe_columns(self):
"""test that deduping is performed against clause
element identity, not rendered result."""
@@ -2427,7 +2440,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
"""SELECT /*+ "QuotedName" idx1 */ "QuotedName".col1 """
"""FROM "QuotedName" WHERE "QuotedName".col1 > :col1_1"""),
(s7, oracle_d,
- """SELECT /*+ SomeName idx1 */ "SomeName".col1 FROM """
+ """SELECT /*+ "SomeName" idx1 */ "SomeName".col1 FROM """
""""QuotedName" "SomeName" WHERE "SomeName".col1 > :col1_1"""),
]:
self.assert_compile(
@@ -3424,3 +3437,32 @@ class ResultMapTest(fixtures.TestBase):
is_(
comp.result_map['t1_a'][1][2], t1.c.a
)
+
+ def test_insert_with_select_values(self):
+ astring = Column('a', String)
+ aint = Column('a', Integer)
+ m = MetaData()
+ Table('t1', m, astring)
+ t2 = Table('t2', m, aint)
+
+ stmt = t2.insert().values(a=select([astring])).returning(aint)
+ comp = stmt.compile(dialect=postgresql.dialect())
+ eq_(
+ comp.result_map,
+ {'a': ('a', (aint, 'a', 'a'), aint.type)}
+ )
+
+ def test_insert_from_select(self):
+ astring = Column('a', String)
+ aint = Column('a', Integer)
+ m = MetaData()
+ Table('t1', m, astring)
+ t2 = Table('t2', m, aint)
+
+ stmt = t2.insert().from_select(['a'], select([astring])).\
+ returning(aint)
+ comp = stmt.compile(dialect=postgresql.dialect())
+ eq_(
+ comp.result_map,
+ {'a': ('a', (aint, 'a', 'a'), aint.type)}
+ )
diff --git a/test/sql/test_constraints.py b/test/sql/test_constraints.py
index 2f054dac1..c0b5806ac 100644
--- a/test/sql/test_constraints.py
+++ b/test/sql/test_constraints.py
@@ -130,6 +130,7 @@ class ConstraintGenTest(fixtures.TestBase, AssertsExecutionResults):
*assertions
)
+ @testing.requires.check_constraints
@testing.provide_metadata
def test_check_constraint_create(self):
metadata = self.metadata
diff --git a/test/sql/test_cte.py b/test/sql/test_cte.py
index b907fe649..c7906dcb7 100644
--- a/test/sql/test_cte.py
+++ b/test/sql/test_cte.py
@@ -462,3 +462,33 @@ class CTETest(fixtures.TestBase, AssertsCompiledSQL):
'FROM "order" JOIN regional_sales AS anon_1 '
'ON anon_1."order" = "order"."order"'
)
+
+ def test_suffixes(self):
+ orders = table('order', column('order'))
+ s = select([orders.c.order]).cte("regional_sales")
+ s = s.suffix_with("pg suffix", dialect='postgresql')
+ s = s.suffix_with('oracle suffix', dialect='oracle')
+ stmt = select([orders]).where(orders.c.order > s.c.order)
+
+ self.assert_compile(
+ stmt,
+ 'WITH regional_sales AS (SELECT "order"."order" AS "order" '
+ 'FROM "order") SELECT "order"."order" FROM "order", '
+ 'regional_sales WHERE "order"."order" > regional_sales."order"'
+ )
+
+ self.assert_compile(
+ stmt,
+ 'WITH regional_sales AS (SELECT "order"."order" AS "order" '
+ 'FROM "order") oracle suffix SELECT "order"."order" FROM "order", '
+ 'regional_sales WHERE "order"."order" > regional_sales."order"',
+ dialect='oracle'
+ )
+
+ self.assert_compile(
+ stmt,
+ 'WITH regional_sales AS (SELECT "order"."order" AS "order" '
+ 'FROM "order") pg suffix SELECT "order"."order" FROM "order", '
+ 'regional_sales WHERE "order"."order" > regional_sales."order"',
+ dialect='postgresql'
+ ) \ No newline at end of file
diff --git a/test/sql/test_metadata.py b/test/sql/test_metadata.py
index 3c55242fd..52ecf88c5 100644
--- a/test/sql/test_metadata.py
+++ b/test/sql/test_metadata.py
@@ -227,6 +227,50 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
fk1 = ForeignKeyConstraint(('foo', ), ('bar', ), table=t1)
assert fk1 in t1.constraints
+ def test_fk_constraint_col_collection_w_table(self):
+ c1 = Column('foo', Integer)
+ c2 = Column('bar', Integer)
+ m = MetaData()
+ t1 = Table('t', m, c1, c2)
+ fk1 = ForeignKeyConstraint(('foo', ), ('bar', ), table=t1)
+ eq_(dict(fk1.columns), {"foo": c1})
+
+ def test_fk_constraint_col_collection_no_table(self):
+ fk1 = ForeignKeyConstraint(('foo', 'bat'), ('bar', 'hoho'))
+ eq_(dict(fk1.columns), {})
+ eq_(fk1.column_keys, ['foo', 'bat'])
+ eq_(fk1._col_description, 'foo, bat')
+ eq_(fk1._elements, {"foo": fk1.elements[0], "bat": fk1.elements[1]})
+
+ def test_fk_constraint_col_collection_no_table_real_cols(self):
+ c1 = Column('foo', Integer)
+ c2 = Column('bar', Integer)
+ fk1 = ForeignKeyConstraint((c1, ), (c2, ))
+ eq_(dict(fk1.columns), {})
+ eq_(fk1.column_keys, ['foo'])
+ eq_(fk1._col_description, 'foo')
+ eq_(fk1._elements, {"foo": fk1.elements[0]})
+
+ def test_fk_constraint_col_collection_added_to_table(self):
+ c1 = Column('foo', Integer)
+ m = MetaData()
+ fk1 = ForeignKeyConstraint(('foo', ), ('bar', ))
+ Table('t', m, c1, fk1)
+ eq_(dict(fk1.columns), {"foo": c1})
+ eq_(fk1._elements, {"foo": fk1.elements[0]})
+
+ def test_fk_constraint_col_collection_via_fk(self):
+ fk = ForeignKey('bar')
+ c1 = Column('foo', Integer, fk)
+ m = MetaData()
+ t1 = Table('t', m, c1)
+ fk1 = fk.constraint
+ eq_(fk1.column_keys, ['foo'])
+ assert fk1 in t1.constraints
+ eq_(fk1.column_keys, ['foo'])
+ eq_(dict(fk1.columns), {"foo": c1})
+ eq_(fk1._elements, {"foo": fk})
+
def test_fk_no_such_parent_col_error(self):
meta = MetaData()
a = Table('a', meta, Column('a', Integer))
@@ -486,6 +530,7 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
class ToMetaDataTest(fixtures.TestBase, ComparesTables):
+ @testing.requires.check_constraints
def test_copy(self):
from sqlalchemy.testing.schema import Table
meta = MetaData()
@@ -1115,8 +1160,10 @@ class InfoTest(fixtures.TestBase):
t = Table('x', MetaData(), info={'foo': 'bar'})
eq_(t.info, {'foo': 'bar'})
+
class TableTest(fixtures.TestBase, AssertsCompiledSQL):
+ @testing.requires.temporary_tables
@testing.skip_if('mssql', 'different col format')
def test_prefixes(self):
from sqlalchemy import Table
@@ -1429,6 +1476,46 @@ class SchemaTypeTest(fixtures.TestBase):
m1.create_all(testing.db)
+ def test_boolean_constraint_type_doesnt_double(self):
+ m1 = MetaData()
+
+ t1 = Table('x', m1, Column("flag", Boolean()))
+ eq_(
+ len([
+ c for c in t1.constraints
+ if isinstance(c, CheckConstraint)]),
+ 1
+ )
+ m2 = MetaData()
+ t2 = t1.tometadata(m2)
+
+ eq_(
+ len([
+ c for c in t2.constraints
+ if isinstance(c, CheckConstraint)]),
+ 1
+ )
+
+ def test_enum_constraint_type_doesnt_double(self):
+ m1 = MetaData()
+
+ t1 = Table('x', m1, Column("flag", Enum('a', 'b', 'c')))
+ eq_(
+ len([
+ c for c in t1.constraints
+ if isinstance(c, CheckConstraint)]),
+ 1
+ )
+ m2 = MetaData()
+ t2 = t1.tometadata(m2)
+
+ eq_(
+ len([
+ c for c in t2.constraints
+ if isinstance(c, CheckConstraint)]),
+ 1
+ )
+
class SchemaTest(fixtures.TestBase, AssertsCompiledSQL):
diff --git a/test/sql/test_operators.py b/test/sql/test_operators.py
index e8ad88511..f8ac1528f 100644
--- a/test/sql/test_operators.py
+++ b/test/sql/test_operators.py
@@ -12,7 +12,8 @@ from sqlalchemy import exc
from sqlalchemy.engine import default
from sqlalchemy.sql.elements import _literal_as_text
from sqlalchemy.schema import Column, Table, MetaData
-from sqlalchemy.types import TypeEngine, TypeDecorator, UserDefinedType, Boolean
+from sqlalchemy.types import TypeEngine, TypeDecorator, UserDefinedType, \
+ Boolean, NullType, MatchType
from sqlalchemy.dialects import mysql, firebird, postgresql, oracle, \
sqlite, mssql
from sqlalchemy import util
@@ -1619,6 +1620,31 @@ class MatchTest(fixtures.TestBase, testing.AssertsCompiledSQL):
"CONTAINS (mytable.myid, :myid_1)",
dialect=oracle.dialect())
+ def test_match_is_now_matchtype(self):
+ expr = self.table1.c.myid.match('somstr')
+ assert expr.type._type_affinity is MatchType()._type_affinity
+ assert isinstance(expr.type, MatchType)
+
+ def test_boolean_inversion_postgresql(self):
+ self.assert_compile(
+ ~self.table1.c.myid.match('somstr'),
+ "NOT mytable.myid @@ to_tsquery(%(myid_1)s)",
+ dialect=postgresql.dialect())
+
+ def test_boolean_inversion_mysql(self):
+ # because mysql doesn't have native boolean
+ self.assert_compile(
+ ~self.table1.c.myid.match('somstr'),
+ "NOT MATCH (mytable.myid) AGAINST (%s IN BOOLEAN MODE)",
+ dialect=mysql.dialect())
+
+ def test_boolean_inversion_mssql(self):
+ # because mssql doesn't have native boolean
+ self.assert_compile(
+ ~self.table1.c.myid.match('somstr'),
+ "NOT CONTAINS (mytable.myid, :myid_1)",
+ dialect=mssql.dialect())
+
class ComposedLikeOperatorsTest(fixtures.TestBase, testing.AssertsCompiledSQL):
__dialect__ = 'default'
diff --git a/test/sql/test_returning.py b/test/sql/test_returning.py
index 79a0b38a5..cd9f632b9 100644
--- a/test/sql/test_returning.py
+++ b/test/sql/test_returning.py
@@ -160,6 +160,39 @@ class ReturningTest(fixtures.TestBase, AssertsExecutionResults):
eq_(result2.fetchall(), [(2, False), ])
+class CompositeStatementTest(fixtures.TestBase):
+ __requires__ = 'returning',
+ __backend__ = True
+
+ @testing.provide_metadata
+ def test_select_doesnt_pollute_result(self):
+ class MyType(TypeDecorator):
+ impl = Integer
+
+ def process_result_value(self, value, dialect):
+ raise Exception("I have not been selected")
+
+ t1 = Table(
+ 't1', self.metadata,
+ Column('x', MyType())
+ )
+
+ t2 = Table(
+ 't2', self.metadata,
+ Column('x', Integer)
+ )
+
+ self.metadata.create_all(testing.db)
+ with testing.db.connect() as conn:
+ conn.execute(t1.insert().values(x=5))
+
+ stmt = t2.insert().values(
+ x=select([t1.c.x]).as_scalar()).returning(t2.c.x)
+
+ result = conn.execute(stmt)
+ eq_(result.scalar(), 5)
+
+
class SequenceReturningTest(fixtures.TestBase):
__requires__ = 'returning', 'sequences'
__backend__ = True
diff --git a/test/sql/test_types.py b/test/sql/test_types.py
index efa0f90ae..26dc6c842 100644
--- a/test/sql/test_types.py
+++ b/test/sql/test_types.py
@@ -558,7 +558,7 @@ class TypeCoerceCastTest(fixtures.TablesTest):
@classmethod
def define_tables(cls, metadata):
class MyType(types.TypeDecorator):
- impl = String
+ impl = String(50)
def process_bind_param(self, value, dialect):
return "BIND_IN" + str(value)