-rw-r--r--  LICENSE | 2
-rw-r--r--  doc/build/changelog/changelog_10.rst | 137
-rw-r--r--  doc/build/changelog/changelog_11.rst | 208
-rw-r--r--  doc/build/changelog/migration_11.rst | 619
-rw-r--r--  doc/build/conf.py | 2
-rw-r--r--  doc/build/copyright.rst | 2
-rw-r--r--  doc/build/core/selectable.rst | 7
-rw-r--r--  doc/build/core/tutorial.rst | 68
-rw-r--r--  doc/build/glossary.rst | 1
-rw-r--r--  doc/build/orm/cascades.rst | 4
-rw-r--r--  doc/build/orm/extensions/mutable.rst | 7
-rw-r--r--  doc/build/orm/relationship_persistence.rst | 2
-rw-r--r--  doc/build/orm/tutorial.rst | 2
-rw-r--r--  examples/performance/short_selects.py | 4
-rw-r--r--  lib/sqlalchemy/__init__.py | 3
-rw-r--r--  lib/sqlalchemy/cextension/processors.c | 2
-rw-r--r--  lib/sqlalchemy/cextension/resultproxy.c | 2
-rw-r--r--  lib/sqlalchemy/cextension/utils.c | 2
-rw-r--r--  lib/sqlalchemy/connectors/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/connectors/mxodbc.py | 2
-rw-r--r--  lib/sqlalchemy/connectors/pyodbc.py | 2
-rw-r--r--  lib/sqlalchemy/connectors/zxJDBC.py | 2
-rw-r--r--  lib/sqlalchemy/databases/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/__init__.py | 11
-rw-r--r--  lib/sqlalchemy/dialects/firebird/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/firebird/base.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/firebird/fdb.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/firebird/kinterbasdb.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mssql/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mssql/adodbapi.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mssql/base.py | 124
-rw-r--r--  lib/sqlalchemy/dialects/mssql/information_schema.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mssql/mxodbc.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mssql/pymssql.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mssql/pyodbc.py | 9
-rw-r--r--  lib/sqlalchemy/dialects/mssql/zxjdbc.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mysql/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mysql/base.py | 61
-rw-r--r--  lib/sqlalchemy/dialects/mysql/cymysql.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mysql/enumerated.py | 44
-rw-r--r--  lib/sqlalchemy/dialects/mysql/gaerdbms.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mysql/json.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mysql/mysqlconnector.py | 15
-rw-r--r--  lib/sqlalchemy/dialects/mysql/mysqldb.py | 14
-rw-r--r--  lib/sqlalchemy/dialects/mysql/oursql.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mysql/pymysql.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mysql/pyodbc.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mysql/reflection.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mysql/types.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mysql/zxjdbc.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/oracle/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/oracle/base.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/oracle/cx_oracle.py | 18
-rw-r--r--  lib/sqlalchemy/dialects/oracle/zxjdbc.py | 5
-rw-r--r--  lib/sqlalchemy/dialects/postgres.py | 18
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/array.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/base.py | 13
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/ext.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/hstore.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/json.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/pg8000.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/psycopg2.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/pypostgresql.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/ranges.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/zxjdbc.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/sqlite/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/sqlite/base.py | 12
-rw-r--r--  lib/sqlalchemy/dialects/sqlite/pysqlcipher.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/sqlite/pysqlite.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/sybase/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/sybase/base.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/sybase/mxodbc.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/sybase/pyodbc.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/sybase/pysybase.py | 2
-rw-r--r--  lib/sqlalchemy/engine/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/engine/base.py | 4
-rw-r--r--  lib/sqlalchemy/engine/default.py | 2
-rw-r--r--  lib/sqlalchemy/engine/interfaces.py | 2
-rw-r--r--  lib/sqlalchemy/engine/reflection.py | 2
-rw-r--r--  lib/sqlalchemy/engine/result.py | 16
-rw-r--r--  lib/sqlalchemy/engine/strategies.py | 2
-rw-r--r--  lib/sqlalchemy/engine/threadlocal.py | 2
-rw-r--r--  lib/sqlalchemy/engine/url.py | 2
-rw-r--r--  lib/sqlalchemy/engine/util.py | 2
-rw-r--r--  lib/sqlalchemy/event/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/event/api.py | 2
-rw-r--r--  lib/sqlalchemy/event/attr.py | 2
-rw-r--r--  lib/sqlalchemy/event/base.py | 2
-rw-r--r--  lib/sqlalchemy/event/legacy.py | 2
-rw-r--r--  lib/sqlalchemy/event/registry.py | 2
-rw-r--r--  lib/sqlalchemy/events.py | 42
-rw-r--r--  lib/sqlalchemy/exc.py | 2
-rw-r--r--  lib/sqlalchemy/ext/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/ext/associationproxy.py | 2
-rw-r--r--  lib/sqlalchemy/ext/automap.py | 2
-rw-r--r--  lib/sqlalchemy/ext/baked.py | 2
-rw-r--r--  lib/sqlalchemy/ext/compiler.py | 2
-rw-r--r--  lib/sqlalchemy/ext/declarative/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/ext/declarative/api.py | 2
-rw-r--r--  lib/sqlalchemy/ext/declarative/base.py | 2
-rw-r--r--  lib/sqlalchemy/ext/declarative/clsregistry.py | 2
-rw-r--r--  lib/sqlalchemy/ext/horizontal_shard.py | 2
-rw-r--r--  lib/sqlalchemy/ext/hybrid.py | 2
-rw-r--r--  lib/sqlalchemy/ext/mutable.py | 198
-rw-r--r--  lib/sqlalchemy/ext/orderinglist.py | 2
-rw-r--r--  lib/sqlalchemy/ext/serializer.py | 2
-rw-r--r--  lib/sqlalchemy/inspection.py | 2
-rw-r--r--  lib/sqlalchemy/interfaces.py | 2
-rw-r--r--  lib/sqlalchemy/log.py | 2
-rw-r--r--  lib/sqlalchemy/orm/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/orm/attributes.py | 2
-rw-r--r--  lib/sqlalchemy/orm/base.py | 2
-rw-r--r--  lib/sqlalchemy/orm/collections.py | 2
-rw-r--r--  lib/sqlalchemy/orm/dependency.py | 2
-rw-r--r--  lib/sqlalchemy/orm/deprecated_interfaces.py | 2
-rw-r--r--  lib/sqlalchemy/orm/descriptor_props.py | 2
-rw-r--r--  lib/sqlalchemy/orm/dynamic.py | 2
-rw-r--r--  lib/sqlalchemy/orm/evaluator.py | 2
-rw-r--r--  lib/sqlalchemy/orm/events.py | 2
-rw-r--r--  lib/sqlalchemy/orm/exc.py | 2
-rw-r--r--  lib/sqlalchemy/orm/identity.py | 2
-rw-r--r--  lib/sqlalchemy/orm/instrumentation.py | 2
-rw-r--r--  lib/sqlalchemy/orm/interfaces.py | 2
-rw-r--r--  lib/sqlalchemy/orm/loading.py | 4
-rw-r--r--  lib/sqlalchemy/orm/mapper.py | 2
-rw-r--r--  lib/sqlalchemy/orm/path_registry.py | 2
-rw-r--r--  lib/sqlalchemy/orm/persistence.py | 36
-rw-r--r--  lib/sqlalchemy/orm/properties.py | 2
-rw-r--r--  lib/sqlalchemy/orm/query.py | 110
-rw-r--r--  lib/sqlalchemy/orm/relationships.py | 2
-rw-r--r--  lib/sqlalchemy/orm/scoping.py | 2
-rw-r--r--  lib/sqlalchemy/orm/session.py | 118
-rw-r--r--  lib/sqlalchemy/orm/state.py | 4
-rw-r--r--  lib/sqlalchemy/orm/strategies.py | 24
-rw-r--r--  lib/sqlalchemy/orm/strategy_options.py | 2
-rw-r--r--  lib/sqlalchemy/orm/sync.py | 2
-rw-r--r--  lib/sqlalchemy/orm/unitofwork.py | 15
-rw-r--r--  lib/sqlalchemy/orm/util.py | 28
-rw-r--r--  lib/sqlalchemy/pool.py | 58
-rw-r--r--  lib/sqlalchemy/processors.py | 2
-rw-r--r--  lib/sqlalchemy/schema.py | 2
-rw-r--r--  lib/sqlalchemy/sql/__init__.py | 3
-rw-r--r--  lib/sqlalchemy/sql/annotation.py | 2
-rw-r--r--  lib/sqlalchemy/sql/base.py | 2
-rw-r--r--  lib/sqlalchemy/sql/compiler.py | 142
-rw-r--r--  lib/sqlalchemy/sql/crud.py | 58
-rw-r--r--  lib/sqlalchemy/sql/ddl.py | 2
-rw-r--r--  lib/sqlalchemy/sql/default_comparator.py | 3
-rw-r--r--  lib/sqlalchemy/sql/dml.py | 11
-rw-r--r--  lib/sqlalchemy/sql/elements.py | 21
-rw-r--r--  lib/sqlalchemy/sql/expression.py | 14
-rw-r--r--  lib/sqlalchemy/sql/functions.py | 14
-rw-r--r--  lib/sqlalchemy/sql/naming.py | 2
-rw-r--r--  lib/sqlalchemy/sql/operators.py | 7
-rw-r--r--  lib/sqlalchemy/sql/schema.py | 2
-rw-r--r--  lib/sqlalchemy/sql/selectable.py | 384
-rw-r--r--  lib/sqlalchemy/sql/sqltypes.py | 170
-rw-r--r--  lib/sqlalchemy/sql/type_api.py | 2
-rw-r--r--  lib/sqlalchemy/sql/util.py | 144
-rw-r--r--  lib/sqlalchemy/sql/visitors.py | 2
-rw-r--r--  lib/sqlalchemy/testing/__init__.py | 4
-rw-r--r--  lib/sqlalchemy/testing/assertions.py | 8
-rw-r--r--  lib/sqlalchemy/testing/assertsql.py | 2
-rw-r--r--  lib/sqlalchemy/testing/config.py | 2
-rw-r--r--  lib/sqlalchemy/testing/engines.py | 2
-rw-r--r--  lib/sqlalchemy/testing/entities.py | 2
-rw-r--r--  lib/sqlalchemy/testing/exclusions.py | 2
-rw-r--r--  lib/sqlalchemy/testing/fixtures.py | 2
-rw-r--r--  lib/sqlalchemy/testing/mock.py | 2
-rw-r--r--  lib/sqlalchemy/testing/pickleable.py | 2
-rw-r--r--  lib/sqlalchemy/testing/plugin/noseplugin.py | 2
-rw-r--r--  lib/sqlalchemy/testing/plugin/plugin_base.py | 17
-rw-r--r--  lib/sqlalchemy/testing/plugin/pytestplugin.py | 4
-rw-r--r--  lib/sqlalchemy/testing/profiling.py | 2
-rw-r--r--  lib/sqlalchemy/testing/provision.py | 108
-rw-r--r--  lib/sqlalchemy/testing/requirements.py | 2
-rw-r--r--  lib/sqlalchemy/testing/runner.py | 2
-rw-r--r--  lib/sqlalchemy/testing/schema.py | 2
-rw-r--r--  lib/sqlalchemy/testing/suite/test_reflection.py | 21
-rw-r--r--  lib/sqlalchemy/testing/suite/test_sequence.py | 10
-rw-r--r--  lib/sqlalchemy/testing/util.py | 4
-rw-r--r--  lib/sqlalchemy/testing/warnings.py | 2
-rw-r--r--  lib/sqlalchemy/types.py | 2
-rw-r--r--  lib/sqlalchemy/util/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/util/_collections.py | 8
-rw-r--r--  lib/sqlalchemy/util/compat.py | 4
-rw-r--r--  lib/sqlalchemy/util/deprecations.py | 2
-rw-r--r--  lib/sqlalchemy/util/langhelpers.py | 9
-rw-r--r--  lib/sqlalchemy/util/queue.py | 2
-rw-r--r--  lib/sqlalchemy/util/topological.py | 2
-rw-r--r--  reap_oracle_dbs.py | 24
-rw-r--r--  setup.cfg | 2
-rw-r--r--  test/base/test_utils.py | 50
-rw-r--r--  test/dialect/mssql/test_compiler.py | 26
-rw-r--r--  test/dialect/mssql/test_query.py | 4
-rw-r--r--  test/dialect/mssql/test_reflection.py | 2
-rw-r--r--  test/dialect/mssql/test_types.py | 50
-rw-r--r--  test/dialect/mysql/test_compiler.py | 27
-rw-r--r--  test/dialect/mysql/test_dialect.py | 25
-rw-r--r--  test/dialect/mysql/test_types.py | 18
-rw-r--r--  test/dialect/postgresql/test_dialect.py | 10
-rw-r--r--  test/dialect/postgresql/test_reflection.py | 2
-rw-r--r--  test/dialect/postgresql/test_types.py | 51
-rw-r--r--  test/dialect/test_oracle.py | 46
-rw-r--r--  test/dialect/test_sqlite.py | 37
-rw-r--r--  test/engine/test_logging.py | 130
-rw-r--r--  test/engine/test_pool.py | 60
-rw-r--r--  test/engine/test_transaction.py | 25
-rw-r--r--  test/ext/test_mutable.py | 413
-rw-r--r--  test/orm/inheritance/test_basic.py | 2
-rw-r--r--  test/orm/inheritance/test_polymorphic_rel.py | 75
-rw-r--r--  test/orm/test_eager_relations.py | 149
-rw-r--r--  test/orm/test_joins.py | 9
-rw-r--r--  test/orm/test_loading.py | 18
-rw-r--r--  test/orm/test_merge.py | 51
-rw-r--r--  test/orm/test_query.py | 203
-rw-r--r--  test/orm/test_transaction.py | 65
-rw-r--r--  test/requirements.py | 8
-rw-r--r--  test/sql/test_compiler.py | 165
-rw-r--r--  test/sql/test_cte.py | 152
-rw-r--r--  test/sql/test_insert.py | 71
-rw-r--r--  test/sql/test_lateral.py | 134
-rw-r--r--  test/sql/test_operators.py | 24
-rw-r--r--  test/sql/test_resultset.py | 11
-rw-r--r--  test/sql/test_selectable.py | 27
-rw-r--r--  test/sql/test_text.py | 11
-rw-r--r--  test/sql/test_update.py | 36
-rw-r--r--  tox.ini | 24
231 files changed, 5087 insertions, 850 deletions
diff --git a/LICENSE b/LICENSE
index 0208c3f57..12dbfc61b 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,6 +1,6 @@
This is the MIT license: http://www.opensource.org/licenses/mit-license.php
-Copyright (c) 2005-2015 the SQLAlchemy authors and contributors <see AUTHORS file>.
+Copyright (c) 2005-2016 the SQLAlchemy authors and contributors <see AUTHORS file>.
SQLAlchemy is a trademark of Michael Bayer.
Permission is hereby granted, free of charge, to any person obtaining a copy of this
diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst
index a0b1ad957..07188b771 100644
--- a/doc/build/changelog/changelog_10.rst
+++ b/doc/build/changelog/changelog_10.rst
@@ -16,8 +16,143 @@
:start-line: 5
.. changelog::
+ :version: 1.0.13
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 3682
+
+ Fixed bug where the negation of an EXISTS expression would not
+ be properly typed as boolean in the result, and also would fail to be
+ anonymously aliased in a SELECT list as is the case with a
+ non-negated EXISTS construct.
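+
+ As a minimal illustration (assuming a toy ``t`` table), the negated
+ form can now be placed in a SELECT list just like the non-negated one::
+
+     from sqlalchemy import table, column, select, exists
+
+     t = table('t', column('x'))
+
+     # ~exists(...) is now typed as Boolean and receives an anonymous
+     # label when placed in the columns clause
+     stmt = select([~exists().where(t.c.x > 5)])
+     print(stmt)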
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 3666
+
+ Fixed bug where "unconsumed column names" exception would fail to
+ be raised in the case where :meth:`.Insert.values` were called
+ with a list of parameter mappings, instead of a single mapping
+ of parameters. Pull request courtesy Athena Yao.
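+
+ A rough sketch of the now-covered case, assuming a single-column ``t``
+ table::
+
+     from sqlalchemy import MetaData, Table, Column, Integer
+
+     t = Table('t', MetaData(), Column('x', Integer))
+
+     # a list of parameter mappings; the stray "q" key is not a column
+     # on "t" and is now rejected with an "Unconsumed column names"
+     # error rather than passing silently
+     stmt = t.insert().values([{"x": 1, "q": 2}, {"x": 3, "q": 4}])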
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 3663
+
+ Anonymous labeling is applied to a :attr:`.func` construct that is
+ passed to :func:`.column_property`, so that if the same attribute
+ is referred to as a column expression twice the names are de-duped,
+ thus avoiding "ambiguous column" errors. Previously, the
+ ``.label(None)`` would need to be applied in order for the name
+ to be de-anonymized.
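+
+ For example, a hypothetical mapping along these lines no longer
+ requires an explicit ``.label(None)`` on the expression in order to
+ refer to the attribute twice in one query::
+
+     from sqlalchemy import Column, Integer, func
+     from sqlalchemy.orm import column_property
+     from sqlalchemy.ext.declarative import declarative_base
+
+     Base = declarative_base()
+
+     class Widget(Base):
+         __tablename__ = 'widget'
+         id = Column(Integer, primary_key=True)
+         x = Column(Integer)
+
+         # the func expression is now anonymously labeled automatically
+         x_or_zero = column_property(func.coalesce(x, 0))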
+
+ .. change::
+ :tags: bug, py3k
+ :tickets: 3660
+
+ Fixed bug in "to_list" conversion where a single bytes object
+ would be turned into a list of individual characters. This would
+ impact among other things using the :meth:`.Query.get` method
+ on a primary key that's a bytes object.
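+
+ For instance (a sketch assuming a hypothetical ``Asset`` mapping with a
+ binary primary key and an existing ``session``)::
+
+     # the bytes value is passed through intact rather than being
+     # exploded into a list of single characters
+     asset = session.query(Asset).get(b"\x00\x01\x02\x03")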
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 3658
+
+ Fixed regression appearing in the 1.0 series in ORM loading where the
+ exception raised for an expected column missing would incorrectly
+ be a ``NoneType`` error, rather than the expected
+ :class:`.NoSuchColumnError`.
+
+ .. change::
+ :tags: bug, mssql, oracle
+ :tickets: 3657
+
+ Fixed regression appearing in the 1.0 series which would cause the Oracle
+ and SQL Server dialects to incorrectly account for result set columns
+ when these dialects would wrap a SELECT in a subquery in order to
+ provide LIMIT/OFFSET behavior, and the original SELECT statement
+ referred to the same column multiple times, such as a column and
+ a label of that same column. This issue is related
+ to :ticket:`3658` in that when the error occurred, it would also
+ cause a ``NoneType`` error, rather than reporting that it couldn't
+ locate a column.
+
+.. changelog::
:version: 1.0.12
- :released:
+ :released: February 15, 2016
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 3647
+
+ Fixed bug in :meth:`.Session.merge` where an object with a composite
+ primary key that has values for some but not all of the PK fields
+ would emit a SELECT statement leaking the internal NEVER_SET symbol
+ into the query, rather than detecting that this object does not have
+ a searchable primary key and no SELECT should be emitted.
+
+ .. change::
+ :tags: bug, postgresql
+ :tickets: 3644
+
+ Fixed bug in :func:`.expression.text` construct where a double-colon
+ expression would not escape properly, e.g. ``some\:\:expr``, as is most
+ commonly required when rendering Postgresql-style CAST expressions.
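+
+ A short sketch of the now-working escape, using a raw string so the
+ backslashes reach the :func:`.expression.text` construct intact::
+
+     from sqlalchemy import text
+
+     # the escaped colons are intended to render as a literal "::" cast
+     # rather than being mistaken for bind parameter markers
+     stmt = text(r"SELECT some_col\:\:integer FROM some_table")
+     print(stmt)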
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 3643
+ :pullreq: github:232
+
+ Fixed issue where the "literal_binds" flag was not propagated
+ for :func:`.expression.insert`, :func:`.expression.update` or
+ :func:`.expression.delete` constructs when compiled to string
+ SQL. Pull request courtesy Tim Tate.
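+
+ A quick sketch, assuming a trivial ``t`` table::
+
+     from sqlalchemy import table, column
+
+     t = table('t', column('x'))
+
+     # the flag now carries through to INSERT/UPDATE/DELETE compilation,
+     # e.g. this prints: INSERT INTO t (x) VALUES (5)
+     stmt = t.insert().values(x=5)
+     print(stmt.compile(compile_kwargs={"literal_binds": True}))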
+
+ .. change::
+ :tags: bug, oracle, jython
+ :tickets: 3621
+
+ Fixed a small issue in the Jython Oracle compiler involving the
+ rendering of "RETURNING" which allows this currently
+ unsupported/untested dialect to work rudimentarily with the 1.0 series.
+ Pull request courtesy Carlos Rivas.
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 3642
+
+ Fixed issue where inadvertent use of the Python ``__contains__``
+ override with a column expression (e.g. by using ``'x' in col``)
+ would cause an endless loop in the case of an ARRAY type, as Python
+ defers this to ``__getitem__`` access which never raises for this
+ type. Overall, all use of ``__contains__`` now raises
+ NotImplementedError.
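+
+ In other words, a containment check of this form now raises
+ immediately (sketch)::
+
+     from sqlalchemy import column
+
+     # raises NotImplementedError; use col.contains('x'), col.in_([...])
+     # or ARRAY-specific operators to express containment in SQL
+     'x' in column('data')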
+
+ .. change::
+ :tags: bug, engine, mysql
+ :tickets: 2696
+
+ Revisiting :ticket:`2696`, first released in 1.0.10, which attempts to
+ work around Python 2's lack of exception context reporting by emitting
+ a warning for an exception that was interrupted by a second exception
+ when attempting to roll back the already-failed transaction; this
+ issue continues to occur for MySQL backends in conjunction with a
+ savepoint that gets unexpectedly lost, which then causes a
+ "no such savepoint" error when the rollback is attempted, obscuring
+ what the original condition was.
+
+ The approach has been generalized to the Core "safe
+ reraise" function which takes place across the ORM and Core in any
+ place that a transaction is being rolled back in response to an error
+ which occurred trying to commit, including the context managers
+ provided by :class:`.Session` and :class:`.Connection`, and taking
+ place for operations such as a failure on "RELEASE SAVEPOINT".
+ Previously, the fix was only in place for a specific path within
+ the ORM flush/commit process; it now takes place for all transactional
+ context managers as well.
.. change::
:tags: bug, sql
diff --git a/doc/build/changelog/changelog_11.rst b/doc/build/changelog/changelog_11.rst
index 511b7b8be..2a0e609b6 100644
--- a/doc/build/changelog/changelog_11.rst
+++ b/doc/build/changelog/changelog_11.rst
@@ -22,6 +22,214 @@
:version: 1.1.0b1
.. change::
+ :tags: feature, sqlite
+ :tickets: 3629
+
+ The SQLite dialect now reflects the names of primary key constraints.
+ Pull request courtesy Diana Clarke.
+
+ .. seealso::
+
+ :ref:`change_3629`
+
+ .. change::
+ :tags: feature, sql
+ :tickets: 2857
+
+ Added :meth:`.Select.lateral` and related constructs to allow
+ for the SQL standard LATERAL keyword, currently only supported
+ by Postgresql.
+
+ .. seealso::
+
+ :ref:`change_2857`
+
+ .. change::
+ :tags: feature, sql
+ :tickets: 1957
+ :pullreq: github:209
+
+ Added support for rendering "FULL OUTER JOIN" to both Core and ORM.
+ Pull request courtesy Stefan Urbanek.
+
+ .. seealso::
+
+ :ref:`change_1957`
+
+ .. change::
+ :tags: feature, engine
+
+ Added connection pool events :meth:`.PoolEvents.close`,
+ :meth:`.PoolEvents.detach`,
+ :meth:`.PoolEvents.close_detached`.
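+
+ A minimal sketch of hooking one of these events (assuming an in-memory
+ SQLite engine)::
+
+     from sqlalchemy import create_engine, event
+
+     engine = create_engine("sqlite://")
+
+     @event.listens_for(engine, "close")
+     def on_close(dbapi_connection, connection_record):
+         # invoked when a pooled DBAPI connection is closed
+         print("pool connection closed")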
+
+ .. change::
+ :tags: bug, orm, mysql
+ :tickets: 3680
+
+ Further continuing on the common MySQL exception case of
+ a savepoint being cancelled, first covered in :ticket:`2696`,
+ the failure mode in which the :class:`.Session` is placed when a
+ SAVEPOINT vanishes before rollback has been improved to allow the
+ :class:`.Session` to still function outside of that savepoint.
+ It is assumed that the savepoint operation failed and was cancelled.
+
+ .. seealso::
+
+ :ref:`change_3680`
+
+ .. change::
+ :tags: feature, mssql
+ :tickets: 3534
+
+ Added basic isolation level support to the SQL Server dialects
+ via :paramref:`.create_engine.isolation_level` and
+ :paramref:`.Connection.execution_options.isolation_level`
+ parameters.
+
+ .. seealso::
+
+ :ref:`change_3534`
+
+ .. change::
+ :tags: feature, mysql
+ :tickets: 3332
+
+ Added support for "autocommit" on MySQL drivers, via the
+ AUTOCOMMIT isolation level setting. Pull request courtesy
+ Roman Podoliaka.
+
+ .. seealso::
+
+ :ref:`change_3332`
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 3677
+
+ Fixed bug where a newly inserted instance that is rolled back
+ would still potentially cause persistence conflicts on the next
+ transaction, because the instance was not checked to see whether
+ it had been expired. This fix resolves a large class of cases that
+ erroneously cause the "New instance with identity X conflicts with
+ persistent instance Y" error.
+
+ .. seealso::
+
+ :ref:`change_3677`
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 3662
+
+ An improvement to the workings of :meth:`.Query.correlate` such
+ that when a "polymorphic" entity is used which represents a straight
+ join of several tables, the statement will ensure that all the
+ tables within the join are part of what's correlating.
+
+ .. seealso::
+
+ :ref:`change_3662`
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 3431
+
+ Fixed bug which would cause an eagerly loaded many-to-one attribute
+ to not be loaded, if the joined eager load were from a row where the
+ same entity was present multiple times, with some paths calling for the
+ attribute to be eagerly loaded and others not. The logic here is revised to
+ take in the attribute even though a different loader path has
+ handled the parent entity already.
+
+ .. seealso::
+
+ :ref:`change_3431`
+
+ .. change::
+ :tags: feature, engine
+ :tickets: 2837
+
+ All string formatting of bound parameter sets and result rows for
+ logging, exception, and ``repr()`` purposes now truncates very large
+ scalar values within each collection, including an
+ "N characters truncated"
+ notation, similar to how the display of large multiple-parameter
+ sets is itself truncated.
+
+
+ .. seealso::
+
+ :ref:`change_2837`
+
+ .. change::
+ :tags: feature, ext
+ :tickets: 3297
+
+ Added :class:`.MutableSet` and :class:`.MutableList` helper classes
+ to the :ref:`mutable_toplevel` extension. Pull request courtesy
+ Jeong YunWon.
+
+ .. change::
+ :tags: feature, sql
+ :tickets: 2551
+
+ CTE functionality has been expanded to support all DML: INSERT,
+ UPDATE, and DELETE statements can both specify their own
+ WITH clause and act themselves as CTE expressions when they
+ include a RETURNING clause.
+
+ .. seealso::
+
+ :ref:`change_2551`
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 3641
+
+ A refinement to the logic which adds columns to the resulting SQL when
+ :meth:`.Query.distinct` is combined with :meth:`.Query.order_by` such
+ that columns which are already present will not be added
+ a second time, even if they are labeled with a different name.
+ Regardless of this change, the extra columns added to the SQL have
+ never been returned in the final result, so this change only impacts
+ the string form of the statement as well as its behavior when used in
+ a Core execution context. Additionally, columns are no longer added
+ when the DISTINCT ON format is used, provided the query is not
+ wrapped inside a subquery due to joined eager loading.
+
+ .. seealso::
+
+ :ref:`change_3641`
+
+ .. change::
+ :tags: feature, sql
+ :tickets: 3292, 3095
+
+ Added support for PEP-435-style enumerated classes, namely
+ Python 3's ``enum.Enum`` class but also including compatible
+ enumeration libraries, to the :class:`.types.Enum` datatype.
+ The :class:`.types.Enum` datatype now also performs in-Python validation
+ of incoming values, and adds the :paramref:`.Enum.create_constraint`
+ option to forego creating the CHECK constraint.
+ Pull request courtesy Alex Grönholm.
+
+ .. seealso::
+
+ :ref:`change_3292`
+
+ :ref:`change_3095`
+
+ .. change::
+ :tags: change, postgresql
+
+ The ``sqlalchemy.dialects.postgres`` module, long deprecated, is
+ removed; this has emitted a warning for many years and projects
+ should be calling upon ``sqlalchemy.dialects.postgresql``.
+ Engine URLs of the form ``postgres://`` will continue to function,
+ however.
+
+ .. change::
:tags: bug, sqlite
:tickets: 3634
diff --git a/doc/build/changelog/migration_11.rst b/doc/build/changelog/migration_11.rst
index 3be758226..64ed2d9e2 100644
--- a/doc/build/changelog/migration_11.rst
+++ b/doc/build/changelog/migration_11.rst
@@ -16,7 +16,7 @@ What's New in SQLAlchemy 1.1?
some issues may be moved to later milestones in order to allow
for a timely release.
- Document last updated: January 19, 2016
+ Document last updated: March 23, 2016
Introduction
============
@@ -290,6 +290,136 @@ time on the outside of the subquery.
:ticket:`3582`
+.. _change_3680:
+
+Improved Session state when a SAVEPOINT is cancelled by the database
+--------------------------------------------------------------------
+
+A common case with MySQL is that a SAVEPOINT is cancelled when a deadlock
+occurs within the transaction. The :class:`.Session` has been modified
+to deal with this failure mode slightly more gracefully, such that the
+outer, non-savepoint transaction still remains usable::
+
+ s = Session()
+ s.begin_nested()
+
+ s.add(SomeObject())
+
+ try:
+     # assume the flush fails, flush goes to rollback to the
+     # savepoint and that also fails
+     s.flush()
+ except Exception as err:
+     print("Something broke, and our SAVEPOINT vanished too")
+
+ # this is the SAVEPOINT transaction, marked as
+ # DEACTIVE so the rollback() call succeeds
+ s.rollback()
+
+ # this is the outermost transaction, remains ACTIVE
+ # so rollback() or commit() can succeed
+ s.rollback()
+
+This issue is a continuation of :ticket:`2696` where we emit a warning
+so that the original error can be seen when running on Python 2, even though
+the SAVEPOINT exception takes precedence. On Python 3, exceptions are chained
+so both failures are reported individually.
+
+
+:ticket:`3680`
+
+.. _change_3677:
+
+Erroneous "new instance X conflicts with persistent instance Y" flush errors fixed
+----------------------------------------------------------------------------------
+
+The :meth:`.Session.rollback` method is responsible for removing objects
+that were INSERTed into the database, e.g. moved from pending to persistent,
+within that now rolled-back transaction. Objects that make this state
+change are tracked in a weak-referencing collection, and if an object is
+garbage collected from that collection, the :class:`.Session` no longer worries
+about it (it would otherwise not scale for operations that insert many new
+objects within a transaction). However, an issue arises if the application
+re-loads that same garbage-collected row within the transaction, before the
+rollback occurs; if a strong reference to this object remains into the next
+transaction, the fact that this object was not inserted and should be
+removed would be lost, and the flush would incorrectly raise an error::
+
+ from sqlalchemy import Column, Integer, create_engine
+ from sqlalchemy.orm import Session
+ from sqlalchemy.ext.declarative import declarative_base
+
+ Base = declarative_base()
+
+ class A(Base):
+     __tablename__ = 'a'
+     id = Column(Integer, primary_key=True)
+
+ e = create_engine("sqlite://", echo=True)
+ Base.metadata.create_all(e)
+
+ s = Session(e)
+
+ # persist an object
+ s.add(A(id=1))
+ s.flush()
+
+ # rollback buffer loses reference to A
+
+ # load it again, rollback buffer knows nothing
+ # about it
+ a1 = s.query(A).first()
+
+ # roll back the transaction; all state is expired but the
+ # "a1" reference remains
+ s.rollback()
+
+ # previous "a1" conflicts with the new one because we aren't
+ # checking that it never got committed
+ s.add(A(id=1))
+ s.commit()
+
+The above program would raise::
+
+ FlushError: New instance <User at 0x7f0287eca4d0> with identity key
+ (<class 'test.orm.test_transaction.User'>, ('u1',)) conflicts
+ with persistent instance <User at 0x7f02889c70d0>
+
+The bug is that when the above exception is raised, the unit of work
+is operating upon the original object assuming it's a live row, when in
+fact the object is expired and, when tested, turns out to be gone. The
+fix now tests for this condition, so in the SQL log we see:
+
+.. sourcecode:: sql
+
+ BEGIN (implicit)
+
+ INSERT INTO a (id) VALUES (?)
+ (1,)
+
+ SELECT a.id AS a_id FROM a LIMIT ? OFFSET ?
+ (1, 0)
+
+ ROLLBACK
+
+ BEGIN (implicit)
+
+ SELECT a.id AS a_id FROM a WHERE a.id = ?
+ (1,)
+
+ INSERT INTO a (id) VALUES (?)
+ (1,)
+
+ COMMIT
+
+Above, the unit of work now does a SELECT for the row we're about to report
+as a conflict, sees that it doesn't exist, and proceeds normally.
+The expense of this SELECT is only incurred in the case where the
+exception would otherwise have been erroneously raised.
+
+
+:ticket:`3677`
+
.. _change_2349:
passive_deletes feature for joined-inheritance mappings
@@ -463,6 +593,72 @@ would have to be compared during the merge.
:ticket:`3601`
+.. _change_3662:
+
+Improvements to the Query.correlate method with polymorphic entities
+--------------------------------------------------------------------
+
+In recent SQLAlchemy versions, the SQL generated by many forms of
+"polymorphic" queries has a more "flat" form than it used to, where
+a JOIN of several tables is no longer bundled into a subquery unconditionally.
+To accommodate this, the :meth:`.Query.correlate` method now extracts the
+individual tables from such a polymorphic selectable and ensures that all
+are part of the "correlate" for the subquery. Assuming the
+``Person/Manager/Engineer->Company`` setup from the mapping documentation,
+using with_polymorphic::
+
+ sess.query(Person.name).\
+     filter(
+         sess.query(Company.name).
+         filter(Company.company_id == Person.company_id).
+         correlate(Person).as_scalar() == "Elbonia, Inc.")
+
+The above query now produces::
+
+ SELECT people.name AS people_name
+ FROM people
+ LEFT OUTER JOIN engineers ON people.person_id = engineers.person_id
+ LEFT OUTER JOIN managers ON people.person_id = managers.person_id
+ WHERE (SELECT companies.name
+ FROM companies
+ WHERE companies.company_id = people.company_id) = ?
+
+Before the fix, the call to ``correlate(Person)`` would inadvertently
+attempt to correlate to the join of ``Person``, ``Engineer`` and ``Manager``
+as a single unit, so ``Person`` wouldn't be correlated::
+
+ -- old, incorrect query
+ SELECT people.name AS people_name
+ FROM people
+ LEFT OUTER JOIN engineers ON people.person_id = engineers.person_id
+ LEFT OUTER JOIN managers ON people.person_id = managers.person_id
+ WHERE (SELECT companies.name
+ FROM companies, people
+ WHERE companies.company_id = people.company_id) = ?
+
+Using correlated subqueries against polymorphic mappings still has some
+unpolished edges. If for example ``Person`` is polymorphically linked
+to a so-called "concrete polymorphic union" query, the above subquery
+may not correctly refer to this subquery. In all cases, a way to refer
+to the "polyorphic" entity fully is to create an :func:`.aliased` object
+from it first::
+
+ # works with all SQLAlchemy versions and all types of polymorphic
+ # aliasing.
+
+ paliased = aliased(Person)
+ sess.query(paliased.name).\
+     filter(
+         sess.query(Company.name).
+         filter(Company.company_id == paliased.company_id).
+         correlate(paliased).as_scalar() == "Elbonia, Inc.")
+
+The :func:`.aliased` construct guarantees that the "polymorphic selectable"
+is wrapped in a subquery. By referring to it explicitly in the correlated
+subquery, the polymorphic form is correctly used.
+
+:ticket:`3662`
+
.. _change_3081:
Stringify of Query will consult the Session for the correct dialect
@@ -487,9 +683,239 @@ associated with any bound :class:`.Engine`, then the fallback to the
:ticket:`3081`
+.. _change_3431:
+
+Joined eager loading where the same entity is present multiple times in one row
+-------------------------------------------------------------------------------
+
+A fix has been made to the case whereby an attribute will be
+loaded via joined eager loading, even if the entity was already loaded from the
+row on a different "path" that doesn't include the attribute. This is a
+deep use case that's hard to reproduce, but the general idea is as follows::
+
+ class A(Base):
+     __tablename__ = 'a'
+     id = Column(Integer, primary_key=True)
+     b_id = Column(ForeignKey('b.id'))
+     c_id = Column(ForeignKey('c.id'))
+
+     b = relationship("B")
+     c = relationship("C")
+
+
+ class B(Base):
+     __tablename__ = 'b'
+     id = Column(Integer, primary_key=True)
+     c_id = Column(ForeignKey('c.id'))
+
+     c = relationship("C")
+
+
+ class C(Base):
+     __tablename__ = 'c'
+     id = Column(Integer, primary_key=True)
+     d_id = Column(ForeignKey('d.id'))
+     d = relationship("D")
+
+
+ class D(Base):
+     __tablename__ = 'd'
+     id = Column(Integer, primary_key=True)
+
+
+ c_alias_1 = aliased(C)
+ c_alias_2 = aliased(C)
+
+ q = s.query(A)
+ q = q.join(A.b).join(c_alias_1, B.c).join(c_alias_1.d)
+ q = q.options(contains_eager(A.b).contains_eager(B.c, alias=c_alias_1).contains_eager(C.d))
+ q = q.join(c_alias_2, A.c)
+ q = q.options(contains_eager(A.c, alias=c_alias_2))
+
+The above query emits SQL like this::
+
+ SELECT
+ d.id AS d_id,
+ c_1.id AS c_1_id, c_1.d_id AS c_1_d_id,
+ b.id AS b_id, b.c_id AS b_c_id,
+ c_2.id AS c_2_id, c_2.d_id AS c_2_d_id,
+ a.id AS a_id, a.b_id AS a_b_id, a.c_id AS a_c_id
+ FROM
+ a
+ JOIN b ON b.id = a.b_id
+ JOIN c AS c_1 ON c_1.id = b.c_id
+ JOIN d ON d.id = c_1.d_id
+ JOIN c AS c_2 ON c_2.id = a.c_id
+
+We can see that the ``c`` table is selected from twice; once in the context
+of ``A.b.c -> c_alias_1`` and another in the context of ``A.c -> c_alias_2``.
+Also, we can see that it is quite possible that the ``C`` identity for a
+single row is the **same** for both ``c_alias_1`` and ``c_alias_2``, meaning
+two sets of columns in one row result in only one new object being added
+to the identity map.
+
+The query options above only call for the attribute ``C.d`` to be loaded
+in the context of ``c_alias_1``, and not ``c_alias_2``. So whether or not
+the final ``C`` object we get in the identity map has the ``C.d`` attribute
+loaded depends on how the mappings are traversed, which while not completely
+random, is essentially non-deterministic. The fix is that even if the
+loader for ``c_alias_1`` is processed after that of ``c_alias_2`` for a
+single row where they both refer to the same identity, the ``C.d``
+element will still be loaded. Previously, the loader did not seek to
+modify the load of an entity that was already loaded via a different path.
+Which loader reaches the entity first has always been non-deterministic,
+so this fix may be detectable as a behavioral change in some situations and
+not others.
+
+The fix includes tests for two variants of the "multiple paths to one entity"
+case, and the fix should hopefully cover all other scenarios of this nature.
+
+:ticket:`3431`
+
+.. _change_3641:
+
+Columns no longer added redundantly with DISTINCT + ORDER BY
+------------------------------------------------------------
+
+A query such as the following will now augment only those columns
+that are missing from the SELECT list, without duplicates::
+
+ q = session.query(User.id, User.name.label('name')).\
+     distinct().\
+     order_by(User.id, User.name, User.fullname)
+
+Produces::
+
+ SELECT DISTINCT user.id AS a_id, user.name AS name,
+ user.fullname AS a_fullname
+ FROM a ORDER BY user.id, user.name, user.fullname
+
+Previously, it would produce::
+
+ SELECT DISTINCT user.id AS a_id, user.name AS name, user.name AS a_name,
+ user.fullname AS a_fullname
+ FROM a ORDER BY user.id, user.name, user.fullname
+
+Where above, the ``user.name`` column is added unnecessarily. The results
+would not be affected, as the additional columns are never included in the
+result rows, but the redundant column clutters the rendered statement.
+
+Additionally, when the Postgresql DISTINCT ON format is used by passing
+expressions to :meth:`.Query.distinct`, the above "column adding" logic
+is disabled entirely.
+
+When the query is being bundled into a subquery for the purposes of
+joined eager loading, the "augment column list" rules are are necessarily
+more aggressive so that the ORDER BY can still be satisifed, so this case
+remains unchanged.
+
+:ticket:`3641`
+
+New MutableList and MutableSet helpers added to the mutation tracking extension
+-------------------------------------------------------------------------------
+
+New helper classes :class:`.MutableList` and :class:`.MutableSet` have been
+added to the :ref:`mutable_toplevel` extension, to complement the existing
+:class:`.MutableDict` helper.
+
+:ticket:`3297`
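+
+For example, a list-valued attribute can now be tracked in place; a brief
+sketch, using ``PickleType`` purely for illustration::
+
+ from sqlalchemy import Column, Integer, PickleType
+ from sqlalchemy.ext.declarative import declarative_base
+ from sqlalchemy.ext.mutable import MutableList
+
+ Base = declarative_base()
+
+ class Account(Base):
+     __tablename__ = 'account'
+     id = Column(Integer, primary_key=True)
+
+     # in-place mutations such as .append() mark the attribute as
+     # changed so the ORM flushes the new value
+     tags = Column(MutableList.as_mutable(PickleType), default=list)
+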
+
New Features and Improvements - Core
====================================
+.. _change_2551:
+
+CTE Support for INSERT, UPDATE, DELETE
+--------------------------------------
+
+One of the most widely requested features, support for common table
+expressions (CTEs) that work with INSERT, UPDATE, and DELETE, is now implemented.
+An INSERT/UPDATE/DELETE can both draw from a WITH clause that's stated at the
+top of the SQL and be used as a CTE itself in the context of
+a larger statement.
+
+As part of this change, an INSERT from SELECT that includes a CTE will now
+render the CTE at the top of the entire statement, rather than nested
+in the SELECT statement as was the case in 1.0.
+
+Below is an example that renders UPDATE, INSERT and SELECT all in one
+statement::
+
+ >>> from sqlalchemy import table, column, select, literal, exists
+ >>> orders = table(
+ ... 'orders',
+ ... column('region'),
+ ... column('amount'),
+ ... column('product'),
+ ... column('quantity')
+ ... )
+ >>>
+ >>> upsert = (
+ ... orders.update()
+ ... .where(orders.c.region == 'Region1')
+ ... .values(amount=1.0, product='Product1', quantity=1)
+ ... .returning(*(orders.c._all_columns)).cte('upsert'))
+ >>>
+ >>> insert = orders.insert().from_select(
+ ... orders.c.keys(),
+ ... select([
+ ... literal('Region1'), literal(1.0),
+ ... literal('Product1'), literal(1)
+ ... ]).where(~exists(upsert.select()))
+ ... )
+ >>>
+ >>> print(insert) # note formatting added for clarity
+ WITH upsert AS
+ (UPDATE orders SET amount=:amount, product=:product, quantity=:quantity
+ WHERE orders.region = :region_1
+ RETURNING orders.region, orders.amount, orders.product, orders.quantity
+ )
+ INSERT INTO orders (region, amount, product, quantity)
+ SELECT
+ :param_1 AS anon_1, :param_2 AS anon_2,
+ :param_3 AS anon_3, :param_4 AS anon_4
+ WHERE NOT (
+ EXISTS (
+ SELECT upsert.region, upsert.amount,
+ upsert.product, upsert.quantity
+ FROM upsert))
+
+:ticket:`2551`
+
+.. _change_2857:
+
+Support for the SQL LATERAL keyword
+------------------------------------
+
+The LATERAL keyword is currently known to be supported only by Postgresql 9.3
+and greater; however, as it is part of the SQL standard, support for this keyword
+is added to Core.
+special logic beyond just rendering the LATERAL keyword to allow for
+correlation of tables that are derived from the same FROM clause as the
+selectable, e.g. lateral correlation::
+
+ >>> from sqlalchemy import table, column, select, true
+ >>> people = table('people', column('people_id'), column('age'), column('name'))
+ >>> books = table('books', column('book_id'), column('owner_id'))
+ >>> subq = select([books.c.book_id]).\
+ ... where(books.c.owner_id == people.c.people_id).lateral("book_subq")
+ >>> print (select([people]).select_from(people.join(subq, true())))
+ SELECT people.people_id, people.age, people.name
+ FROM people JOIN LATERAL (SELECT books.book_id AS book_id
+ FROM books WHERE books.owner_id = people.people_id)
+ AS book_subq ON true
+
+.. seealso::
+
+ :ref:`lateral_selects`
+
+ :class:`.Lateral`
+
+ :meth:`.Select.lateral`
+
+
+:ticket:`2857`
+
.. _change_3216:
The ``.autoincrement`` directive is no longer implicitly enabled for a composite primary key column
@@ -588,6 +1014,23 @@ will not have much impact on the behavior of the column during an INSERT.
:ticket:`3216`
+.. _change_1957:
+
+Core and ORM support for FULL OUTER JOIN
+----------------------------------------
+
+The new flag :paramref:`.FromClause.outerjoin.full`, available at the Core
+and ORM level, instructs the compiler to render ``FULL OUTER JOIN``
+where it would normally render ``LEFT OUTER JOIN``::
+
+ stmt = select([t1]).select_from(t1.outerjoin(t2, full=True))
+
+The flag also works at the ORM level::
+
+ q = session.query(MyClass).outerjoin(MyOtherClass, full=True)
+
+:ticket:`1957`
+
.. _change_3501:
ResultSet column matching enhancements; positional column setup for textual SQL
@@ -702,6 +1145,119 @@ used for the fetch.
:ticket:`3501`
+.. _change_3292:
+
+Support for Python's native ``enum`` type and compatible forms
+---------------------------------------------------------------
+
+The :class:`.Enum` type can now be constructed using any
+PEP-435 compliant enumerated type. When using this mode, input values
+and return values are the actual enumerated objects, not the
+string values::
+
+ import enum
+ from sqlalchemy import Table, MetaData, Column, Enum, create_engine
+
+
+ class MyEnum(enum.Enum):
+     one = "one"
+     two = "two"
+     three = "three"
+
+
+ t = Table(
+     'data', MetaData(),
+     Column('value', Enum(MyEnum))
+ )
+
+ e = create_engine("sqlite://")
+ t.create(e)
+
+ e.execute(t.insert(), {"value": MyEnum.two})
+ assert e.scalar(t.select()) is MyEnum.two
+
+
+:ticket:`3292`
+
+.. _change_3095:
+
+The ``Enum`` type now does in-Python validation of values
+---------------------------------------------------------
+
+To accommodate Python native enumerated objects, as well as edge
+cases such as where a non-native ENUM type is used within an ARRAY
+and a CHECK constraint is infeasible, the :class:`.Enum` datatype now adds
+in-Python validation of input values::
+
+
+ >>> from sqlalchemy import Table, MetaData, Column, Enum, create_engine
+ >>> t = Table(
+ ... 'data', MetaData(),
+ ... Column('value', Enum("one", "two", "three"))
+ ... )
+ >>> e = create_engine("sqlite://")
+ >>> t.create(e)
+ >>> e.execute(t.insert(), {"value": "four"})
+ Traceback (most recent call last):
+ ...
+ sqlalchemy.exc.StatementError: (exceptions.LookupError)
+ "four" is not among the defined enum values
+ [SQL: u'INSERT INTO data (value) VALUES (?)']
+ [parameters: [{'value': 'four'}]]
+
+For simplicity and consistency, this validation is now turned on in all cases,
+whether or not the enumerated type uses a database-native form, whether
+or not the CHECK constraint is in use, as well as whether or not a
+PEP-435 enumerated type or plain list of string values is used. The
+check also occurs on the result-handling side, when values coming
+from the database are returned.
+
+This validation is in addition to the existing behavior of creating a
+CHECK constraint when a non-native enumerated type is used. The creation of
+this CHECK constraint can now be disabled using the new
+:paramref:`.Enum.create_constraint` flag.
+
+:ticket:`3095`
+
+.. _change_2837:
+
+Large parameter and row values are now truncated in logging and exception displays
+----------------------------------------------------------------------------------
+
+A large value present as a bound parameter for a SQL statement, as well as a
+large value present in a result row, will now be truncated during display
+within logging, exception reporting, as well as ``repr()`` of the row itself::
+
+ >>> from sqlalchemy import create_engine
+ >>> import random
+ >>> e = create_engine("sqlite://", echo='debug')
+ >>> some_value = ''.join(chr(random.randint(52, 85)) for i in range(5000))
+ >>> row = e.execute("select ?", [some_value]).first()
+ ... (lines are wrapped for clarity) ...
+ 2016-02-17 13:23:03,027 INFO sqlalchemy.engine.base.Engine select ?
+ 2016-02-17 13:23:03,027 INFO sqlalchemy.engine.base.Engine
+ ('E6@?>9HPOJB<<BHR:@=TS:5ILU=;JLM<4?B9<S48PTNG9>:=TSTLA;9K;9FPM4M8M@;NM6GU
+ LUAEBT9QGHNHTHR5EP75@OER4?SKC;D:TFUMD:M>;C6U:JLM6R67GEK<A6@S@C@J7>4=4:P
+ GJ7HQ6 ... (4702 characters truncated) ... J6IK546AJMB4N6S9L;;9AKI;=RJP
+ HDSSOTNBUEEC9@Q:RCL:I@5?FO<9K>KJAGAO@E6@A7JI8O:J7B69T6<8;F:S;4BEIJS9HM
+ K:;5OLPM@JR;R:J6<SOTTT=>Q>7T@I::OTDC:CC<=NGP6C>BC8N',)
+ 2016-02-17 13:23:03,027 DEBUG sqlalchemy.engine.base.Engine Col ('?',)
+ 2016-02-17 13:23:03,027 DEBUG sqlalchemy.engine.base.Engine
+ Row (u'E6@?>9HPOJB<<BHR:@=TS:5ILU=;JLM<4?B9<S48PTNG9>:=TSTLA;9K;9FPM4M8M@;
+ NM6GULUAEBT9QGHNHTHR5EP75@OER4?SKC;D:TFUMD:M>;C6U:JLM6R67GEK<A6@S@C@J7
+ >4=4:PGJ7HQ ... (4703 characters truncated) ... J6IK546AJMB4N6S9L;;9AKI;=
+ RJPHDSSOTNBUEEC9@Q:RCL:I@5?FO<9K>KJAGAO@E6@A7JI8O:J7B69T6<8;F:S;4BEIJS9HM
+ K:;5OLPM@JR;R:J6<SOTTT=>Q>7T@I::OTDC:CC<=NGP6C>BC8N',)
+ >>> print row
+ (u'E6@?>9HPOJB<<BHR:@=TS:5ILU=;JLM<4?B9<S48PTNG9>:=TSTLA;9K;9FPM4M8M@;NM6
+ GULUAEBT9QGHNHTHR5EP75@OER4?SKC;D:TFUMD:M>;C6U:JLM6R67GEK<A6@S@C@J7>4
+ =4:PGJ7HQ ... (4703 characters truncated) ... J6IK546AJMB4N6S9L;;9AKI;
+ =RJPHDSSOTNBUEEC9@Q:RCL:I@5?FO<9K>KJAGAO@E6@A7JI8O:J7B69T6<8;F:S;4BEIJS9H
+ MK:;5OLPM@JR;R:J6<SOTTT=>Q>7T@I::OTDC:CC<=NGP6C>BC8N',)
+
+
+:ticket:`2837`
+
.. _change_2528:
A UNION or similar of SELECTs with LIMIT/OFFSET/ORDER BY now parenthesizes the embedded selects
@@ -1364,6 +1920,15 @@ emits::
:ticket:`2729`
+The "postgres" module is removed
+---------------------------------
+
+The ``sqlalchemy.dialects.postgres`` module, long deprecated, is
+removed; this has emitted a warning for many years and projects
+should be calling upon ``sqlalchemy.dialects.postgresql``.
+Engine URLs of the form ``postgres://`` will continue to function,
+however.
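+
+For example (a sketch; the URL is illustrative only)::
+
+ from sqlalchemy import create_engine
+
+ # continues to work, resolving to the "postgresql" dialect, but the
+ # "postgres" dialect name itself now emits a deprecation warning
+ engine = create_engine("postgres://scott:tiger@localhost/test")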
+
Dialect Improvements and Changes - MySQL
=============================================
@@ -1385,6 +1950,25 @@ common to both MySQL and Postgresql.
:ticket:`3547`
+.. _change_3332:
+
+Added support for AUTOCOMMIT "isolation level"
+----------------------------------------------
+
+The MySQL dialect now accepts the value "AUTOCOMMIT" for the
+:paramref:`.create_engine.isolation_level` and
+:paramref:`.Connection.execution_options.isolation_level`
+parameters::
+
+ connection = engine.connect()
+ connection = connection.execution_options(
+     isolation_level="AUTOCOMMIT"
+ )
+
+The isolation level makes use of the various "autocommit" attributes
+provided by most MySQL DBAPIs.
+
+:ticket:`3332`
.. _change_mysql_3216:
@@ -1502,10 +2086,43 @@ the``CREATE INDEX`` DDL didn't work correctly for a schema-bound table
and the :meth:`.Inspector.get_foreign_keys` method will now indicate the
given schema in the results. Cross-schema foreign keys aren't supported.
+.. _change_3629:
+
+Reflection of the name of PRIMARY KEY constraints
+--------------------------------------------------
+
+The SQLite backend now takes advantage of the "sqlite_master" view
+of SQLite in order to extract the name of the primary key constraint
+of a table from the original DDL, in the same way that is achieved for
+foreign key constraints in recent SQLAlchemy versions.
+
+:ticket:`3629`
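+
+A short sketch of what this looks like from the reflection API (the table
+and constraint names here are illustrative)::
+
+ from sqlalchemy import create_engine, inspect
+
+ e = create_engine("sqlite://")
+ e.execute("CREATE TABLE t (id INTEGER, CONSTRAINT pk_t PRIMARY KEY (id))")
+
+ # the "name" key is now populated from the original DDL, e.g. 'pk_t'
+ print(inspect(e).get_pk_constraint("t"))
+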
Dialect Improvements and Changes - SQL Server
=============================================
+.. _change_3534:
+
+Added transaction isolation level support for SQL Server
+--------------------------------------------------------
+
+All SQL Server dialects support transaction isolation level settings
+via the :paramref:`.create_engine.isolation_level` and
+:paramref:`.Connection.execution_options.isolation_level`
+parameters. The four standard levels are supported as well as
+``SNAPSHOT``::
+
+ engine = create_engine(
+     "mssql+pyodbc://scott:tiger@ms_2008",
+     isolation_level="REPEATABLE READ"
+ )
+
+.. seealso::
+
+ :ref:`mssql_isolation_level`
+
+:ticket:`3534`
+
.. _change_3504:
String / varlength types no longer represent "max" explicitly on reflection
diff --git a/doc/build/conf.py b/doc/build/conf.py
index e19078a87..cfcb112d5 100644
--- a/doc/build/conf.py
+++ b/doc/build/conf.py
@@ -129,7 +129,7 @@ master_doc = 'contents'
# General information about the project.
project = u'SQLAlchemy'
-copyright = u'2007-2015, the SQLAlchemy authors and contributors'
+copyright = u'2007-2016, the SQLAlchemy authors and contributors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
diff --git a/doc/build/copyright.rst b/doc/build/copyright.rst
index 50ca03ae8..e5ec9699d 100644
--- a/doc/build/copyright.rst
+++ b/doc/build/copyright.rst
@@ -6,7 +6,7 @@ Appendix: Copyright
This is the MIT license: `<http://www.opensource.org/licenses/mit-license.php>`_
-Copyright (c) 2005-2015 Michael Bayer and contributors.
+Copyright (c) 2005-2016 Michael Bayer and contributors.
SQLAlchemy is a trademark of Michael Bayer.
Permission is hereby granted, free of charge, to any person obtaining a copy of this
diff --git a/doc/build/core/selectable.rst b/doc/build/core/selectable.rst
index e73ce7b64..3f4d9565e 100644
--- a/doc/build/core/selectable.rst
+++ b/doc/build/core/selectable.rst
@@ -23,6 +23,8 @@ elements are themselves :class:`.ColumnElement` subclasses).
.. autofunction:: join
+.. autofunction:: lateral
+
.. autofunction:: outerjoin
.. autofunction:: select
@@ -57,6 +59,9 @@ elements are themselves :class:`.ColumnElement` subclasses).
:members:
:inherited-members:
+.. autoclass:: HasCTE
+ :members:
+
.. autoclass:: HasPrefixes
:members:
@@ -67,6 +72,8 @@ elements are themselves :class:`.ColumnElement` subclasses).
:members:
:inherited-members:
+.. autoclass:: Lateral
+
.. autoclass:: ScalarSelect
:members:
diff --git a/doc/build/core/tutorial.rst b/doc/build/core/tutorial.rst
index 04262ac5e..0fd78abeb 100644
--- a/doc/build/core/tutorial.rst
+++ b/doc/build/core/tutorial.rst
@@ -1690,6 +1690,74 @@ by telling it to correlate all FROM clauses except for ``users``:
('jack',)
{stop}[(u'jack', u'jack@yahoo.com'), (u'jack', u'jack@msn.com')]
+.. _lateral_selects:
+
+LATERAL correlation
+^^^^^^^^^^^^^^^^^^^
+
+LATERAL correlation is a special sub-category of SQL correlation which
+allows a selectable unit to refer to another selectable unit within a
+single FROM clause. This is an extremely special use case which, while
+part of the SQL standard, is only known to be supported by recent
+versions of Postgresql.
+
+Normally, if a SELECT statement refers to
+``table1 JOIN (some SELECT) AS subquery`` in its FROM clause, the subquery
+on the right side may not refer to the "table1" expression from the left side;
+correlation may only refer to a table that is part of another SELECT that
+entirely encloses this SELECT. The LATERAL keyword allows us to turn this
+behavior around, allowing an expression such as:
+
+.. sourcecode:: sql
+
+ SELECT people.people_id, people.age, people.name
+ FROM people JOIN LATERAL (SELECT books.book_id AS book_id
+ FROM books WHERE books.owner_id = people.people_id)
+ AS book_subq ON true
+
+Where above, the right side of the JOIN contains a subquery that refers not
+just to the "books" table but also the "people" table, correlating
+to the left side of the JOIN. SQLAlchemy Core supports a statement
+like the above using the :meth:`.Select.lateral` method as follows::
+
+ >>> from sqlalchemy import table, column, select, true
+ >>> people = table('people', column('people_id'), column('age'), column('name'))
+ >>> books = table('books', column('book_id'), column('owner_id'))
+ >>> subq = select([books.c.book_id]).\
+ ... where(books.c.owner_id == people.c.people_id).lateral("book_subq")
+ >>> print (select([people]).select_from(people.join(subq, true())))
+ SELECT people.people_id, people.age, people.name
+ FROM people JOIN LATERAL (SELECT books.book_id AS book_id
+ FROM books WHERE books.owner_id = people.people_id)
+ AS book_subq ON true
+
+Above, we can see that the :meth:`.Select.lateral` method acts a lot like
+the :meth:`.Select.alias` method, including that we can specify an optional
+name. However, the construct produced is a :class:`.Lateral` construct instead of
+an :class:`.Alias`, which provides for the LATERAL keyword as well as special
+instructions to allow correlation from inside the FROM clause of the
+enclosing statement.
+
+The :meth:`.Select.lateral` method interacts normally with the
+:meth:`.Select.correlate` and :meth:`.Select.correlate_except` methods, except
+that the correlation rules also apply to any other tables present in the
+enclosing statement's FROM clause. Correlation is "automatic" to these
+tables by default, is explicit if the table is specified to
+:meth:`.Select.correlate`, and is explicit to all tables except those
+specified to :meth:`.Select.correlate_except`.
+
+
+.. versionadded:: 1.1
+
+ Support for the LATERAL keyword and lateral correlation.
+
+.. seealso::
+
+ :class:`.Lateral`
+
+ :meth:`.Select.lateral`
+
+
Ordering, Grouping, Limiting, Offset...ing...
---------------------------------------------
diff --git a/doc/build/glossary.rst b/doc/build/glossary.rst
index 9c1395f14..4fd9b0633 100644
--- a/doc/build/glossary.rst
+++ b/doc/build/glossary.rst
@@ -129,6 +129,7 @@ Glossary
lazy load
lazy loads
+ lazy loaded
lazy loading
In object relational mapping, a "lazy load" refers to an
attribute that does not contain its database-side value
diff --git a/doc/build/orm/cascades.rst b/doc/build/orm/cascades.rst
index f645e6dae..0a0f23a70 100644
--- a/doc/build/orm/cascades.rst
+++ b/doc/build/orm/cascades.rst
@@ -17,7 +17,7 @@ the :ref:`cascade_delete` and :ref:`cascade_delete_orphan` options;
these settings are appropriate for related objects which only exist as
long as they are attached to their parent, and are otherwise deleted.
-Cascade behavior is configured using the by changing the
+Cascade behavior is configured using the
:paramref:`~.relationship.cascade` option on
:func:`~sqlalchemy.orm.relationship`::
@@ -341,7 +341,7 @@ easily described through demonstration; it means that, given a mapping such as t
})
If an ``Order`` is already in the session, and is assigned to the ``order``
-attribute of an ``Item``, the backref appends the ``Order`` to the ``items``
+attribute of an ``Item``, the backref appends the ``Item`` to the ``items``
collection of that ``Order``, resulting in the ``save-update`` cascade taking
place::
diff --git a/doc/build/orm/extensions/mutable.rst b/doc/build/orm/extensions/mutable.rst
index 969411481..2ef0a5adb 100644
--- a/doc/build/orm/extensions/mutable.rst
+++ b/doc/build/orm/extensions/mutable.rst
@@ -23,5 +23,12 @@ API Reference
:members:
:undoc-members:
+.. autoclass:: MutableList
+ :members:
+ :undoc-members:
+
+.. autoclass:: MutableSet
+ :members:
+ :undoc-members:
diff --git a/doc/build/orm/relationship_persistence.rst b/doc/build/orm/relationship_persistence.rst
index 597f674ed..1a855216c 100644
--- a/doc/build/orm/relationship_persistence.rst
+++ b/doc/build/orm/relationship_persistence.rst
@@ -1,4 +1,4 @@
-fSpecial Relationship Persistence Patterns
+Special Relationship Persistence Patterns
=========================================
.. _post_update:
diff --git a/doc/build/orm/tutorial.rst b/doc/build/orm/tutorial.rst
index 559367121..0a9fc7430 100644
--- a/doc/build/orm/tutorial.rst
+++ b/doc/build/orm/tutorial.rst
@@ -2041,7 +2041,7 @@ that a single user might have lots of blog posts. When we access
``User.posts``, we'd like to be able to filter results further so as not to
load the entire collection. For this we use a setting accepted by
:func:`~sqlalchemy.orm.relationship` called ``lazy='dynamic'``, which
-configures an alternate **loader strategy** on the attribute::
+configures an alternate **loader strategy** on the attribute:
.. sourcecode:: python+sql
diff --git a/examples/performance/short_selects.py b/examples/performance/short_selects.py
index ef1fcff4a..6f64aa63e 100644
--- a/examples/performance/short_selects.py
+++ b/examples/performance/short_selects.py
@@ -1,5 +1,5 @@
-"""This series of tests illustrates different ways to INSERT a large number
-of rows in bulk.
+"""This series of tests illustrates different ways to SELECT a single
+record by primary key.
"""
diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py
index dde179bf5..1193a1b0b 100644
--- a/lib/sqlalchemy/__init__.py
+++ b/lib/sqlalchemy/__init__.py
@@ -1,5 +1,5 @@
# sqlalchemy/__init__.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -32,6 +32,7 @@ from .sql import (
intersect,
intersect_all,
join,
+ lateral,
literal,
literal_column,
modifier,
diff --git a/lib/sqlalchemy/cextension/processors.c b/lib/sqlalchemy/cextension/processors.c
index 59eb2648c..5357e34dc 100644
--- a/lib/sqlalchemy/cextension/processors.c
+++ b/lib/sqlalchemy/cextension/processors.c
@@ -1,6 +1,6 @@
/*
processors.c
-Copyright (C) 2010-2015 the SQLAlchemy authors and contributors <see AUTHORS file>
+Copyright (C) 2010-2016 the SQLAlchemy authors and contributors <see AUTHORS file>
Copyright (C) 2010-2011 Gaetan de Menten gdementen@gmail.com
This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/cextension/resultproxy.c b/lib/sqlalchemy/cextension/resultproxy.c
index 331fae2b2..170278cff 100644
--- a/lib/sqlalchemy/cextension/resultproxy.c
+++ b/lib/sqlalchemy/cextension/resultproxy.c
@@ -1,6 +1,6 @@
/*
resultproxy.c
-Copyright (C) 2010-2015 the SQLAlchemy authors and contributors <see AUTHORS file>
+Copyright (C) 2010-2016 the SQLAlchemy authors and contributors <see AUTHORS file>
Copyright (C) 2010-2011 Gaetan de Menten gdementen@gmail.com
This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/cextension/utils.c b/lib/sqlalchemy/cextension/utils.c
index 6e00eb816..208f32831 100644
--- a/lib/sqlalchemy/cextension/utils.c
+++ b/lib/sqlalchemy/cextension/utils.c
@@ -1,6 +1,6 @@
/*
utils.c
-Copyright (C) 2012-2015 the SQLAlchemy authors and contributors <see AUTHORS file>
+Copyright (C) 2012-2016 the SQLAlchemy authors and contributors <see AUTHORS file>
This module is part of SQLAlchemy and is released under
the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/connectors/__init__.py b/lib/sqlalchemy/connectors/__init__.py
index 5f65b9306..d72c390cf 100644
--- a/lib/sqlalchemy/connectors/__init__.py
+++ b/lib/sqlalchemy/connectors/__init__.py
@@ -1,5 +1,5 @@
# connectors/__init__.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/connectors/mxodbc.py b/lib/sqlalchemy/connectors/mxodbc.py
index 1bbf899c4..9fc0ce6b5 100644
--- a/lib/sqlalchemy/connectors/mxodbc.py
+++ b/lib/sqlalchemy/connectors/mxodbc.py
@@ -1,5 +1,5 @@
# connectors/mxodbc.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/connectors/pyodbc.py b/lib/sqlalchemy/connectors/pyodbc.py
index 84bc92bee..68bbcc435 100644
--- a/lib/sqlalchemy/connectors/pyodbc.py
+++ b/lib/sqlalchemy/connectors/pyodbc.py
@@ -1,5 +1,5 @@
# connectors/pyodbc.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/connectors/zxJDBC.py b/lib/sqlalchemy/connectors/zxJDBC.py
index 8219a06eb..e7b2dc9ab 100644
--- a/lib/sqlalchemy/connectors/zxJDBC.py
+++ b/lib/sqlalchemy/connectors/zxJDBC.py
@@ -1,5 +1,5 @@
# connectors/zxJDBC.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/databases/__init__.py b/lib/sqlalchemy/databases/__init__.py
index 321ff999b..0bfc93747 100644
--- a/lib/sqlalchemy/databases/__init__.py
+++ b/lib/sqlalchemy/databases/__init__.py
@@ -1,5 +1,5 @@
# databases/__init__.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/__init__.py b/lib/sqlalchemy/dialects/__init__.py
index f851a4ab8..bf9c6d38e 100644
--- a/lib/sqlalchemy/dialects/__init__.py
+++ b/lib/sqlalchemy/dialects/__init__.py
@@ -1,5 +1,5 @@
# dialects/__init__.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -17,6 +17,7 @@ __all__ = (
from .. import util
+_translates = {'postgres': 'postgresql'}
def _auto_fn(name):
"""default dialect importer.
@@ -30,6 +31,14 @@ def _auto_fn(name):
else:
dialect = name
driver = "base"
+
+ if dialect in _translates:
+ translated = _translates[dialect]
+ util.warn_deprecated(
+ "The '%s' dialect name has been "
+ "renamed to '%s'" % (dialect, translated)
+ )
+ dialect = translated
try:
module = __import__('sqlalchemy.dialects.%s' % (dialect, )).dialects
except ImportError:
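
With the ``_translates`` table above, the legacy ``postgres`` dialect name now
resolves inside ``_auto_fn()`` with a deprecation warning, replacing the
deleted ``postgres.py`` shim further down.  A minimal sketch (the credentials
are hypothetical)::

    from sqlalchemy import create_engine

    # emits SADeprecationWarning: The 'postgres' dialect name has been
    # renamed to 'postgresql'
    engine = create_engine("postgres://scott:tiger@localhost/test")
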
diff --git a/lib/sqlalchemy/dialects/firebird/__init__.py b/lib/sqlalchemy/dialects/firebird/__init__.py
index b2fb57a63..f27bdc05b 100644
--- a/lib/sqlalchemy/dialects/firebird/__init__.py
+++ b/lib/sqlalchemy/dialects/firebird/__init__.py
@@ -1,5 +1,5 @@
# firebird/__init__.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/firebird/base.py b/lib/sqlalchemy/dialects/firebird/base.py
index acd419e85..16e2c55b8 100644
--- a/lib/sqlalchemy/dialects/firebird/base.py
+++ b/lib/sqlalchemy/dialects/firebird/base.py
@@ -1,5 +1,5 @@
# firebird/base.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/firebird/fdb.py b/lib/sqlalchemy/dialects/firebird/fdb.py
index 0ab07498b..aff8cff15 100644
--- a/lib/sqlalchemy/dialects/firebird/fdb.py
+++ b/lib/sqlalchemy/dialects/firebird/fdb.py
@@ -1,5 +1,5 @@
# firebird/fdb.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/firebird/kinterbasdb.py b/lib/sqlalchemy/dialects/firebird/kinterbasdb.py
index 7d1a834b8..3df9f736b 100644
--- a/lib/sqlalchemy/dialects/firebird/kinterbasdb.py
+++ b/lib/sqlalchemy/dialects/firebird/kinterbasdb.py
@@ -1,5 +1,5 @@
# firebird/kinterbasdb.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/mssql/__init__.py b/lib/sqlalchemy/dialects/mssql/__init__.py
index 898b40cd5..8c9e85862 100644
--- a/lib/sqlalchemy/dialects/mssql/__init__.py
+++ b/lib/sqlalchemy/dialects/mssql/__init__.py
@@ -1,5 +1,5 @@
# mssql/__init__.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/mssql/adodbapi.py b/lib/sqlalchemy/dialects/mssql/adodbapi.py
index 6e3f348fc..60fa25d34 100644
--- a/lib/sqlalchemy/dialects/mssql/adodbapi.py
+++ b/lib/sqlalchemy/dialects/mssql/adodbapi.py
@@ -1,5 +1,5 @@
# mssql/adodbapi.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py
index 815529c88..051efa719 100644
--- a/lib/sqlalchemy/dialects/mssql/base.py
+++ b/lib/sqlalchemy/dialects/mssql/base.py
@@ -1,5 +1,5 @@
# mssql/base.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -199,6 +199,47 @@ If using SQL Server 2005 or above, LIMIT with OFFSET
support is available through the ``ROW_NUMBER OVER`` construct.
For versions below 2005, LIMIT with OFFSET usage will fail.
+.. _mssql_isolation_level:
+
+Transaction Isolation Level
+---------------------------
+
+All SQL Server dialects support setting of transaction isolation level
+both via a dialect-specific parameter
+:paramref:`.create_engine.isolation_level`
+accepted by :func:`.create_engine`,
+as well as the :paramref:`.Connection.execution_options.isolation_level`
+argument as passed to
+:meth:`.Connection.execution_options`. This feature works by issuing the
+command ``SET TRANSACTION ISOLATION LEVEL <level>`` for
+each new connection.
+
+To set isolation level using :func:`.create_engine`::
+
+ engine = create_engine(
+ "mssql+pyodbc://scott:tiger@ms_2008",
+ isolation_level="REPEATABLE READ"
+ )
+
+To set using per-connection execution options::
+
+ connection = engine.connect()
+ connection = connection.execution_options(
+ isolation_level="READ COMMITTED"
+ )
+
+Valid values for ``isolation_level`` include:
+
+* ``READ COMMITTED``
+* ``READ UNCOMMITTED``
+* ``REPEATABLE READ``
+* ``SERIALIZABLE``
+* ``SNAPSHOT`` - specific to SQL Server
+
+.. versionadded:: 1.1 support for isolation level setting on Microsoft
+ SQL Server.
+
+
Nullability
-----------
MSSQL has support for three levels of column nullability. The default
@@ -214,7 +255,7 @@ render::
name VARCHAR(20)
If ``nullable`` is ``True`` or ``False`` then the column will be
-``NULL` or ``NOT NULL`` respectively.
+``NULL`` or ``NOT NULL`` respectively.
Date / Time Handling
--------------------
@@ -418,6 +459,40 @@ Declarative form::
This option can also be specified engine-wide using the
``implicit_returning=False`` argument on :func:`.create_engine`.
+.. _mssql_rowcount_versioning:
+
+Rowcount Support / ORM Versioning
+---------------------------------
+
+The SQL Server drivers have very limited ability to return the number
+of rows updated from an UPDATE or DELETE statement. In particular, the
+pymssql driver has no support, whereas the pyodbc driver can only return
+this value under certain conditions.
+
+Most significantly, updated rowcount is not available when OUTPUT INSERTED
+is used. This impacts the SQLAlchemy ORM's versioning feature when
+server-side versioning schemes are used. When
+using pyodbc, the "implicit_returning" flag needs to be set to false
+for any ORM mapped class that uses a version_id column in conjunction with
+a server-side version generator::
+
+ class MyTable(Base):
+ __tablename__ = 'mytable'
+ id = Column(Integer, primary_key=True)
+ stuff = Column(String(10))
+ timestamp = Column(TIMESTAMP(), default=text('DEFAULT'))
+ __mapper_args__ = {
+ 'version_id_col': timestamp,
+ 'version_id_generator': False,
+ }
+ __table_args__ = {
+ 'implicit_returning': False
+ }
+
+Without the implicit_returning flag above, the UPDATE statement will
+use ``OUTPUT inserted.timestamp`` and the rowcount will be returned as
+-1, causing the versioning logic to fail.
+
Enabling Snapshot Isolation
---------------------------
@@ -1513,6 +1588,7 @@ class MSDialect(default.DefaultDialect):
use_scope_identity=True,
max_identifier_length=None,
schema_name="dbo",
+ isolation_level=None,
deprecate_large_types=None,
legacy_schema_aliasing=False, **opts):
self.query_timeout = int(query_timeout or 0)
@@ -1526,6 +1602,8 @@ class MSDialect(default.DefaultDialect):
super(MSDialect, self).__init__(**opts)
+ self.isolation_level = isolation_level
+
def do_savepoint(self, connection, name):
# give the DBAPI a push
connection.execute("IF @@TRANCOUNT = 0 BEGIN TRANSACTION")
@@ -1535,10 +1613,52 @@ class MSDialect(default.DefaultDialect):
# SQL Server does not support RELEASE SAVEPOINT
pass
+ _isolation_lookup = set(['SERIALIZABLE', 'READ UNCOMMITTED',
+ 'READ COMMITTED', 'REPEATABLE READ',
+ 'SNAPSHOT'])
+
+ def set_isolation_level(self, connection, level):
+ level = level.replace('_', ' ')
+ if level not in self._isolation_lookup:
+ raise exc.ArgumentError(
+ "Invalid value '%s' for isolation_level. "
+ "Valid isolation levels for %s are %s" %
+ (level, self.name, ", ".join(self._isolation_lookup))
+ )
+ cursor = connection.cursor()
+ cursor.execute(
+ "SET TRANSACTION ISOLATION LEVEL %s" % level)
+ cursor.close()
+
+ def get_isolation_level(self, connection):
+ cursor = connection.cursor()
+ cursor.execute("""
+ SELECT CASE transaction_isolation_level
+ WHEN 0 THEN NULL
+ WHEN 1 THEN 'READ UNCOMMITTED'
+ WHEN 2 THEN 'READ COMMITTED'
+ WHEN 3 THEN 'REPEATABLE READ'
+ WHEN 4 THEN 'SERIALIZABLE'
+ WHEN 5 THEN 'SNAPSHOT' END AS TRANSACTION_ISOLATION_LEVEL
+ FROM sys.dm_exec_sessions
+ WHERE session_id = @@SPID
+ """)
+ val = cursor.fetchone()[0]
+ cursor.close()
+ return val.upper()
+
def initialize(self, connection):
super(MSDialect, self).initialize(connection)
self._setup_version_attributes()
+ def on_connect(self):
+ if self.isolation_level is not None:
+ def connect(conn):
+ self.set_isolation_level(conn, self.isolation_level)
+ return connect
+ else:
+ return None
+
def _setup_version_attributes(self):
if self.server_version_info[0] not in list(range(8, 17)):
# FreeTDS with version 4.2 seems to report here
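
Putting the documentation and the new ``set_isolation_level()`` hook above
together, a minimal usage sketch (the DSN is hypothetical; ``SNAPSHOT``
additionally requires snapshot isolation to be enabled on the database, per
the section that follows in the same file)::

    from sqlalchemy import create_engine

    engine = create_engine(
        "mssql+pyodbc://scott:tiger@ms_2008",
        isolation_level="REPEATABLE READ")

    # per-connection override; emits SET TRANSACTION ISOLATION LEVEL SNAPSHOT
    conn = engine.connect().execution_options(isolation_level="SNAPSHOT")
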
diff --git a/lib/sqlalchemy/dialects/mssql/information_schema.py b/lib/sqlalchemy/dialects/mssql/information_schema.py
index a6faa7bca..e2c0a466c 100644
--- a/lib/sqlalchemy/dialects/mssql/information_schema.py
+++ b/lib/sqlalchemy/dialects/mssql/information_schema.py
@@ -1,5 +1,5 @@
# mssql/information_schema.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/mssql/mxodbc.py b/lib/sqlalchemy/dialects/mssql/mxodbc.py
index ac87c67a9..5e20ed11b 100644
--- a/lib/sqlalchemy/dialects/mssql/mxodbc.py
+++ b/lib/sqlalchemy/dialects/mssql/mxodbc.py
@@ -1,5 +1,5 @@
# mssql/mxodbc.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/mssql/pymssql.py b/lib/sqlalchemy/dialects/mssql/pymssql.py
index 1d7635c7f..e3a4db8ab 100644
--- a/lib/sqlalchemy/dialects/mssql/pymssql.py
+++ b/lib/sqlalchemy/dialects/mssql/pymssql.py
@@ -1,5 +1,5 @@
# mssql/pymssql.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/mssql/pyodbc.py b/lib/sqlalchemy/dialects/mssql/pyodbc.py
index 7ec8cbaa7..45c091cfb 100644
--- a/lib/sqlalchemy/dialects/mssql/pyodbc.py
+++ b/lib/sqlalchemy/dialects/mssql/pyodbc.py
@@ -1,5 +1,5 @@
# mssql/pyodbc.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -93,6 +93,13 @@ for unix + PyODBC.
.. versionadded:: 0.7.7
``supports_unicode_binds`` parameter to ``create_engine()``\ .
+Rowcount Support
+----------------
+
+Pyodbc only has partial support for rowcount. See the notes at
+:ref:`mssql_rowcount_versioning` for important details when using ORM
+versioning.
+
"""
from .base import MSExecutionContext, MSDialect, VARBINARY
diff --git a/lib/sqlalchemy/dialects/mssql/zxjdbc.py b/lib/sqlalchemy/dialects/mssql/zxjdbc.py
index 85539817e..0bf68c2a2 100644
--- a/lib/sqlalchemy/dialects/mssql/zxjdbc.py
+++ b/lib/sqlalchemy/dialects/mssql/zxjdbc.py
@@ -1,5 +1,5 @@
# mssql/zxjdbc.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/mysql/__init__.py b/lib/sqlalchemy/dialects/mysql/__init__.py
index ca204fcd1..a17bcb402 100644
--- a/lib/sqlalchemy/dialects/mysql/__init__.py
+++ b/lib/sqlalchemy/dialects/mysql/__init__.py
@@ -1,5 +1,5 @@
# mysql/__init__.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py
index 61c4a3fac..5abb1f3d6 100644
--- a/lib/sqlalchemy/dialects/mysql/base.py
+++ b/lib/sqlalchemy/dialects/mysql/base.py
@@ -1,5 +1,5 @@
# mysql/base.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -111,19 +111,45 @@ to be used.
Transaction Isolation Level
---------------------------
-:func:`.create_engine` accepts an :paramref:`.create_engine.isolation_level`
-parameter which results in the command ``SET SESSION
-TRANSACTION ISOLATION LEVEL <level>`` being invoked for
-every new connection. Valid values for this parameter are
-``READ COMMITTED``, ``READ UNCOMMITTED``,
-``REPEATABLE READ``, and ``SERIALIZABLE``::
+All MySQL dialects support setting of transaction isolation level
+both via a dialect-specific parameter :paramref:`.create_engine.isolation_level`
+accepted by :func:`.create_engine`,
+as well as the :paramref:`.Connection.execution_options.isolation_level`
+argument as passed to :meth:`.Connection.execution_options`.
+This feature works by issuing the command
+``SET SESSION TRANSACTION ISOLATION LEVEL <level>`` for
+each new connection. For the special AUTOCOMMIT isolation level, DBAPI-specific
+techniques are used.
+
+To set isolation level using :func:`.create_engine`::
engine = create_engine(
"mysql://scott:tiger@localhost/test",
isolation_level="READ UNCOMMITTED"
)
-.. versionadded:: 0.7.6
+To set using per-connection execution options::
+
+ connection = engine.connect()
+ connection = connection.execution_options(
+ isolation_level="READ COMMITTED"
+ )
+
+Valid values for ``isolation_level`` include:
+
+* ``READ COMMITTED``
+* ``READ UNCOMMITTED``
+* ``REPEATABLE READ``
+* ``SERIALIZABLE``
+* ``AUTOCOMMIT``
+
+The special ``AUTOCOMMIT`` value makes use of the various "autocommit"
+attributes provided by specific DBAPIs, and is currently supported by
+MySQLdb, MySQL-Client, MySQL-Connector Python, and PyMySQL. Using it,
+the MySQL connection will return true for the value of
+``SELECT @@autocommit;``.
+
+.. versionadded:: 1.1 - added support for the AUTOCOMMIT isolation level.
AUTO_INCREMENT Behavior
-----------------------
@@ -835,9 +861,16 @@ class MySQLCompiler(compiler.SQLCompiler):
return ""
def visit_join(self, join, asfrom=False, **kwargs):
+ if join.full:
+ join_type = " FULL OUTER JOIN "
+ elif join.isouter:
+ join_type = " LEFT OUTER JOIN "
+ else:
+ join_type = " INNER JOIN "
+
return ''.join(
(self.process(join.left, asfrom=True, **kwargs),
- (join.isouter and " LEFT OUTER JOIN " or " INNER JOIN "),
+ join_type,
self.process(join.right, asfrom=True, **kwargs),
" ON ",
self.process(join.onclause, **kwargs)))
@@ -1437,6 +1470,16 @@ class MySQLDialect(default.DefaultDialect):
def set_isolation_level(self, connection, level):
level = level.replace('_', ' ')
+
+ # adjust for ConnectionFairy being present
+ # allows attribute set e.g. "connection.autocommit = True"
+ # to work properly
+ if hasattr(connection, 'connection'):
+ connection = connection.connection
+
+ self._set_isolation_level(connection, level)
+
+ def _set_isolation_level(self, connection, level):
if level not in self._isolation_lookup:
raise exc.ArgumentError(
"Invalid value '%s' for isolation_level. "
diff --git a/lib/sqlalchemy/dialects/mysql/cymysql.py b/lib/sqlalchemy/dialects/mysql/cymysql.py
index 6d8466ab1..8bc0ae3be 100644
--- a/lib/sqlalchemy/dialects/mysql/cymysql.py
+++ b/lib/sqlalchemy/dialects/mysql/cymysql.py
@@ -1,5 +1,5 @@
# mysql/cymysql.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/mysql/enumerated.py b/lib/sqlalchemy/dialects/mysql/enumerated.py
index 53de2b5fe..567e95288 100644
--- a/lib/sqlalchemy/dialects/mysql/enumerated.py
+++ b/lib/sqlalchemy/dialects/mysql/enumerated.py
@@ -1,5 +1,5 @@
# mysql/enumerated.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -69,13 +69,16 @@ class ENUM(sqltypes.Enum, _EnumeratedValues):
:param enums: The range of valid values for this ENUM. Values will be
quoted when generating the schema according to the quoting flag (see
- below).
+ below). This object may also be a PEP-435-compliant enumerated
+ type.
- :param strict: Defaults to False: ensure that a given value is in this
- ENUM's range of permissible values when inserting or updating rows.
- Note that MySQL will not raise a fatal error if you attempt to store
- an out of range value- an alternate value will be stored instead.
- (See MySQL ENUM documentation.)
+ .. versionadded:: 1.1 added support for PEP-435-compliant enumerated
+ types.
+
+ :param strict: This flag has no effect.
+
+ .. versionchanged:: 1.1 The MySQL ENUM type as well as the base Enum
+ type now validates all Python data values.
:param charset: Optional, a column-level character set for this string
value. Takes precedence to 'ascii' or 'unicode' short-hand.
@@ -109,8 +112,9 @@ class ENUM(sqltypes.Enum, _EnumeratedValues):
literals for you. This is a transitional option.
"""
- values, length = self._init_values(enums, kw)
- self.strict = kw.pop('strict', False)
+
+ kw.pop('strict', None)
+ sqltypes.Enum.__init__(self, *enums)
kw.pop('metadata', None)
kw.pop('schema', None)
kw.pop('name', None)
@@ -118,29 +122,17 @@ class ENUM(sqltypes.Enum, _EnumeratedValues):
kw.pop('native_enum', None)
kw.pop('inherit_schema', None)
kw.pop('_create_events', None)
- _StringType.__init__(self, length=length, **kw)
- sqltypes.Enum.__init__(self, *values)
+ _StringType.__init__(self, length=self.length, **kw)
+
+ def _setup_for_values(self, values, objects, kw):
+ values, length = self._init_values(values, kw)
+ return sqltypes.Enum._setup_for_values(self, values, objects, kw)
def __repr__(self):
return util.generic_repr(
self, to_inspect=[ENUM, _StringType, sqltypes.Enum])
- def bind_processor(self, dialect):
- super_convert = super(ENUM, self).bind_processor(dialect)
-
- def process(value):
- if self.strict and value is not None and value not in self.enums:
- raise exc.InvalidRequestError('"%s" not a valid value for '
- 'this enum' % value)
- if super_convert:
- return super_convert(value)
- else:
- return value
- return process
-
def adapt(self, cls, **kw):
- if issubclass(cls, ENUM):
- kw['strict'] = self.strict
return sqltypes.Enum.adapt(self, cls, **kw)
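
The ``_setup_for_values()`` override above lets the MySQL ``ENUM`` accept a
PEP-435 enumeration the same way the base ``Enum`` now does, while ``strict``
becomes a no-op.  A sketch of the new calling form (class and table names are
illustrative)::

    import enum

    from sqlalchemy import Column, MetaData, Table
    from sqlalchemy.dialects.mysql import ENUM

    class Mood(enum.Enum):      # a PEP-435 enumeration
        happy = "happy"
        grumpy = "grumpy"

    metadata = MetaData()
    person = Table('person', metadata,
                   Column('mood', ENUM(Mood, charset='utf8')))
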
diff --git a/lib/sqlalchemy/dialects/mysql/gaerdbms.py b/lib/sqlalchemy/dialects/mysql/gaerdbms.py
index 58b70737f..4e365884e 100644
--- a/lib/sqlalchemy/dialects/mysql/gaerdbms.py
+++ b/lib/sqlalchemy/dialects/mysql/gaerdbms.py
@@ -1,5 +1,5 @@
# mysql/gaerdbms.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/mysql/json.py b/lib/sqlalchemy/dialects/mysql/json.py
index a30cdc841..b2d5a78b5 100644
--- a/lib/sqlalchemy/dialects/mysql/json.py
+++ b/lib/sqlalchemy/dialects/mysql/json.py
@@ -1,5 +1,5 @@
# mysql/json.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py
index 3a4eeec05..8fe6a9182 100644
--- a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py
+++ b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py
@@ -1,5 +1,5 @@
# mysql/mysqlconnector.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -173,4 +173,17 @@ class MySQLDialect_mysqlconnector(MySQLDialect):
def _compat_fetchone(self, rp, charset=None):
return rp.fetchone()
+ _isolation_lookup = set(['SERIALIZABLE', 'READ UNCOMMITTED',
+ 'READ COMMITTED', 'REPEATABLE READ',
+ 'AUTOCOMMIT'])
+
+ def _set_isolation_level(self, connection, level):
+ if level == 'AUTOCOMMIT':
+ connection.autocommit = True
+ else:
+ connection.autocommit = False
+ super(MySQLDialect_mysqlconnector, self)._set_isolation_level(
+ connection, level)
+
+
dialect = MySQLDialect_mysqlconnector
diff --git a/lib/sqlalchemy/dialects/mysql/mysqldb.py b/lib/sqlalchemy/dialects/mysql/mysqldb.py
index 4a7ba7e1d..aa8377b27 100644
--- a/lib/sqlalchemy/dialects/mysql/mysqldb.py
+++ b/lib/sqlalchemy/dialects/mysql/mysqldb.py
@@ -1,5 +1,5 @@
# mysql/mysqldb.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -194,5 +194,17 @@ class MySQLDialect_mysqldb(MySQLDialect):
else:
return cset_name()
+ _isolation_lookup = set(['SERIALIZABLE', 'READ UNCOMMITTED',
+ 'READ COMMITTED', 'REPEATABLE READ',
+ 'AUTOCOMMIT'])
+
+ def _set_isolation_level(self, connection, level):
+ if level == 'AUTOCOMMIT':
+ connection.autocommit(True)
+ else:
+ connection.autocommit(False)
+ super(MySQLDialect_mysqldb, self)._set_isolation_level(connection,
+ level)
+
dialect = MySQLDialect_mysqldb
diff --git a/lib/sqlalchemy/dialects/mysql/oursql.py b/lib/sqlalchemy/dialects/mysql/oursql.py
index ae8abc321..b91db1836 100644
--- a/lib/sqlalchemy/dialects/mysql/oursql.py
+++ b/lib/sqlalchemy/dialects/mysql/oursql.py
@@ -1,5 +1,5 @@
# mysql/oursql.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/mysql/pymysql.py b/lib/sqlalchemy/dialects/mysql/pymysql.py
index 87159b561..3c493fbfc 100644
--- a/lib/sqlalchemy/dialects/mysql/pymysql.py
+++ b/lib/sqlalchemy/dialects/mysql/pymysql.py
@@ -1,5 +1,5 @@
# mysql/pymysql.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/mysql/pyodbc.py b/lib/sqlalchemy/dialects/mysql/pyodbc.py
index b544f0584..882d3ea4e 100644
--- a/lib/sqlalchemy/dialects/mysql/pyodbc.py
+++ b/lib/sqlalchemy/dialects/mysql/pyodbc.py
@@ -1,5 +1,5 @@
# mysql/pyodbc.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/mysql/reflection.py b/lib/sqlalchemy/dialects/mysql/reflection.py
index cf1078252..77bf9105e 100644
--- a/lib/sqlalchemy/dialects/mysql/reflection.py
+++ b/lib/sqlalchemy/dialects/mysql/reflection.py
@@ -1,5 +1,5 @@
# mysql/reflection.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/mysql/types.py b/lib/sqlalchemy/dialects/mysql/types.py
index 9512982f5..d20b8bdfc 100644
--- a/lib/sqlalchemy/dialects/mysql/types.py
+++ b/lib/sqlalchemy/dialects/mysql/types.py
@@ -1,5 +1,5 @@
# mysql/types.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/mysql/zxjdbc.py b/lib/sqlalchemy/dialects/mysql/zxjdbc.py
index 37b0b6309..fe4c13705 100644
--- a/lib/sqlalchemy/dialects/mysql/zxjdbc.py
+++ b/lib/sqlalchemy/dialects/mysql/zxjdbc.py
@@ -1,5 +1,5 @@
# mysql/zxjdbc.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/oracle/__init__.py b/lib/sqlalchemy/dialects/oracle/__init__.py
index b055b0b16..0c5c3174b 100644
--- a/lib/sqlalchemy/dialects/oracle/__init__.py
+++ b/lib/sqlalchemy/dialects/oracle/__init__.py
@@ -1,5 +1,5 @@
# oracle/__init__.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py
index 82ec72f2b..6992670c4 100644
--- a/lib/sqlalchemy/dialects/oracle/base.py
+++ b/lib/sqlalchemy/dialects/oracle/base.py
@@ -1,5 +1,5 @@
# oracle/base.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/oracle/cx_oracle.py b/lib/sqlalchemy/dialects/oracle/cx_oracle.py
index dede3b21a..0c93ced97 100644
--- a/lib/sqlalchemy/dialects/oracle/cx_oracle.py
+++ b/lib/sqlalchemy/dialects/oracle/cx_oracle.py
@@ -1,5 +1,5 @@
# oracle/cx_oracle.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -415,6 +415,21 @@ class _OracleLong(oracle.LONG):
class _OracleString(_NativeUnicodeMixin, sqltypes.String):
pass
+class _OracleEnum(_NativeUnicodeMixin, sqltypes.Enum):
+ def bind_processor(self, dialect):
+ enum_proc = sqltypes.Enum.bind_processor(self, dialect)
+ if util.py2k:
+ unicode_proc = _NativeUnicodeMixin.bind_processor(self, dialect)
+ else:
+ unicode_proc = None
+
+ def process(value):
+ raw_str = enum_proc(value)
+ if unicode_proc:
+ raw_str = unicode_proc(raw_str)
+ return raw_str
+ return process
+
class _OracleUnicodeText(
_LOBMixin, _NativeUnicodeMixin, sqltypes.UnicodeText):
@@ -651,6 +666,7 @@ class OracleDialect_cx_oracle(OracleDialect):
sqltypes.String: _OracleString,
sqltypes.UnicodeText: _OracleUnicodeText,
sqltypes.CHAR: _OracleChar,
+ sqltypes.Enum: _OracleEnum,
# a raw LONG is a text type, but does *not*
# get the LobMixin with cx_oracle.
diff --git a/lib/sqlalchemy/dialects/oracle/zxjdbc.py b/lib/sqlalchemy/dialects/oracle/zxjdbc.py
index b12082f4c..c3259feae 100644
--- a/lib/sqlalchemy/dialects/oracle/zxjdbc.py
+++ b/lib/sqlalchemy/dialects/oracle/zxjdbc.py
@@ -1,5 +1,5 @@
# oracle/zxjdbc.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -70,8 +70,7 @@ class OracleCompiler_zxjdbc(OracleCompiler):
expression._select_iterables(returning_cols))
# within_columns_clause=False so that labels (foo AS bar) don't render
- columns = [self.process(c, within_columns_clause=False,
- result_map=self.result_map)
+ columns = [self.process(c, within_columns_clause=False)
for c in self.returning_cols]
if not hasattr(self, 'returning_parameters'):
diff --git a/lib/sqlalchemy/dialects/postgres.py b/lib/sqlalchemy/dialects/postgres.py
deleted file mode 100644
index 3335333e5..000000000
--- a/lib/sqlalchemy/dialects/postgres.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# dialects/postgres.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
-# <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-# backwards compat with the old name
-from sqlalchemy.util import warn_deprecated
-
-warn_deprecated(
- "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to "
- "'postgresql'. The new URL format is "
- "postgresql[+driver]://<user>:<pass>@<host>/<dbname>"
-)
-
-from sqlalchemy.dialects.postgresql import *
-from sqlalchemy.dialects.postgresql import base
diff --git a/lib/sqlalchemy/dialects/postgresql/__init__.py b/lib/sqlalchemy/dialects/postgresql/__init__.py
index d67f2a07e..8aa4509be 100644
--- a/lib/sqlalchemy/dialects/postgresql/__init__.py
+++ b/lib/sqlalchemy/dialects/postgresql/__init__.py
@@ -1,5 +1,5 @@
# postgresql/__init__.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/postgresql/array.py b/lib/sqlalchemy/dialects/postgresql/array.py
index f4316d318..8d811aa55 100644
--- a/lib/sqlalchemy/dialects/postgresql/array.py
+++ b/lib/sqlalchemy/dialects/postgresql/array.py
@@ -1,5 +1,5 @@
# postgresql/array.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py
index 692305319..eb3449e40 100644
--- a/lib/sqlalchemy/dialects/postgresql/base.py
+++ b/lib/sqlalchemy/dialects/postgresql/base.py
@@ -1,5 +1,5 @@
# postgresql/base.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -50,11 +50,12 @@ Transaction Isolation Level
All Postgresql dialects support setting of transaction isolation level
both via a dialect-specific parameter :paramref:`.create_engine.isolation_level`
accepted by :func:`.create_engine`,
-as well as the ``isolation_level`` argument as passed to
+as well as the :paramref:`.Connection.execution_options.isolation_level` argument as passed to
:meth:`.Connection.execution_options`. When using a non-psycopg2 dialect,
this feature works by issuing the command
``SET SESSION CHARACTERISTICS AS TRANSACTION ISOLATION LEVEL <level>`` for
-each new connection.
+each new connection. For the special AUTOCOMMIT isolation level, DBAPI-specific
+techniques are used.
To set isolation level using :func:`.create_engine`::
@@ -76,10 +77,7 @@ Valid values for ``isolation_level`` include:
* ``READ UNCOMMITTED``
* ``REPEATABLE READ``
* ``SERIALIZABLE``
-
-The :mod:`~sqlalchemy.dialects.postgresql.psycopg2` and
-:mod:`~sqlalchemy.dialects.postgresql.pg8000` dialects also offer the
-special level ``AUTOCOMMIT``.
+* ``AUTOCOMMIT`` - on psycopg2 / pg8000 only
.. seealso::
@@ -347,6 +345,7 @@ syntaxes. It uses SQLAlchemy's hints mechanism::
# DELETE FROM ONLY ...
table.delete().with_hint('ONLY', dialect_name='postgresql')
+
.. _postgresql_indexes:
Postgresql-Specific Index Options
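
Reflecting the isolation-level doc change above, ``AUTOCOMMIT`` is now listed
alongside the standard levels for psycopg2 / pg8000; a minimal sketch with a
hypothetical DSN::

    from sqlalchemy import create_engine

    engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test")

    # handled through the DBAPI's own autocommit mode rather than
    # SET SESSION CHARACTERISTICS
    conn = engine.connect().execution_options(isolation_level="AUTOCOMMIT")
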
diff --git a/lib/sqlalchemy/dialects/postgresql/ext.py b/lib/sqlalchemy/dialects/postgresql/ext.py
index 66c7ed0e5..8a6524f42 100644
--- a/lib/sqlalchemy/dialects/postgresql/ext.py
+++ b/lib/sqlalchemy/dialects/postgresql/ext.py
@@ -1,5 +1,5 @@
# postgresql/ext.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/postgresql/hstore.py b/lib/sqlalchemy/dialects/postgresql/hstore.py
index d2d20386a..67923fe39 100644
--- a/lib/sqlalchemy/dialects/postgresql/hstore.py
+++ b/lib/sqlalchemy/dialects/postgresql/hstore.py
@@ -1,5 +1,5 @@
# postgresql/hstore.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/postgresql/json.py b/lib/sqlalchemy/dialects/postgresql/json.py
index 6ff9fd88e..b0f0f7cf0 100644
--- a/lib/sqlalchemy/dialects/postgresql/json.py
+++ b/lib/sqlalchemy/dialects/postgresql/json.py
@@ -1,5 +1,5 @@
# postgresql/json.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/postgresql/pg8000.py b/lib/sqlalchemy/dialects/postgresql/pg8000.py
index 2c745e6f7..9daab5851 100644
--- a/lib/sqlalchemy/dialects/postgresql/pg8000.py
+++ b/lib/sqlalchemy/dialects/postgresql/pg8000.py
@@ -1,5 +1,5 @@
# postgresql/pg8000.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors <see AUTHORS
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors <see AUTHORS
# file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
index 82fcc9054..fe245b21d 100644
--- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py
+++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
@@ -1,5 +1,5 @@
# postgresql/psycopg2.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py b/lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py
index 97f241d2e..ab99a8392 100644
--- a/lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py
+++ b/lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py
@@ -1,5 +1,5 @@
# testing/engines.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/postgresql/pypostgresql.py b/lib/sqlalchemy/dialects/postgresql/pypostgresql.py
index db6d5e16c..f2b850a9a 100644
--- a/lib/sqlalchemy/dialects/postgresql/pypostgresql.py
+++ b/lib/sqlalchemy/dialects/postgresql/pypostgresql.py
@@ -1,5 +1,5 @@
# postgresql/pypostgresql.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/postgresql/ranges.py b/lib/sqlalchemy/dialects/postgresql/ranges.py
index 59c35c871..42a1cd4b1 100644
--- a/lib/sqlalchemy/dialects/postgresql/ranges.py
+++ b/lib/sqlalchemy/dialects/postgresql/ranges.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2013-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2013-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/postgresql/zxjdbc.py b/lib/sqlalchemy/dialects/postgresql/zxjdbc.py
index 1b542152c..cc464601b 100644
--- a/lib/sqlalchemy/dialects/postgresql/zxjdbc.py
+++ b/lib/sqlalchemy/dialects/postgresql/zxjdbc.py
@@ -1,5 +1,5 @@
# postgresql/zxjdbc.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/sqlite/__init__.py b/lib/sqlalchemy/dialects/sqlite/__init__.py
index 608630a25..a8dec300a 100644
--- a/lib/sqlalchemy/dialects/sqlite/__init__.py
+++ b/lib/sqlalchemy/dialects/sqlite/__init__.py
@@ -1,5 +1,5 @@
# sqlite/__init__.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py
index 0e048aeff..ddd869448 100644
--- a/lib/sqlalchemy/dialects/sqlite/base.py
+++ b/lib/sqlalchemy/dialects/sqlite/base.py
@@ -1,5 +1,5 @@
# sqlite/base.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -1297,12 +1297,20 @@ class SQLiteDialect(default.DefaultDialect):
@reflection.cache
def get_pk_constraint(self, connection, table_name, schema=None, **kw):
+ constraint_name = None
+ table_data = self._get_table_sql(connection, table_name, schema=schema)
+ if table_data:
+ PK_PATTERN = r'CONSTRAINT (\w+) PRIMARY KEY'
+ result = re.search(PK_PATTERN, table_data, re.I)
+ constraint_name = result.group(1) if result else None
+
cols = self.get_columns(connection, table_name, schema, **kw)
pkeys = []
for col in cols:
if col['primary_key']:
pkeys.append(col['name'])
- return {'constrained_columns': pkeys, 'name': None}
+
+ return {'constrained_columns': pkeys, 'name': constraint_name}
@reflection.cache
def get_foreign_keys(self, connection, table_name, schema=None, **kw):
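
With the ``PK_PATTERN`` search above, a PRIMARY KEY declared through a named
``CONSTRAINT`` clause now reflects its name; a quick sketch using the
inspector (in-memory database for illustration)::

    from sqlalchemy import create_engine, inspect

    engine = create_engine("sqlite://")
    engine.execute(
        "CREATE TABLE t (id INTEGER, CONSTRAINT pk_t PRIMARY KEY (id))")

    print(inspect(engine).get_pk_constraint("t"))
    # {'constrained_columns': ['id'], 'name': 'pk_t'}  -- previously None
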
diff --git a/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py b/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py
index 9166e36bc..bbafc8d60 100644
--- a/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py
+++ b/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py
@@ -1,5 +1,5 @@
# sqlite/pysqlcipher.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/sqlite/pysqlite.py b/lib/sqlalchemy/dialects/sqlite/pysqlite.py
index e1c443477..33d04deeb 100644
--- a/lib/sqlalchemy/dialects/sqlite/pysqlite.py
+++ b/lib/sqlalchemy/dialects/sqlite/pysqlite.py
@@ -1,5 +1,5 @@
# sqlite/pysqlite.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/sybase/__init__.py b/lib/sqlalchemy/dialects/sybase/__init__.py
index 0c55de1d6..18535edcb 100644
--- a/lib/sqlalchemy/dialects/sybase/__init__.py
+++ b/lib/sqlalchemy/dialects/sybase/__init__.py
@@ -1,5 +1,5 @@
# sybase/__init__.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/sybase/base.py b/lib/sqlalchemy/dialects/sybase/base.py
index 187521831..fba13c722 100644
--- a/lib/sqlalchemy/dialects/sybase/base.py
+++ b/lib/sqlalchemy/dialects/sybase/base.py
@@ -1,5 +1,5 @@
# sybase/base.py
-# Copyright (C) 2010-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2010-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
# get_select_precolumns(), limit_clause() implementation
# copyright (C) 2007 Fisch Asset Management
diff --git a/lib/sqlalchemy/dialects/sybase/mxodbc.py b/lib/sqlalchemy/dialects/sybase/mxodbc.py
index 240b634d4..60e6510a5 100644
--- a/lib/sqlalchemy/dialects/sybase/mxodbc.py
+++ b/lib/sqlalchemy/dialects/sybase/mxodbc.py
@@ -1,5 +1,5 @@
# sybase/mxodbc.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/sybase/pyodbc.py b/lib/sqlalchemy/dialects/sybase/pyodbc.py
index 168997074..348ca321d 100644
--- a/lib/sqlalchemy/dialects/sybase/pyodbc.py
+++ b/lib/sqlalchemy/dialects/sybase/pyodbc.py
@@ -1,5 +1,5 @@
# sybase/pyodbc.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/dialects/sybase/pysybase.py b/lib/sqlalchemy/dialects/sybase/pysybase.py
index a30739444..41ca47fd3 100644
--- a/lib/sqlalchemy/dialects/sybase/pysybase.py
+++ b/lib/sqlalchemy/dialects/sybase/pysybase.py
@@ -1,5 +1,5 @@
# sybase/pysybase.py
-# Copyright (C) 2010-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2010-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/engine/__init__.py b/lib/sqlalchemy/engine/__init__.py
index 02c35d6a9..adca6694e 100644
--- a/lib/sqlalchemy/engine/__init__.py
+++ b/lib/sqlalchemy/engine/__init__.py
@@ -1,5 +1,5 @@
# engine/__init__.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py
index 0b928566d..859819a34 100644
--- a/lib/sqlalchemy/engine/base.py
+++ b/lib/sqlalchemy/engine/base.py
@@ -1,5 +1,5 @@
# engine/base.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -273,6 +273,8 @@ class Connection(Connectable):
:ref:`MySQL Transaction Isolation <mysql_isolation_level>`
+ :ref:`SQL Server Transaction Isolation <mssql_isolation_level>`
+
:ref:`session_transaction_isolation` - for the ORM
:param no_parameters: When ``True``, if the final parameter
diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py
index 9f845e79d..3ed2d5ee8 100644
--- a/lib/sqlalchemy/engine/default.py
+++ b/lib/sqlalchemy/engine/default.py
@@ -1,5 +1,5 @@
# engine/default.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py
index c84823d1e..26731f9a5 100644
--- a/lib/sqlalchemy/engine/interfaces.py
+++ b/lib/sqlalchemy/engine/interfaces.py
@@ -1,5 +1,5 @@
# engine/interfaces.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/engine/reflection.py b/lib/sqlalchemy/engine/reflection.py
index 6880660ce..eaa5e2e48 100644
--- a/lib/sqlalchemy/engine/reflection.py
+++ b/lib/sqlalchemy/engine/reflection.py
@@ -1,5 +1,5 @@
# engine/reflection.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py
index 9208686e1..afa6b4dca 100644
--- a/lib/sqlalchemy/engine/result.py
+++ b/lib/sqlalchemy/engine/result.py
@@ -1,5 +1,5 @@
# engine/result.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -10,7 +10,7 @@ and :class:`.RowProxy."""
from .. import exc, util
-from ..sql import expression, sqltypes
+from ..sql import expression, sqltypes, util as sql_util
import collections
import operator
@@ -153,7 +153,7 @@ class RowProxy(BaseRowProxy):
return self._op(other, operator.ne)
def __repr__(self):
- return repr(tuple(self))
+ return repr(sql_util._repr_row(self))
def has_key(self, key):
"""Return True if this RowProxy contains the given key."""
@@ -568,11 +568,11 @@ class ResultMetaData(object):
else:
return self._key_fallback(key, False) is not None
- def _getter(self, key):
+ def _getter(self, key, raiseerr=True):
if key in self._keymap:
processor, obj, index = self._keymap[key]
else:
- ret = self._key_fallback(key, False)
+ ret = self._key_fallback(key, raiseerr)
if ret is None:
return None
processor, obj, index = ret
@@ -647,13 +647,13 @@ class ResultProxy(object):
context.engine._should_log_debug()
self._init_metadata()
- def _getter(self, key):
+ def _getter(self, key, raiseerr=True):
try:
getter = self._metadata._getter
except AttributeError:
return self._non_result(None)
else:
- return getter(key)
+ return getter(key, raiseerr)
def _has_key(self, key):
try:
@@ -1087,7 +1087,7 @@ class ResultProxy(object):
log = self.context.engine.logger.debug
l = []
for row in rows:
- log("Row %r", row)
+ log("Row %r", sql_util._repr_row(row))
l.append(process_row(metadata, row, processors, keymap))
return l
else:
diff --git a/lib/sqlalchemy/engine/strategies.py b/lib/sqlalchemy/engine/strategies.py
index d8e2d4764..82800a918 100644
--- a/lib/sqlalchemy/engine/strategies.py
+++ b/lib/sqlalchemy/engine/strategies.py
@@ -1,5 +1,5 @@
# engine/strategies.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/engine/threadlocal.py b/lib/sqlalchemy/engine/threadlocal.py
index 0d6e1c0f1..505d1fadd 100644
--- a/lib/sqlalchemy/engine/threadlocal.py
+++ b/lib/sqlalchemy/engine/threadlocal.py
@@ -1,5 +1,5 @@
# engine/threadlocal.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/engine/url.py b/lib/sqlalchemy/engine/url.py
index 9a955948a..cdb3bb7bb 100644
--- a/lib/sqlalchemy/engine/url.py
+++ b/lib/sqlalchemy/engine/url.py
@@ -1,5 +1,5 @@
# engine/url.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/engine/util.py b/lib/sqlalchemy/engine/util.py
index 3734c9960..d28d87098 100644
--- a/lib/sqlalchemy/engine/util.py
+++ b/lib/sqlalchemy/engine/util.py
@@ -1,5 +1,5 @@
# engine/util.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/event/__init__.py b/lib/sqlalchemy/event/__init__.py
index c9bdb9a0e..dddb92429 100644
--- a/lib/sqlalchemy/event/__init__.py
+++ b/lib/sqlalchemy/event/__init__.py
@@ -1,5 +1,5 @@
# event/__init__.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/event/api.py b/lib/sqlalchemy/event/api.py
index 86ef094d6..0af48dff3 100644
--- a/lib/sqlalchemy/event/api.py
+++ b/lib/sqlalchemy/event/api.py
@@ -1,5 +1,5 @@
# event/api.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/event/attr.py b/lib/sqlalchemy/event/attr.py
index 8a88e40ef..14940136a 100644
--- a/lib/sqlalchemy/event/attr.py
+++ b/lib/sqlalchemy/event/attr.py
@@ -1,5 +1,5 @@
# event/attr.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/event/base.py b/lib/sqlalchemy/event/base.py
index 1fe83eea2..81ef5d89c 100644
--- a/lib/sqlalchemy/event/base.py
+++ b/lib/sqlalchemy/event/base.py
@@ -1,5 +1,5 @@
# event/base.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/event/legacy.py b/lib/sqlalchemy/event/legacy.py
index daa74226f..b359bf48a 100644
--- a/lib/sqlalchemy/event/legacy.py
+++ b/lib/sqlalchemy/event/legacy.py
@@ -1,5 +1,5 @@
# event/legacy.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/event/registry.py b/lib/sqlalchemy/event/registry.py
index a6eabb2ff..e1e9262b6 100644
--- a/lib/sqlalchemy/event/registry.py
+++ b/lib/sqlalchemy/event/registry.py
@@ -1,5 +1,5 @@
# event/registry.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/events.py b/lib/sqlalchemy/events.py
index 0249b2623..c679db37d 100644
--- a/lib/sqlalchemy/events.py
+++ b/lib/sqlalchemy/events.py
@@ -1,5 +1,5 @@
# sqlalchemy/events.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -409,6 +409,46 @@ class PoolEvents(event.Events):
"""
+ def close(self, dbapi_connection, connection_record):
+ """Called when a DBAPI connection is closed.
+
+ The event is emitted before the close occurs.
+
+ The close of a connection can fail; typically this is because
+ the connection is already closed. If the close operation fails,
+ the connection is discarded.
+
+ The :meth:`.close` event corresponds to a connection that is still
+ associated with the pool. To intercept close events for detached
+ connections, use :meth:`.close_detached`.
+
+ .. versionadded:: 1.1
+
+ """
+
+ def detach(self, dbapi_connection, connection_record):
+ """Called when a DBAPI connection is "detached" from a pool.
+
+ This event is emitted after the detach occurs. The connection
+ is no longer associated with the given connection record.
+
+ .. versionadded:: 1.1
+
+ """
+
+ def close_detached(self, dbapi_connection):
+ """Called when a detached DBAPI connection is closed.
+
+ The event is emitted before the close occurs.
+
+ The close of a connection can fail; typically this is because
+ the connection is already closed. If the close operation fails,
+ the connection is discarded.
+
+ .. versionadded:: 1.1
+
+ """
+
class ConnectionEvents(event.Events):
"""Available events for :class:`.Connectable`, which includes
diff --git a/lib/sqlalchemy/exc.py b/lib/sqlalchemy/exc.py
index 3a4f346e0..272984229 100644
--- a/lib/sqlalchemy/exc.py
+++ b/lib/sqlalchemy/exc.py
@@ -1,5 +1,5 @@
# sqlalchemy/exc.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/ext/__init__.py b/lib/sqlalchemy/ext/__init__.py
index 60a17c65e..1c8a59a18 100644
--- a/lib/sqlalchemy/ext/__init__.py
+++ b/lib/sqlalchemy/ext/__init__.py
@@ -1,5 +1,5 @@
# ext/__init__.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/ext/associationproxy.py b/lib/sqlalchemy/ext/associationproxy.py
index 31f16287d..fdc44f386 100644
--- a/lib/sqlalchemy/ext/associationproxy.py
+++ b/lib/sqlalchemy/ext/associationproxy.py
@@ -1,5 +1,5 @@
# ext/associationproxy.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/ext/automap.py b/lib/sqlalchemy/ext/automap.py
index 616cd070d..023d11ca8 100644
--- a/lib/sqlalchemy/ext/automap.py
+++ b/lib/sqlalchemy/ext/automap.py
@@ -1,5 +1,5 @@
# ext/automap.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/ext/baked.py b/lib/sqlalchemy/ext/baked.py
index d255b5ee4..bfdc1e1a0 100644
--- a/lib/sqlalchemy/ext/baked.py
+++ b/lib/sqlalchemy/ext/baked.py
@@ -1,5 +1,5 @@
# sqlalchemy/ext/baked.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/ext/compiler.py b/lib/sqlalchemy/ext/compiler.py
index d4d2ed2ef..86156be1f 100644
--- a/lib/sqlalchemy/ext/compiler.py
+++ b/lib/sqlalchemy/ext/compiler.py
@@ -1,5 +1,5 @@
# ext/compiler.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/ext/declarative/__init__.py b/lib/sqlalchemy/ext/declarative/__init__.py
index f703000bb..f96a40252 100644
--- a/lib/sqlalchemy/ext/declarative/__init__.py
+++ b/lib/sqlalchemy/ext/declarative/__init__.py
@@ -1,5 +1,5 @@
# ext/declarative/__init__.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/ext/declarative/api.py b/lib/sqlalchemy/ext/declarative/api.py
index 5fe427bc2..54e78ee1a 100644
--- a/lib/sqlalchemy/ext/declarative/api.py
+++ b/lib/sqlalchemy/ext/declarative/api.py
@@ -1,5 +1,5 @@
# ext/declarative/api.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/ext/declarative/base.py b/lib/sqlalchemy/ext/declarative/base.py
index 57305748c..59ebe3722 100644
--- a/lib/sqlalchemy/ext/declarative/base.py
+++ b/lib/sqlalchemy/ext/declarative/base.py
@@ -1,5 +1,5 @@
# ext/declarative/base.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/ext/declarative/clsregistry.py b/lib/sqlalchemy/ext/declarative/clsregistry.py
index 050923980..0d62bd2a8 100644
--- a/lib/sqlalchemy/ext/declarative/clsregistry.py
+++ b/lib/sqlalchemy/ext/declarative/clsregistry.py
@@ -1,5 +1,5 @@
# ext/declarative/clsregistry.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/ext/horizontal_shard.py b/lib/sqlalchemy/ext/horizontal_shard.py
index c9fb0b044..996e81fca 100644
--- a/lib/sqlalchemy/ext/horizontal_shard.py
+++ b/lib/sqlalchemy/ext/horizontal_shard.py
@@ -1,5 +1,5 @@
# ext/horizontal_shard.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/ext/hybrid.py b/lib/sqlalchemy/ext/hybrid.py
index 0073494b8..bbf386742 100644
--- a/lib/sqlalchemy/ext/hybrid.py
+++ b/lib/sqlalchemy/ext/hybrid.py
@@ -1,5 +1,5 @@
# ext/hybrid.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/ext/mutable.py b/lib/sqlalchemy/ext/mutable.py
index a5fb3b790..571bbbda3 100644
--- a/lib/sqlalchemy/ext/mutable.py
+++ b/lib/sqlalchemy/ext/mutable.py
@@ -1,5 +1,5 @@
# ext/mutable.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -635,8 +635,26 @@ _setup_composite_listener()
class MutableDict(Mutable, dict):
"""A dictionary type that implements :class:`.Mutable`.
+ The :class:`.MutableDict` object implements a dictionary that will
+ emit change events to the underlying mapping when the contents of
+ the dictionary are altered, including when values are added or removed.
+
+ Note that :class:`.MutableDict` does **not** apply mutable tracking to the
+ *values themselves* inside the dictionary. Therefore it is not a sufficient
+ solution for the use case of tracking deep changes to a *recursive*
+ dictionary structure, such as a JSON structure. To support this use case,
+ build a subclass of :class:`.MutableDict` that provides appropriate
+ coercion to the values placed in the dictionary so that they too are
+ "mutable", and emit events up to their parent structure.
+
.. versionadded:: 0.8
+ .. seealso::
+
+ :class:`.MutableList`
+
+ :class:`.MutableSet`
+
"""
def __setitem__(self, key, value):
@@ -687,3 +705,181 @@ class MutableDict(Mutable, dict):
def __setstate__(self, state):
self.update(state)
+
+
+class MutableList(Mutable, list):
+ """A list type that implements :class:`.Mutable`.
+
+ The :class:`.MutableList` object implements a list that will
+ emit change events to the underlying mapping when the contents of
+ the list are altered, including when values are added or removed.
+
+ Note that :class:`.MutableList` does **not** apply mutable tracking to the
+ *values themselves* inside the list. Therefore it is not a sufficient
+ solution for the use case of tracking deep changes to a *recursive*
+ mutable structure, such as a JSON structure. To support this use case,
+ build a subclass of :class:`.MutableList` that provides appropriate
+ coercion to the values placed in the list so that they too are
+ "mutable", and emit events up to their parent structure.
+
+ .. versionadded:: 1.1
+
+ .. seealso::
+
+ :class:`.MutableDict`
+
+ :class:`.MutableSet`
+
+ """
+
+ def __setitem__(self, index, value):
+ """Detect list set events and emit change events."""
+ list.__setitem__(self, index, value)
+ self.changed()
+
+ def __setslice__(self, start, end, value):
+ """Detect list set events and emit change events."""
+ list.__setslice__(self, start, end, value)
+ self.changed()
+
+ def __delitem__(self, index):
+ """Detect list del events and emit change events."""
+ list.__delitem__(self, index)
+ self.changed()
+
+ def __delslice__(self, start, end):
+ """Detect list del events and emit change events."""
+ list.__delslice__(self, start, end)
+ self.changed()
+
+ def pop(self, *arg):
+ result = list.pop(self, *arg)
+ self.changed()
+ return result
+
+ def append(self, x):
+ list.append(self, x)
+ self.changed()
+
+ def extend(self, x):
+ list.extend(self, x)
+ self.changed()
+
+ def insert(self, i, x):
+ list.insert(self, i, x)
+ self.changed()
+
+ def remove(self, i):
+ list.remove(self, i)
+ self.changed()
+
+ def clear(self):
+ list.clear(self)
+ self.changed()
+
+ def sort(self):
+ list.sort(self)
+ self.changed()
+
+ def reverse(self):
+ list.reverse(self)
+ self.changed()
+
+ @classmethod
+ def coerce(cls, index, value):
+ """Convert plain list to instance of this class."""
+ if not isinstance(value, cls):
+ if isinstance(value, list):
+ return cls(value)
+ return Mutable.coerce(index, value)
+ else:
+ return value
+
+ def __getstate__(self):
+ return list(self)
+
+ def __setstate__(self, state):
+ self[:] = state
+
+
+class MutableSet(Mutable, set):
+ """A set type that implements :class:`.Mutable`.
+
+ The :class:`.MutableSet` object implements a set that will
+ emit change events to the underlying mapping when the contents of
+ the set are altered, including when values are added or removed.
+
+ Note that :class:`.MutableSet` does **not** apply mutable tracking to the
+ *values themselves* inside the set. Therefore it is not a sufficient
+ solution for the use case of tracking deep changes to a *recursive*
+ mutable structure. To support this use case,
+ build a subclass of :class:`.MutableSet` that provides appropriate
+ coercion to the values placed in the set so that they too are
+ "mutable", and emit events up to their parent structure.
+
+ .. versionadded:: 1.1
+
+ .. seealso::
+
+ :class:`.MutableDict`
+
+ :class:`.MutableList`
+
+ """
+
+ def update(self, *arg):
+ set.update(self, *arg)
+ self.changed()
+
+ def intersection_update(self, *arg):
+ set.intersection_update(self, *arg)
+ self.changed()
+
+ def difference_update(self, *arg):
+ set.difference_update(self, *arg)
+ self.changed()
+
+ def symmetric_difference_update(self, *arg):
+ set.symmetric_difference_update(self, *arg)
+ self.changed()
+
+ def add(self, elem):
+ set.add(self, elem)
+ self.changed()
+
+ def remove(self, elem):
+ set.remove(self, elem)
+ self.changed()
+
+ def discard(self, elem):
+ set.discard(self, elem)
+ self.changed()
+
+ def pop(self, *arg):
+ result = set.pop(self, *arg)
+ self.changed()
+ return result
+
+ def clear(self):
+ set.clear(self)
+ self.changed()
+
+ @classmethod
+ def coerce(cls, index, value):
+ """Convert plain set to instance of this class."""
+ if not isinstance(value, cls):
+ if isinstance(value, set):
+ return cls(value)
+ return Mutable.coerce(index, value)
+ else:
+ return value
+
+ def __getstate__(self):
+ return set(self)
+
+ def __setstate__(self, state):
+ self.update(state)
+
+ def __reduce_ex__(self, proto):
+ return (self.__class__, (list(self), ))
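As a usage illustration for the new :class:`.MutableList` type added above (not part of this patch), the type is typically associated with a column via ``Mutable.as_mutable``; the ``Document`` class and its columns here are hypothetical:

    from sqlalchemy import Column, Integer, PickleType
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.ext.mutable import MutableList

    Base = declarative_base()

    class Document(Base):
        __tablename__ = 'document'
        id = Column(Integer, primary_key=True)
        # in-place operations such as .append() now mark the attribute dirty
        tags = Column(MutableList.as_mutable(PickleType), default=list)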
diff --git a/lib/sqlalchemy/ext/orderinglist.py b/lib/sqlalchemy/ext/orderinglist.py
index ac31c7cf7..d060a4f03 100644
--- a/lib/sqlalchemy/ext/orderinglist.py
+++ b/lib/sqlalchemy/ext/orderinglist.py
@@ -1,5 +1,5 @@
# ext/orderinglist.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/ext/serializer.py b/lib/sqlalchemy/ext/serializer.py
index 555f3760b..893f7be22 100644
--- a/lib/sqlalchemy/ext/serializer.py
+++ b/lib/sqlalchemy/ext/serializer.py
@@ -1,5 +1,5 @@
# ext/serializer.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/inspection.py b/lib/sqlalchemy/inspection.py
index a4738cc61..5c16c45c9 100644
--- a/lib/sqlalchemy/inspection.py
+++ b/lib/sqlalchemy/inspection.py
@@ -1,5 +1,5 @@
# sqlalchemy/inspect.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/interfaces.py b/lib/sqlalchemy/interfaces.py
index 717e99b5e..464ad9f70 100644
--- a/lib/sqlalchemy/interfaces.py
+++ b/lib/sqlalchemy/interfaces.py
@@ -1,5 +1,5 @@
# sqlalchemy/interfaces.py
-# Copyright (C) 2007-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2007-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
# Copyright (C) 2007 Jason Kirtland jek@discorporate.us
#
diff --git a/lib/sqlalchemy/log.py b/lib/sqlalchemy/log.py
index c23412e38..b23de9014 100644
--- a/lib/sqlalchemy/log.py
+++ b/lib/sqlalchemy/log.py
@@ -1,5 +1,5 @@
# sqlalchemy/log.py
-# Copyright (C) 2006-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2006-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
# Includes alterations by Vinay Sajip vinay_sajip@yahoo.co.uk
#
diff --git a/lib/sqlalchemy/orm/__init__.py b/lib/sqlalchemy/orm/__init__.py
index d9910a070..7425737ce 100644
--- a/lib/sqlalchemy/orm/__init__.py
+++ b/lib/sqlalchemy/orm/__init__.py
@@ -1,5 +1,5 @@
# orm/__init__.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/orm/attributes.py b/lib/sqlalchemy/orm/attributes.py
index 8605df785..017ad0300 100644
--- a/lib/sqlalchemy/orm/attributes.py
+++ b/lib/sqlalchemy/orm/attributes.py
@@ -1,5 +1,5 @@
# orm/attributes.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/orm/base.py b/lib/sqlalchemy/orm/base.py
index 785bd09dd..7947cd7d7 100644
--- a/lib/sqlalchemy/orm/base.py
+++ b/lib/sqlalchemy/orm/base.py
@@ -1,5 +1,5 @@
# orm/base.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/orm/collections.py b/lib/sqlalchemy/orm/collections.py
index 58a69227c..1e022e1dd 100644
--- a/lib/sqlalchemy/orm/collections.py
+++ b/lib/sqlalchemy/orm/collections.py
@@ -1,5 +1,5 @@
# orm/collections.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/orm/dependency.py b/lib/sqlalchemy/orm/dependency.py
index f3325203e..a3e5b12f9 100644
--- a/lib/sqlalchemy/orm/dependency.py
+++ b/lib/sqlalchemy/orm/dependency.py
@@ -1,5 +1,5 @@
# orm/dependency.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/orm/deprecated_interfaces.py b/lib/sqlalchemy/orm/deprecated_interfaces.py
index bb6d185d4..6477e8291 100644
--- a/lib/sqlalchemy/orm/deprecated_interfaces.py
+++ b/lib/sqlalchemy/orm/deprecated_interfaces.py
@@ -1,5 +1,5 @@
# orm/deprecated_interfaces.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/orm/descriptor_props.py b/lib/sqlalchemy/orm/descriptor_props.py
index 17c2d28ce..6c87ef9ba 100644
--- a/lib/sqlalchemy/orm/descriptor_props.py
+++ b/lib/sqlalchemy/orm/descriptor_props.py
@@ -1,5 +1,5 @@
# orm/descriptor_props.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/orm/dynamic.py b/lib/sqlalchemy/orm/dynamic.py
index ca593765f..026ebc317 100644
--- a/lib/sqlalchemy/orm/dynamic.py
+++ b/lib/sqlalchemy/orm/dynamic.py
@@ -1,5 +1,5 @@
# orm/dynamic.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/orm/evaluator.py b/lib/sqlalchemy/orm/evaluator.py
index 1e828ff86..534e7fa8f 100644
--- a/lib/sqlalchemy/orm/evaluator.py
+++ b/lib/sqlalchemy/orm/evaluator.py
@@ -1,5 +1,5 @@
# orm/evaluator.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/orm/events.py b/lib/sqlalchemy/orm/events.py
index 5b0cbfdad..d05fdc9cb 100644
--- a/lib/sqlalchemy/orm/events.py
+++ b/lib/sqlalchemy/orm/events.py
@@ -1,5 +1,5 @@
# orm/events.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/orm/exc.py b/lib/sqlalchemy/orm/exc.py
index e010a295d..db993220f 100644
--- a/lib/sqlalchemy/orm/exc.py
+++ b/lib/sqlalchemy/orm/exc.py
@@ -1,5 +1,5 @@
# orm/exc.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/orm/identity.py b/lib/sqlalchemy/orm/identity.py
index 2dfe3fd5c..0ab0548a1 100644
--- a/lib/sqlalchemy/orm/identity.py
+++ b/lib/sqlalchemy/orm/identity.py
@@ -1,5 +1,5 @@
# orm/identity.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/orm/instrumentation.py b/lib/sqlalchemy/orm/instrumentation.py
index be2fe91c2..d41ee59cb 100644
--- a/lib/sqlalchemy/orm/instrumentation.py
+++ b/lib/sqlalchemy/orm/instrumentation.py
@@ -1,5 +1,5 @@
# orm/instrumentation.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/orm/interfaces.py b/lib/sqlalchemy/orm/interfaces.py
index ed8f27332..faab70e37 100644
--- a/lib/sqlalchemy/orm/interfaces.py
+++ b/lib/sqlalchemy/orm/interfaces.py
@@ -1,5 +1,5 @@
# orm/interfaces.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/orm/loading.py b/lib/sqlalchemy/orm/loading.py
index b5a62d6b2..d457f3c63 100644
--- a/lib/sqlalchemy/orm/loading.py
+++ b/lib/sqlalchemy/orm/loading.py
@@ -1,5 +1,5 @@
# orm/loading.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -316,7 +316,7 @@ def _instance_processor(
else:
if adapter:
col = adapter.columns[col]
- getter = result._getter(col)
+ getter = result._getter(col, False)
if getter:
populators["quick"].append((prop.key, getter))
else:
diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py
index 53afdcb28..2236b2f76 100644
--- a/lib/sqlalchemy/orm/mapper.py
+++ b/lib/sqlalchemy/orm/mapper.py
@@ -1,5 +1,5 @@
# orm/mapper.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/orm/path_registry.py b/lib/sqlalchemy/orm/path_registry.py
index 9670a07fb..cf1846555 100644
--- a/lib/sqlalchemy/orm/path_registry.py
+++ b/lib/sqlalchemy/orm/path_registry.py
@@ -1,5 +1,5 @@
# orm/path_registry.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/orm/persistence.py b/lib/sqlalchemy/orm/persistence.py
index 30b39f600..a5e0d9d95 100644
--- a/lib/sqlalchemy/orm/persistence.py
+++ b/lib/sqlalchemy/orm/persistence.py
@@ -1,5 +1,5 @@
# orm/persistence.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -295,22 +295,24 @@ def _organize_states_for_save(base_mapper, states, uowtransaction):
instance = \
uowtransaction.session.identity_map[instance_key]
existing = attributes.instance_state(instance)
- if not uowtransaction.is_deleted(existing):
- raise orm_exc.FlushError(
- "New instance %s with identity key %s conflicts "
- "with persistent instance %s" %
- (state_str(state), instance_key,
- state_str(existing)))
-
- base_mapper._log_debug(
- "detected row switch for identity %s. "
- "will update %s, remove %s from "
- "transaction", instance_key,
- state_str(state), state_str(existing))
-
- # remove the "delete" flag from the existing element
- uowtransaction.remove_state_actions(existing)
- row_switch = existing
+
+ if not uowtransaction.was_already_deleted(existing):
+ if not uowtransaction.is_deleted(existing):
+ raise orm_exc.FlushError(
+ "New instance %s with identity key %s conflicts "
+ "with persistent instance %s" %
+ (state_str(state), instance_key,
+ state_str(existing)))
+
+ base_mapper._log_debug(
+ "detected row switch for identity %s. "
+ "will update %s, remove %s from "
+ "transaction", instance_key,
+ state_str(state), state_str(existing))
+
+ # remove the "delete" flag from the existing element
+ uowtransaction.remove_state_actions(existing)
+ row_switch = existing
if (has_identity or row_switch) and mapper.version_id_col is not None:
update_version_id = mapper._get_committed_state_attr_by_column(
diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py
index 0d4e1b771..8197e041f 100644
--- a/lib/sqlalchemy/orm/properties.py
+++ b/lib/sqlalchemy/orm/properties.py
@@ -1,5 +1,5 @@
# orm/properties.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py
index 6b808a701..4606c2ffb 100644
--- a/lib/sqlalchemy/orm/query.py
+++ b/lib/sqlalchemy/orm/query.py
@@ -1,5 +1,5 @@
# orm/query.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -478,8 +478,6 @@ class Query(object):
"""Return the full SELECT statement represented by this
:class:`.Query` represented as a common table expression (CTE).
- .. versionadded:: 0.7.6
-
Parameters and usage are the same as those of the
:meth:`.SelectBase.cte` method; see that method for
further details.
@@ -528,7 +526,7 @@ class Query(object):
.. seealso::
- :meth:`.SelectBase.cte`
+ :meth:`.HasCTE.cte`
"""
return self.enable_eagerloads(False).\
@@ -884,12 +882,15 @@ class Query(object):
a subquery as returned by :meth:`.Query.subquery` is
embedded in another :func:`~.expression.select` construct.
- """
+ """
- self._correlate = self._correlate.union(
- _interpret_as_from(s)
- if s is not None else None
- for s in args)
+ for s in args:
+ if s is None:
+ self._correlate = self._correlate.union([None])
+ else:
+ self._correlate = self._correlate.union(
+ sql_util.surface_selectables(_interpret_as_from(s))
+ )
@_generative()
def autoflush(self, setting):
@@ -1920,6 +1921,10 @@ class Query(object):
.. versionadded:: 1.0.0
+ :param full=False: render FULL OUTER JOIN; implies ``isouter``.
+
+ .. versionadded:: 1.1
+
:param from_joinpoint=False: When using ``aliased=True``, a setting
of True here will cause the join to be from the most recent
joined target, rather than starting back from the original
@@ -1937,14 +1942,16 @@ class Query(object):
SQLAlchemy versions was the primary ORM-level joining interface.
"""
- aliased, from_joinpoint, isouter = kwargs.pop('aliased', False),\
+ aliased, from_joinpoint, isouter, full = kwargs.pop('aliased', False),\
kwargs.pop('from_joinpoint', False),\
- kwargs.pop('isouter', False)
+ kwargs.pop('isouter', False),\
+ kwargs.pop('full', False)
if kwargs:
raise TypeError("unknown arguments: %s" %
', '.join(sorted(kwargs)))
return self._join(props,
- outerjoin=isouter, create_aliases=aliased,
+ outerjoin=isouter, full=full,
+ create_aliases=aliased,
from_joinpoint=from_joinpoint)
def outerjoin(self, *props, **kwargs):
@@ -1954,13 +1961,14 @@ class Query(object):
Usage is the same as the ``join()`` method.
"""
- aliased, from_joinpoint = kwargs.pop('aliased', False), \
- kwargs.pop('from_joinpoint', False)
+ aliased, from_joinpoint, full = kwargs.pop('aliased', False), \
+ kwargs.pop('from_joinpoint', False), \
+ kwargs.pop('full', False)
if kwargs:
raise TypeError("unknown arguments: %s" %
', '.join(sorted(kwargs)))
return self._join(props,
- outerjoin=True, create_aliases=aliased,
+ outerjoin=True, full=full, create_aliases=aliased,
from_joinpoint=from_joinpoint)
def _update_joinpoint(self, jp):
@@ -1976,7 +1984,7 @@ class Query(object):
self._joinpath = jp
@_generative(_no_statement_condition, _no_limit_offset)
- def _join(self, keys, outerjoin, create_aliases, from_joinpoint):
+ def _join(self, keys, outerjoin, full, create_aliases, from_joinpoint):
"""consumes arguments from join() or outerjoin(), places them into a
consistent format with which to form the actual JOIN constructs.
@@ -2088,10 +2096,10 @@ class Query(object):
self._join_left_to_right(
left_entity,
right_entity, onclause,
- outerjoin, create_aliases, prop)
+ outerjoin, full, create_aliases, prop)
def _join_left_to_right(self, left, right,
- onclause, outerjoin, create_aliases, prop):
+ onclause, outerjoin, full, create_aliases, prop):
"""append a JOIN to the query's from clause."""
self._polymorphic_adapters = self._polymorphic_adapters.copy()
@@ -2156,7 +2164,7 @@ class Query(object):
else:
self._joinpoint = {'_joinpoint_entity': right}
- self._join_to_left(l_info, left, right, onclause, outerjoin)
+ self._join_to_left(l_info, left, right, onclause, outerjoin, full)
def _prepare_right_side(self, r_info, right, onclause, create_aliases,
prop, overlap):
@@ -2243,7 +2251,7 @@ class Query(object):
return right, onclause
- def _join_to_left(self, l_info, left, right, onclause, outerjoin):
+ def _join_to_left(self, l_info, left, right, onclause, outerjoin, full):
info = l_info
left_mapper = getattr(info, 'mapper', None)
left_selectable = info.selectable
@@ -2256,7 +2264,7 @@ class Query(object):
try:
clause = orm_join(clause,
right,
- onclause, isouter=outerjoin)
+ onclause, isouter=outerjoin, full=full)
except sa_exc.ArgumentError as ae:
raise sa_exc.InvalidRequestError(
"Could not find a FROM clause to join from. "
@@ -2280,7 +2288,8 @@ class Query(object):
assert clause is not None
try:
- clause = orm_join(clause, right, onclause, isouter=outerjoin)
+ clause = orm_join(
+ clause, right, onclause, isouter=outerjoin, full=full)
except sa_exc.ArgumentError as ae:
raise sa_exc.InvalidRequestError(
"Could not find a FROM clause to join from. "
@@ -2465,9 +2474,35 @@ class Query(object):
@_generative(_no_statement_condition)
def slice(self, start, stop):
- """apply LIMIT/OFFSET to the ``Query`` based on a "
- "range and return the newly resulting ``Query``."""
+ """Computes the "slice" of the :class:`.Query` represented by
+ the given indices and returns the resulting :class:`.Query`.
+
+ The start and stop indices behave like the arguments to Python's
+ built-in :func:`range` function. This method provides an
+ alternative to using ``LIMIT``/``OFFSET`` to get a slice of the
+ query.
+
+ For example, ::
+
+ session.query(User).order_by(User.id).slice(1, 3)
+
+ renders as
+
+ .. sourcecode:: sql
+
+ SELECT users.id AS users_id,
+ users.name AS users_name
+ FROM users ORDER BY users.id
+ LIMIT ? OFFSET ?
+ (2, 1)
+
+ .. seealso::
+
+ :meth:`.Query.limit`
+
+ :meth:`.Query.offset`
+
+ """
if start is not None and stop is not None:
self._offset = (self._offset or 0) + start
self._limit = stop - start
@@ -2482,7 +2517,6 @@ class Query(object):
@_generative(_no_statement_condition)
def limit(self, limit):
"""Apply a ``LIMIT`` to the query and return the newly resulting
-
``Query``.
"""
@@ -3254,12 +3288,11 @@ class Query(object):
# then append eager joins onto that
if context.order_by:
- order_by_col_expr = list(
- chain(*[
- sql_util.unwrap_order_by(o)
- for o in context.order_by
- ])
- )
+ order_by_col_expr = \
+ sql_util.expand_column_list_from_order_by(
+ context.primary_columns,
+ context.order_by
+ )
else:
context.order_by = None
order_by_col_expr = []
@@ -3319,15 +3352,12 @@ class Query(object):
if not context.order_by:
context.order_by = None
- if self._distinct and context.order_by:
- order_by_col_expr = list(
- chain(*[
- sql_util.unwrap_order_by(o)
- for o in context.order_by
- ])
- )
- context.primary_columns += order_by_col_expr
-
+ if self._distinct is True and context.order_by:
+ context.primary_columns += \
+ sql_util.expand_column_list_from_order_by(
+ context.primary_columns,
+ context.order_by
+ )
context.froms += tuple(context.eager_joins.values())
statement = sql.select(
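To illustrate the ``full=True`` flag added to :meth:`.Query.join` and :meth:`.Query.outerjoin` above, a short sketch (not part of the patch; ``session``, ``User`` and ``Address`` are assumed from a typical ORM setup):

    # renders FULL OUTER JOIN on backends that support it; full implies isouter
    q = session.query(User).join(
        Address, User.id == Address.user_id, full=True)

    # Query.slice(), documented above, translates to LIMIT/OFFSET
    page = session.query(User).order_by(User.id).slice(10, 20)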
diff --git a/lib/sqlalchemy/orm/relationships.py b/lib/sqlalchemy/orm/relationships.py
index 83856eebf..17f94d4af 100644
--- a/lib/sqlalchemy/orm/relationships.py
+++ b/lib/sqlalchemy/orm/relationships.py
@@ -1,5 +1,5 @@
# orm/relationships.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/orm/scoping.py b/lib/sqlalchemy/orm/scoping.py
index 176523c3b..6306514cb 100644
--- a/lib/sqlalchemy/orm/scoping.py
+++ b/lib/sqlalchemy/orm/scoping.py
@@ -1,5 +1,5 @@
# orm/scoping.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py
index 56513860a..dc5de7ac6 100644
--- a/lib/sqlalchemy/orm/session.py
+++ b/lib/sqlalchemy/orm/session.py
@@ -1,5 +1,5 @@
# orm/session.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -235,7 +235,7 @@ class SessionTransaction(object):
return SessionTransaction(
self.session, self, nested=nested)
- def _iterate_parents(self, upto=None):
+ def _iterate_self_and_parents(self, upto=None):
current = self
result = ()
@@ -269,6 +269,11 @@ class SessionTransaction(object):
self._key_switches = weakref.WeakKeyDictionary()
def _restore_snapshot(self, dirty_only=False):
+ """Restore the restoration state taken before a transaction began.
+
+ Corresponds to a rollback.
+
+ """
assert self._is_transaction_boundary
self.session._expunge_states(
@@ -290,6 +295,11 @@ class SessionTransaction(object):
s._expire(s.dict, self.session.identity_map._modified)
def _remove_snapshot(self):
+ """Remove the restoration state taken before a transaction began.
+
+ Corresponds to a commit.
+
+ """
assert self._is_transaction_boundary
if not self.nested and self.session.expire_on_commit:
@@ -358,7 +368,7 @@ class SessionTransaction(object):
stx = self.session.transaction
if stx is not self:
- for subtransaction in stx._iterate_parents(upto=self):
+ for subtransaction in stx._iterate_self_and_parents(upto=self):
subtransaction.commit()
if not self.session._flushing:
@@ -405,26 +415,18 @@ class SessionTransaction(object):
stx = self.session.transaction
if stx is not self:
- for subtransaction in stx._iterate_parents(upto=self):
+ for subtransaction in stx._iterate_self_and_parents(upto=self):
subtransaction.close()
- if _capture_exception:
- captured_exception = sys.exc_info()[1]
-
boundary = self
+ rollback_err = None
if self._state in (ACTIVE, PREPARED):
- for transaction in self._iterate_parents():
+ for transaction in self._iterate_self_and_parents():
if transaction._parent is None or transaction.nested:
try:
transaction._rollback_impl()
- except Exception:
- if _capture_exception:
- util.warn(
- "An exception raised during a Session "
- "persistence operation cannot be raised "
- "due to an additional ROLLBACK exception; "
- "the exception is: %s" % captured_exception)
- raise
+ except:
+ rollback_err = sys.exc_info()
transaction._state = DEACTIVE
boundary = transaction
break
@@ -433,7 +435,7 @@ class SessionTransaction(object):
sess = self.session
- if sess._enable_transaction_accounting and \
+ if not rollback_err and sess._enable_transaction_accounting and \
not sess._is_clean():
# if items were added, deleted, or mutated
@@ -445,19 +447,24 @@ class SessionTransaction(object):
boundary._restore_snapshot(dirty_only=boundary.nested)
self.close()
+
if self._parent and _capture_exception:
- self._parent._rollback_exception = captured_exception
+ self._parent._rollback_exception = sys.exc_info()[1]
+
+ if rollback_err:
+ util.reraise(*rollback_err)
sess.dispatch.after_soft_rollback(sess, self)
return self._parent
def _rollback_impl(self):
- for t in set(self._connections.values()):
- t[1].rollback()
-
- if self.session._enable_transaction_accounting:
- self._restore_snapshot(dirty_only=self.nested)
+ try:
+ for t in set(self._connections.values()):
+ t[1].rollback()
+ finally:
+ if self.session._enable_transaction_accounting:
+ self._restore_snapshot(dirty_only=self.nested)
self.session.dispatch.after_rollback(self.session)
@@ -1090,7 +1097,7 @@ class Session(_SessionClassMethods):
def _close_impl(self, invalidate):
self.expunge_all()
if self.transaction is not None:
- for transaction in self.transaction._iterate_parents():
+ for transaction in self.transaction._iterate_self_and_parents():
transaction.close(invalidate)
def expunge_all(self):
@@ -1780,9 +1787,10 @@ class Session(_SessionClassMethods):
self._update_impl(merged_state)
new_instance = True
- elif not _none_set.intersection(key[1]) or \
+ elif key_is_persistent and (
+ not _none_set.intersection(key[1]) or
(mapper.allow_partial_pks and
- not _none_set.issuperset(key[1])):
+ not _none_set.issuperset(key[1]))):
merged = self.query(mapper.class_).get(key[1])
else:
merged = None
@@ -2729,18 +2737,49 @@ class sessionmaker(_SessionClassMethods):
def make_transient(instance):
- """Make the given instance 'transient'.
+ """Alter the state of the given instance so that it is :term:`transient`.
- This will remove its association with any
- session and additionally will remove its "identity key",
- such that it's as though the object were newly constructed,
- except retaining its values. It also resets the
- "deleted" flag on the state if this object
- had been explicitly deleted by its session.
+ .. note::
- Attributes which were "expired" or deferred at the
- instance level are reverted to undefined, and
- will not trigger any loads.
+ :func:`.make_transient` is a special-case function for
+ advanced use cases only.
+
+ The given mapped instance is assumed to be in the :term:`persistent` or
+ :term:`detached` state. The function will remove its association with any
+ :class:`.Session` as well as its :attr:`.InstanceState.identity`. The
+ effect is that the object will behave as though it were newly constructed,
+ except retaining any attribute / collection values that were loaded at the
+ time of the call. The :attr:`.InstanceState.deleted` flag is also reset
+ if this object had been deleted as a result of using
+ :meth:`.Session.delete`.
+
+ .. warning::
+
+ :func:`.make_transient` does **not** "unexpire" or otherwise eagerly
+ load ORM-mapped attributes that are not currently loaded at the time
+ the function is called. This includes attributes which:
+
+ * were expired via :meth:`.Session.expire`
+
+ * were expired as the natural effect of committing a session
+ transaction, e.g. :meth:`.Session.commit`
+
+ * are normally :term:`lazy loaded` but are not currently loaded
+
+ * are "deferred" via :ref:`deferred` and are not yet loaded
+
+ * were not present in the query which loaded this object, as is
+ commonly the case with joined table inheritance and other scenarios.
+
+ After :func:`.make_transient` is called, unloaded attributes such
+ as those above will normally resolve to the value ``None`` when
+ accessed, or an empty collection for a collection-oriented attribute.
+ As the object is transient and un-associated with any database
+ identity, it will no longer retrieve these values.
+
+ .. seealso::
+
+ :func:`.make_transient_to_detached`
"""
state = attributes.instance_state(instance)
@@ -2762,7 +2801,12 @@ def make_transient(instance):
def make_transient_to_detached(instance):
- """Make the given transient instance 'detached'.
+ """Make the given transient instance :term:`detached`.
+
+ .. note::
+
+ :func:`.make_transient_to_detached` is a special-case function for
+ advanced use cases only.
All attribute history on the given instance
will be reset as though the instance were freshly loaded
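A brief sketch of the behavior described in the revised :func:`.make_transient` docstring (illustrative only; ``User`` and ``session`` are assumed from a typical ORM setup):

    from sqlalchemy.orm import make_transient

    user = session.query(User).first()
    make_transient(user)

    # the object now has no identity key and no session; attribute values
    # that were loaded are retained, unloaded ones resolve to None / empty
    user.id = None
    session.add(user)
    session.commit()   # emits an INSERT for a brand new row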
diff --git a/lib/sqlalchemy/orm/state.py b/lib/sqlalchemy/orm/state.py
index b648ffa3b..1ad09ee83 100644
--- a/lib/sqlalchemy/orm/state.py
+++ b/lib/sqlalchemy/orm/state.py
@@ -1,5 +1,5 @@
# orm/state.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -258,7 +258,7 @@ class InstanceState(interfaces.InspectionAttr):
Returns ``None`` if the object has no primary key identity.
.. note::
- An object which is transient or pending
+ An object which is :term:`transient` or :term:`pending`
does **not** have a mapped identity until it is flushed,
even if its attributes include primary key values.
diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py
index 0252a65f9..370cb974b 100644
--- a/lib/sqlalchemy/orm/strategies.py
+++ b/lib/sqlalchemy/orm/strategies.py
@@ -1,5 +1,5 @@
# orm/strategies.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -174,7 +174,7 @@ class ColumnLoader(LoaderStrategy):
for col in self.columns:
if adapter:
col = adapter.columns[col]
- getter = result._getter(col)
+ getter = result._getter(col, False)
if getter:
populators["quick"].append((self.key, getter))
break
@@ -1573,13 +1573,19 @@ class JoinedLoader(AbstractRelationshipLoader):
# call _instance on the row, even though the object has
# been created, so that we further descend into properties
existing = _instance(row)
- if existing is not None \
- and key in dict_ \
- and existing is not dict_[key]:
- util.warn(
- "Multiple rows returned with "
- "uselist=False for eagerly-loaded attribute '%s' "
- % self)
+ if existing is not None:
+ # conflicting value already loaded, this shouldn't happen
+ if key in dict_:
+ if existing is not dict_[key]:
+ util.warn(
+ "Multiple rows returned with "
+ "uselist=False for eagerly-loaded attribute '%s' "
+ % self)
+ else:
+ # this case is when one row has multiple loads of the
+ # same entity (e.g. via aliasing), one has an attribute
+ # that the other doesn't.
+ dict_[key] = existing
def load_scalar_from_joined_exec(state, dict_, row):
_instance(row)
diff --git a/lib/sqlalchemy/orm/strategy_options.py b/lib/sqlalchemy/orm/strategy_options.py
index aa818258a..796c17ec4 100644
--- a/lib/sqlalchemy/orm/strategy_options.py
+++ b/lib/sqlalchemy/orm/strategy_options.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/orm/sync.py b/lib/sqlalchemy/orm/sync.py
index e8e273a86..ccca50871 100644
--- a/lib/sqlalchemy/orm/sync.py
+++ b/lib/sqlalchemy/orm/sync.py
@@ -1,5 +1,5 @@
# orm/sync.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/orm/unitofwork.py b/lib/sqlalchemy/orm/unitofwork.py
index 1ef0d24ca..f3e39d9b5 100644
--- a/lib/sqlalchemy/orm/unitofwork.py
+++ b/lib/sqlalchemy/orm/unitofwork.py
@@ -1,5 +1,5 @@
# orm/unitofwork.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -16,6 +16,7 @@ organizes them in order of dependency, and executes.
from .. import util, event
from ..util import topological
from . import attributes, persistence, util as orm_util
+from . import exc as orm_exc
import itertools
@@ -155,6 +156,18 @@ class UOWTransaction(object):
def has_work(self):
return bool(self.states)
+ def was_already_deleted(self, state):
+ """return true if the given state is expired and was deleted
+ previously.
+ """
+ if state.expired:
+ try:
+ state._load_expired(state, attributes.PASSIVE_OFF)
+ except orm_exc.ObjectDeletedError:
+ self.session._remove_newly_deleted([state])
+ return True
+ return False
+
def is_deleted(self, state):
"""return true if the given state is marked as deleted
within this uowtransaction."""
diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py
index 46183a47d..f76be0380 100644
--- a/lib/sqlalchemy/orm/util.py
+++ b/lib/sqlalchemy/orm/util.py
@@ -1,5 +1,5 @@
# orm/util.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -783,7 +783,7 @@ class _ORMJoin(expression.Join):
def __init__(
self,
left, right, onclause=None, isouter=False,
- _left_memo=None, _right_memo=None):
+ full=False, _left_memo=None, _right_memo=None):
left_info = inspection.inspect(left)
left_orm_info = getattr(left, '_joined_from_info', left_info)
@@ -835,7 +835,7 @@ class _ORMJoin(expression.Join):
onclause = pj
self._target_adapter = target_adapter
- expression.Join.__init__(self, left, right, onclause, isouter)
+ expression.Join.__init__(self, left, right, onclause, isouter, full)
if not prop and getattr(right_info, 'mapper', None) \
and right_info.mapper.single:
@@ -874,14 +874,20 @@ class _ORMJoin(expression.Join):
_right_memo=other._right_memo
)
- def join(self, right, onclause=None, isouter=False, join_to_left=None):
- return _ORMJoin(self, right, onclause, isouter)
+ def join(
+ self, right, onclause=None,
+ isouter=False, full=False, join_to_left=None):
+ return _ORMJoin(self, right, onclause, isouter, full)
- def outerjoin(self, right, onclause=None, join_to_left=None):
- return _ORMJoin(self, right, onclause, True)
+ def outerjoin(
+ self, right, onclause=None,
+ full=False, join_to_left=None):
+ return _ORMJoin(self, right, onclause, True, full=full)
-def join(left, right, onclause=None, isouter=False, join_to_left=None):
+def join(
+ left, right, onclause=None, isouter=False,
+ full=False, join_to_left=None):
"""Produce an inner join between left and right clauses.
:func:`.orm.join` is an extension to the core join interface
@@ -919,10 +925,10 @@ def join(left, right, onclause=None, isouter=False, join_to_left=None):
is no longer used, and is deprecated.
"""
- return _ORMJoin(left, right, onclause, isouter)
+ return _ORMJoin(left, right, onclause, isouter, full)
-def outerjoin(left, right, onclause=None, join_to_left=None):
+def outerjoin(left, right, onclause=None, full=False, join_to_left=None):
"""Produce a left outer join between left and right clauses.
This is the "outer join" version of the :func:`.orm.join` function,
@@ -930,7 +936,7 @@ def outerjoin(left, right, onclause=None, join_to_left=None):
See that function's documentation for other usage details.
"""
- return _ORMJoin(left, right, onclause, True)
+ return _ORMJoin(left, right, onclause, True, full)
def with_parent(instance, prop):
diff --git a/lib/sqlalchemy/pool.py b/lib/sqlalchemy/pool.py
index 4dd954fc4..4bd8f60ec 100644
--- a/lib/sqlalchemy/pool.py
+++ b/lib/sqlalchemy/pool.py
@@ -1,5 +1,5 @@
# sqlalchemy/pool.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -286,6 +286,7 @@ class Pool(log.Identified):
def _close_connection(self, connection):
self.logger.debug("Closing connection %r", connection)
+
try:
self._dialect.do_close(connection)
except Exception:
@@ -446,14 +447,9 @@ class _ConnectionRecord(object):
def __init__(self, pool):
self.__pool = pool
- self.connection = self.__connect()
+ self.__connect(first_connect_check=True)
self.finalize_callback = deque()
- pool.dispatch.first_connect.\
- for_modify(pool.dispatch).\
- exec_once(self.connection, self)
- pool.dispatch.connect(self.connection, self)
-
connection = None
"""A reference to the actual DBAPI connection being tracked.
@@ -560,9 +556,7 @@ class _ConnectionRecord(object):
recycle = False
if self.connection is None:
self.info.clear()
- self.connection = self.__connect()
- if self.__pool.dispatch.connect:
- self.__pool.dispatch.connect(self.connection, self)
+ self.__connect()
elif self.__pool._recycle > -1 and \
time.time() - self.starttime > self.__pool._recycle:
self.__pool.logger.info(
@@ -588,28 +582,36 @@ class _ConnectionRecord(object):
self.__close()
self.info.clear()
- # ensure that if self.__connect() fails,
- # we are not referring to the previous stale connection here
- self.connection = None
- self.connection = self.__connect()
-
- if self.__pool.dispatch.connect:
- self.__pool.dispatch.connect(self.connection, self)
+ self.__connect()
return self.connection
def __close(self):
self.finalize_callback.clear()
+ if self.__pool.dispatch.close:
+ self.__pool.dispatch.close(self.connection, self)
self.__pool._close_connection(self.connection)
- def __connect(self):
+ def __connect(self, first_connect_check=False):
+ pool = self.__pool
+
+ # ensure any existing connection is removed, so that if
+ # creator fails, this attribute stays None
+ self.connection = None
try:
self.starttime = time.time()
- connection = self.__pool._invoke_creator(self)
- self.__pool.logger.debug("Created new connection %r", connection)
- return connection
+ connection = pool._invoke_creator(self)
+ pool.logger.debug("Created new connection %r", connection)
+ self.connection = connection
except Exception as e:
- self.__pool.logger.debug("Error on connect(): %s", e)
+ pool.logger.debug("Error on connect(): %s", e)
raise
+ else:
+ if first_connect_check:
+ pool.dispatch.first_connect.\
+ for_modify(pool.dispatch).\
+ exec_once(self.connection, self)
+ if pool.dispatch.connect:
+ pool.dispatch.connect(self.connection, self)
def _finalize_fairy(connection, connection_record,
@@ -637,6 +639,8 @@ def _finalize_fairy(connection, connection_record,
# Immediately close detached instances
if not connection_record:
+ if pool.dispatch.close_detached:
+ pool.dispatch.close_detached(connection)
pool._close_connection(connection)
except BaseException as e:
pool.logger.error(
@@ -868,14 +872,18 @@ class _ConnectionFairy(object):
"""
if self._connection_record is not None:
- _refs.remove(self._connection_record)
- self._connection_record.fairy_ref = None
- self._connection_record.connection = None
+ rec = self._connection_record
+ _refs.remove(rec)
+ rec.fairy_ref = None
+ rec.connection = None
# TODO: should this be _return_conn?
self._pool._do_return_conn(self._connection_record)
self.info = self.info.copy()
self._connection_record = None
+ if self._pool.dispatch.detach:
+ self._pool.dispatch.detach(self.connection, rec)
+
def close(self):
self._counter -= 1
if self._counter == 0:
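The ``detach`` / ``close_detached`` dispatch calls wired in above can be observed from the public API; a sketch, assuming an ordinary ``engine``:

    raw = engine.raw_connection()
    raw.detach()   # emits PoolEvents.detach(dbapi_connection, connection_record)
    raw.close()    # record is gone, so PoolEvents.close_detached fires instead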
diff --git a/lib/sqlalchemy/processors.py b/lib/sqlalchemy/processors.py
index 6575fad17..b57e6740b 100644
--- a/lib/sqlalchemy/processors.py
+++ b/lib/sqlalchemy/processors.py
@@ -1,5 +1,5 @@
# sqlalchemy/processors.py
-# Copyright (C) 2010-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2010-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
# Copyright (C) 2010 Gaetan de Menten gdementen@gmail.com
#
diff --git a/lib/sqlalchemy/schema.py b/lib/sqlalchemy/schema.py
index 327498fc5..5b703f7b6 100644
--- a/lib/sqlalchemy/schema.py
+++ b/lib/sqlalchemy/schema.py
@@ -1,5 +1,5 @@
# schema.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/sql/__init__.py b/lib/sqlalchemy/sql/__init__.py
index f4ad3ec00..7f7abacc2 100644
--- a/lib/sqlalchemy/sql/__init__.py
+++ b/lib/sqlalchemy/sql/__init__.py
@@ -1,5 +1,5 @@
# sql/__init__.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -46,6 +46,7 @@ from .expression import (
intersect_all,
join,
label,
+ lateral,
literal,
literal_column,
modifier,
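The newly exported ``lateral`` construct (compiled by the ``visit_lateral`` hook added later in this patch) hangs off any selectable; a sketch against hypothetical ``people`` / ``books`` tables:

    from sqlalchemy import table, column, select, true

    people = table('people', column('people_id'), column('age'))
    books = table('books', column('book_id'), column('owner_id'))

    subq = select([books.c.book_id]).\
        where(books.c.owner_id == people.c.people_id).\
        lateral("book_subq")

    stmt = select([people]).select_from(people.join(subq, true()))
    # FROM people JOIN LATERAL (SELECT ...) AS book_subq ON true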
diff --git a/lib/sqlalchemy/sql/annotation.py b/lib/sqlalchemy/sql/annotation.py
index 8fec5039b..6ad25abaa 100644
--- a/lib/sqlalchemy/sql/annotation.py
+++ b/lib/sqlalchemy/sql/annotation.py
@@ -1,5 +1,5 @@
# sql/annotation.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/sql/base.py b/lib/sqlalchemy/sql/base.py
index 97fb04dd9..cf7dcfd31 100644
--- a/lib/sqlalchemy/sql/base.py
+++ b/lib/sqlalchemy/sql/base.py
@@ -1,5 +1,5 @@
# sql/base.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py
index 492999d16..3d2f02006 100644
--- a/lib/sqlalchemy/sql/compiler.py
+++ b/lib/sqlalchemy/sql/compiler.py
@@ -1,5 +1,5 @@
# sql/compiler.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -53,7 +53,7 @@ LEGAL_CHARACTERS = re.compile(r'^[A-Z0-9_$]+$', re.I)
ILLEGAL_INITIAL_CHARACTERS = set([str(x) for x in range(0, 10)]).union(['$'])
BIND_PARAMS = re.compile(r'(?<![:\w\$\x5c]):([\w\$]+)(?![:\w\$])', re.UNICODE)
-BIND_PARAMS_ESC = re.compile(r'\x5c(:[\w\$]+)(?![:\w\$])', re.UNICODE)
+BIND_PARAMS_ESC = re.compile(r'\x5c(:[\w\$]*)(?![:\w\$])', re.UNICODE)
BIND_TEMPLATES = {
'pyformat': "%%(%(name)s)s",
@@ -418,6 +418,11 @@ class SQLCompiler(Compiled):
self.truncated_names = {}
Compiled.__init__(self, dialect, statement, **kwargs)
+ if (
+ self.isinsert or self.isupdate or self.isdelete
+ ) and statement._returning:
+ self.returning = statement._returning
+
if self.positional and dialect.paramstyle == 'numeric':
self._apply_numbered_params()
@@ -1315,6 +1320,10 @@ class SQLCompiler(Compiled):
else:
return alias.original._compiler_dispatch(self, **kwargs)
+ def visit_lateral(self, lateral, **kw):
+ kw['lateral'] = True
+ return "LATERAL %s" % self.visit_alias(lateral, **kw)
+
def get_render_as_alias_suffix(self, alias_name_text):
return " AS " + alias_name_text
@@ -1527,7 +1536,7 @@ class SQLCompiler(Compiled):
('asfrom_froms', frozenset())
])
- def _display_froms_for_select(self, select, asfrom):
+ def _display_froms_for_select(self, select, asfrom, lateral=False):
# utility method to help external dialects
# get the correct from list for a select.
# specifically the oracle dialect needs this feature
@@ -1538,7 +1547,7 @@ class SQLCompiler(Compiled):
correlate_froms = entry['correlate_froms']
asfrom_froms = entry['asfrom_froms']
- if asfrom:
+ if asfrom and not lateral:
froms = select._get_display_froms(
explicit_correlate_froms=correlate_froms.difference(
asfrom_froms),
@@ -1554,6 +1563,7 @@ class SQLCompiler(Compiled):
compound_index=0,
nested_join_translation=False,
select_wraps_for=None,
+ lateral=False,
**kwargs):
needs_nested_translation = \
@@ -1593,7 +1603,7 @@ class SQLCompiler(Compiled):
select, transformed_select)
return text
- froms = self._setup_select_stack(select, entry, asfrom)
+ froms = self._setup_select_stack(select, entry, asfrom, lateral)
column_clause_args = kwargs.copy()
column_clause_args.update({
@@ -1615,7 +1625,6 @@ class SQLCompiler(Compiled):
select, select._prefixes, **kwargs)
text += self.get_select_precolumns(select, **kwargs)
-
# the actual list of columns to print in the SELECT column list.
inner_columns = [
c for c in [
@@ -1633,15 +1642,14 @@ class SQLCompiler(Compiled):
if populate_result_map and select_wraps_for is not None:
# if this select is a compiler-generated wrapper,
# rewrite the targeted columns in the result map
- wrapped_inner_columns = set(select_wraps_for.inner_columns)
+
translate = dict(
- (outer, inner.pop()) for outer, inner in [
- (
- outer,
- outer.proxy_set.intersection(wrapped_inner_columns))
- for outer in select.inner_columns
- ] if inner
+ zip(
+ [name for (key, name) in select._columns_plus_names],
+ [name for (key, name) in
+ select_wraps_for._columns_plus_names])
)
+
self._result_columns = [
(key, name, tuple(translate.get(o, o) for o in obj), type_)
for key, name, obj, type_ in self._result_columns
@@ -1659,7 +1667,7 @@ class SQLCompiler(Compiled):
if per_dialect:
text += " " + self.get_statement_hint_text(per_dialect)
- if self.ctes and self._is_toplevel_select(select):
+ if self.ctes and toplevel:
text = self._render_cte_clause() + text
if select._suffixes:
@@ -1668,25 +1676,11 @@ class SQLCompiler(Compiled):
self.stack.pop(-1)
- if asfrom and parens:
+ if (asfrom or lateral) and parens:
return "(" + text + ")"
else:
return text
- def _is_toplevel_select(self, select):
- """Return True if the stack is placed at the given select, and
- is also the outermost SELECT, meaning there is either no stack
- before this one, or the enclosing stack is a topmost INSERT.
-
- """
- return (
- self.stack[-1]['selectable'] is select and
- (
- len(self.stack) == 1 or self.isinsert and len(self.stack) == 2
- and self.statement is self.stack[0]['selectable']
- )
- )
-
def _setup_select_hints(self, select):
byfrom = dict([
(from_, hinttext % {
@@ -1700,11 +1694,11 @@ class SQLCompiler(Compiled):
hint_text = self.get_select_hint_text(byfrom)
return hint_text, byfrom
- def _setup_select_stack(self, select, entry, asfrom):
+ def _setup_select_stack(self, select, entry, asfrom, lateral):
correlate_froms = entry['correlate_froms']
asfrom_froms = entry['asfrom_froms']
- if asfrom:
+ if asfrom and not lateral:
froms = select._get_display_froms(
explicit_correlate_froms=correlate_froms.difference(
asfrom_froms),
@@ -1723,6 +1717,7 @@ class SQLCompiler(Compiled):
'selectable': select,
}
self.stack.append(new_entry)
+
return froms
def _compose_select_body(
@@ -1852,9 +1847,15 @@ class SQLCompiler(Compiled):
return ""
def visit_join(self, join, asfrom=False, **kwargs):
+ if join.full:
+ join_type = " FULL OUTER JOIN "
+ elif join.isouter:
+ join_type = " LEFT OUTER JOIN "
+ else:
+ join_type = " JOIN "
return (
join.left._compiler_dispatch(self, asfrom=True, **kwargs) +
- (join.isouter and " LEFT OUTER JOIN " or " JOIN ") +
+ join_type +
join.right._compiler_dispatch(self, asfrom=True, **kwargs) +
" ON " +
join.onclause._compiler_dispatch(self, **kwargs)
@@ -1876,14 +1877,16 @@ class SQLCompiler(Compiled):
)
return dialect_hints, table_text
- def visit_insert(self, insert_stmt, **kw):
+ def visit_insert(self, insert_stmt, asfrom=False, **kw):
+ toplevel = not self.stack
+
self.stack.append(
{'correlate_froms': set(),
"asfrom_froms": set(),
"selectable": insert_stmt})
- self.isinsert = True
- crud_params = crud._get_crud_params(self, insert_stmt, **kw)
+ crud_params = crud._setup_crud_params(
+ self, insert_stmt, crud.ISINSERT, **kw)
if not crud_params and \
not self.dialect.supports_default_values and \
@@ -1929,12 +1932,13 @@ class SQLCompiler(Compiled):
for c in crud_params_single])
if self.returning or insert_stmt._returning:
- self.returning = self.returning or insert_stmt._returning
returning_clause = self.returning_clause(
- insert_stmt, self.returning)
+ insert_stmt, self.returning or insert_stmt._returning)
if self.returning_precedes_values:
text += " " + returning_clause
+ else:
+ returning_clause = None
if insert_stmt.select is not None:
text += " %s" % self.process(self._insert_from_select, **kw)
@@ -1953,12 +1957,18 @@ class SQLCompiler(Compiled):
text += " VALUES (%s)" % \
', '.join([c[1] for c in crud_params])
- if self.returning and not self.returning_precedes_values:
+ if returning_clause and not self.returning_precedes_values:
text += " " + returning_clause
+ if self.ctes and toplevel:
+ text = self._render_cte_clause() + text
+
self.stack.pop(-1)
- return text
+ if asfrom:
+ return "(" + text + ")"
+ else:
+ return text
def update_limit_clause(self, update_stmt):
"""Provide a hook for MySQL to add LIMIT to the UPDATE"""
@@ -1972,8 +1982,8 @@ class SQLCompiler(Compiled):
MySQL overrides this.
"""
- return from_table._compiler_dispatch(self, asfrom=True,
- iscrud=True, **kw)
+ kw['asfrom'] = True
+ return from_table._compiler_dispatch(self, iscrud=True, **kw)
def update_from_clause(self, update_stmt,
from_table, extra_froms,
@@ -1990,14 +2000,14 @@ class SQLCompiler(Compiled):
fromhints=from_hints, **kw)
for t in extra_froms)
- def visit_update(self, update_stmt, **kw):
+ def visit_update(self, update_stmt, asfrom=False, **kw):
+ toplevel = not self.stack
+
self.stack.append(
{'correlate_froms': set([update_stmt.table]),
"asfrom_froms": set([update_stmt.table]),
"selectable": update_stmt})
- self.isupdate = True
-
extra_froms = update_stmt._extra_froms
text = "UPDATE "
@@ -2009,7 +2019,8 @@ class SQLCompiler(Compiled):
table_text = self.update_tables_clause(update_stmt, update_stmt.table,
extra_froms, **kw)
- crud_params = crud._get_crud_params(self, update_stmt, **kw)
+ crud_params = crud._setup_crud_params(
+ self, update_stmt, crud.ISUPDATE, **kw)
if update_stmt._hints:
dialect_hints, table_text = self._setup_crud_hints(
@@ -2029,11 +2040,9 @@ class SQLCompiler(Compiled):
)
if self.returning or update_stmt._returning:
- if not self.returning:
- self.returning = update_stmt._returning
if self.returning_precedes_values:
text += " " + self.returning_clause(
- update_stmt, self.returning)
+ update_stmt, self.returning or update_stmt._returning)
if extra_froms:
extra_from_text = self.update_from_clause(
@@ -2045,7 +2054,7 @@ class SQLCompiler(Compiled):
text += " " + extra_from_text
if update_stmt._whereclause is not None:
- t = self.process(update_stmt._whereclause)
+ t = self.process(update_stmt._whereclause, **kw)
if t:
text += " WHERE " + t
@@ -2053,23 +2062,33 @@ class SQLCompiler(Compiled):
if limit_clause:
text += " " + limit_clause
- if self.returning and not self.returning_precedes_values:
+ if (self.returning or update_stmt._returning) and \
+ not self.returning_precedes_values:
text += " " + self.returning_clause(
- update_stmt, self.returning)
+ update_stmt, self.returning or update_stmt._returning)
+
+ if self.ctes and toplevel:
+ text = self._render_cte_clause() + text
self.stack.pop(-1)
- return text
+ if asfrom:
+ return "(" + text + ")"
+ else:
+ return text
@util.memoized_property
def _key_getters_for_crud_column(self):
- return crud._key_getters_for_crud_column(self)
+ return crud._key_getters_for_crud_column(self, self.statement)
+
+ def visit_delete(self, delete_stmt, asfrom=False, **kw):
+ toplevel = not self.stack
- def visit_delete(self, delete_stmt, **kw):
self.stack.append({'correlate_froms': set([delete_stmt.table]),
"asfrom_froms": set([delete_stmt.table]),
"selectable": delete_stmt})
- self.isdelete = True
+
+ crud._setup_crud_params(self, delete_stmt, crud.ISDELETE, **kw)
text = "DELETE "
@@ -2088,23 +2107,28 @@ class SQLCompiler(Compiled):
text += table_text
if delete_stmt._returning:
- self.returning = delete_stmt._returning
if self.returning_precedes_values:
text += " " + self.returning_clause(
delete_stmt, delete_stmt._returning)
if delete_stmt._whereclause is not None:
- t = delete_stmt._whereclause._compiler_dispatch(self)
+ t = delete_stmt._whereclause._compiler_dispatch(self, **kw)
if t:
text += " WHERE " + t
- if self.returning and not self.returning_precedes_values:
+ if delete_stmt._returning and not self.returning_precedes_values:
text += " " + self.returning_clause(
delete_stmt, delete_stmt._returning)
+ if self.ctes and toplevel:
+ text = self._render_cte_clause() + text
+
self.stack.pop(-1)
- return text
+ if asfrom:
+ return "(" + text + ")"
+ else:
+ return text
def visit_savepoint(self, savepoint_stmt):
return "SAVEPOINT %s" % self.preparer.format_savepoint(savepoint_stmt)
diff --git a/lib/sqlalchemy/sql/crud.py b/lib/sqlalchemy/sql/crud.py
index c5495ccde..70e03d220 100644
--- a/lib/sqlalchemy/sql/crud.py
+++ b/lib/sqlalchemy/sql/crud.py
@@ -1,5 +1,5 @@
# sql/crud.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -25,6 +25,41 @@ values present.
""")
+ISINSERT = util.symbol('ISINSERT')
+ISUPDATE = util.symbol('ISUPDATE')
+ISDELETE = util.symbol('ISDELETE')
+
+
+def _setup_crud_params(compiler, stmt, local_stmt_type, **kw):
+ restore_isinsert = compiler.isinsert
+ restore_isupdate = compiler.isupdate
+ restore_isdelete = compiler.isdelete
+
+ should_restore = (
+ restore_isinsert or restore_isupdate or restore_isdelete
+ ) or len(compiler.stack) > 1
+
+ if local_stmt_type is ISINSERT:
+ compiler.isupdate = False
+ compiler.isinsert = True
+ elif local_stmt_type is ISUPDATE:
+ compiler.isupdate = True
+ compiler.isinsert = False
+ elif local_stmt_type is ISDELETE:
+ if not should_restore:
+ compiler.isdelete = True
+ else:
+ assert False, "ISINSERT, ISUPDATE, or ISDELETE expected"
+
+ try:
+ if local_stmt_type in (ISINSERT, ISUPDATE):
+ return _get_crud_params(compiler, stmt, **kw)
+ finally:
+ if should_restore:
+ compiler.isinsert = restore_isinsert
+ compiler.isupdate = restore_isupdate
+ compiler.isdelete = restore_isdelete
+
def _get_crud_params(compiler, stmt, **kw):
"""create a set of tuples representing column/string pairs for use
@@ -59,7 +94,7 @@ def _get_crud_params(compiler, stmt, **kw):
# but in the case of mysql multi-table update, the rules for
# .key must conditionally take tablename into account
_column_as_key, _getattr_col_key, _col_bind_name = \
- _key_getters_for_crud_column(compiler)
+ _key_getters_for_crud_column(compiler, stmt)
# if we have statement parameters - set defaults in the
# compiled params
@@ -101,7 +136,7 @@ def _get_crud_params(compiler, stmt, **kw):
if parameters and stmt_parameters:
check = set(parameters).intersection(
- _column_as_key(k) for k in stmt.parameters
+ _column_as_key(k) for k in stmt_parameters
).difference(check_columns)
if check:
raise exc.CompileError(
@@ -117,26 +152,26 @@ def _get_crud_params(compiler, stmt, **kw):
def _create_bind_param(
compiler, col, value, process=True,
- required=False, name=None):
+ required=False, name=None, **kw):
if name is None:
name = col.key
bindparam = elements.BindParameter(
name, value, type_=col.type, required=required)
bindparam._is_crud = True
if process:
- bindparam = bindparam._compiler_dispatch(compiler)
+ bindparam = bindparam._compiler_dispatch(compiler, **kw)
return bindparam
-def _key_getters_for_crud_column(compiler):
- if compiler.isupdate and compiler.statement._extra_froms:
+def _key_getters_for_crud_column(compiler, stmt):
+ if compiler.isupdate and stmt._extra_froms:
# when extra tables are present, refer to the columns
# in those extra tables as table-qualified, including in
# dictionaries and when rendering bind param names.
# the "main" table of the statement remains unqualified,
# allowing the most compatibility with a non-multi-table
# statement.
- _et = set(compiler.statement._extra_froms)
+ _et = set(stmt._extra_froms)
def _column_as_key(key):
str_key = elements._column_as_key(key)
@@ -280,7 +315,8 @@ def _append_param_parameter(
compiler, c, value, required=value is REQUIRED,
name=_col_bind_name(c)
if not stmt._has_multi_parameters
- else "%s_0" % _col_bind_name(c)
+ else "%s_0" % _col_bind_name(c),
+ **kw
)
else:
if isinstance(value, elements.BindParameter) and \
@@ -608,7 +644,9 @@ def _get_returning_modifiers(compiler, stmt):
stmt.table.implicit_returning and
stmt._return_defaults)
else:
- implicit_return_defaults = False
+ # this line is unused, currently we are always
+ # isinsert or isupdate
+ implicit_return_defaults = False # pragma: no cover
if implicit_return_defaults:
if stmt._return_defaults is True:
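The ``stmt``-aware ``_key_getters_for_crud_column`` matters for multi-table
UPDATE constructs; a brief sketch with hypothetical tables, where the
"extra" table's column is keyed and bound in table-qualified form
(e.g. "addresses_email") so it cannot collide with a same-named column on
the main table::

    from sqlalchemy import table, column

    users = table('users', column('id'), column('name'))
    addresses = table('addresses', column('user_id'), column('email'))

    stmt = users.update().\
        values({users.c.name: 'jack',
                addresses.c.email: 'jack@example.com'}).\
        where(users.c.id == addresses.c.user_id)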
diff --git a/lib/sqlalchemy/sql/ddl.py b/lib/sqlalchemy/sql/ddl.py
index 7953b61b8..48f27b8b8 100644
--- a/lib/sqlalchemy/sql/ddl.py
+++ b/lib/sqlalchemy/sql/ddl.py
@@ -1,5 +1,5 @@
# sql/ddl.py
-# Copyright (C) 2009-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2009-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/sql/default_comparator.py b/lib/sqlalchemy/sql/default_comparator.py
index ddb57da77..1bb1c344c 100644
--- a/lib/sqlalchemy/sql/default_comparator.py
+++ b/lib/sqlalchemy/sql/default_comparator.py
@@ -1,5 +1,5 @@
# sql/default_comparator.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -274,6 +274,7 @@ operator_lookup = {
"getitem": (_getitem_impl,),
"lshift": (_unsupported_impl,),
"rshift": (_unsupported_impl,),
+ "contains": (_unsupported_impl,),
}
diff --git a/lib/sqlalchemy/sql/dml.py b/lib/sqlalchemy/sql/dml.py
index ddda109f6..8f368dcdb 100644
--- a/lib/sqlalchemy/sql/dml.py
+++ b/lib/sqlalchemy/sql/dml.py
@@ -1,5 +1,5 @@
# sql/dml.py
-# Copyright (C) 2009-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2009-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -9,15 +9,18 @@ Provide :class:`.Insert`, :class:`.Update` and :class:`.Delete`.
"""
-from .base import Executable, _generative, _from_objects, DialectKWArgs
+from .base import Executable, _generative, _from_objects, DialectKWArgs, \
+ ColumnCollection
from .elements import ClauseElement, _literal_as_text, Null, and_, _clone, \
_column_as_key
-from .selectable import _interpret_as_from, _interpret_as_select, HasPrefixes
+from .selectable import _interpret_as_from, _interpret_as_select, \
+ HasPrefixes, HasCTE
from .. import util
from .. import exc
-class UpdateBase(DialectKWArgs, HasPrefixes, Executable, ClauseElement):
+class UpdateBase(
+ HasCTE, DialectKWArgs, HasPrefixes, Executable, ClauseElement):
"""Form the base for ``INSERT``, ``UPDATE``, and ``DELETE`` statements.
"""
diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py
index fe2fecce8..00c2c37ba 100644
--- a/lib/sqlalchemy/sql/elements.py
+++ b/lib/sqlalchemy/sql/elements.py
@@ -1,5 +1,5 @@
# sql/elements.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -646,6 +646,9 @@ class ColumnElement(operators.ColumnOperators, ClauseElement):
def _negate(self):
if self.type._type_affinity is type_api.BOOLEANTYPE._type_affinity:
+ # TODO: see the note in AsBoolean that it seems to assume
+ # the element is the True_() / False_() constant, so this
+ # is too broad
return AsBoolean(self, operators.isfalse, operators.istrue)
else:
return super(ColumnElement, self)._negate()
@@ -2766,6 +2769,13 @@ class UnaryExpression(ColumnElement):
modifier=self.modifier,
type_=self.type,
wraps_column_expression=self.wraps_column_expression)
+ elif self.type._type_affinity is type_api.BOOLEANTYPE._type_affinity:
+ return UnaryExpression(
+ self.self_group(against=operators.inv),
+ operator=operators.inv,
+ type_=type_api.BOOLEANTYPE,
+ wraps_column_expression=self.wraps_column_expression,
+ negate=None)
else:
return ClauseElement._negate(self)
@@ -2875,6 +2885,9 @@ class AsBoolean(UnaryExpression):
return self
def _negate(self):
+ # TODO: this assumes the element is the True_() or False_()
+ # object, but this assumption isn't enforced and
+ # ColumnElement._negate() can send any number of expressions here
return self.element._negate()
@@ -4005,8 +4018,10 @@ def _cloned_difference(a, b):
if not all_overlap.intersection(elem._cloned_set))
-def _labeled(element):
- if not hasattr(element, 'name'):
+@util.dependencies("sqlalchemy.sql.functions")
+def _labeled(functions, element):
+ if not hasattr(element, 'name') or \
+ isinstance(element, functions.FunctionElement):
return element.label(None)
else:
return element
diff --git a/lib/sqlalchemy/sql/expression.py b/lib/sqlalchemy/sql/expression.py
index 27fae8ca4..97f74d4e4 100644
--- a/lib/sqlalchemy/sql/expression.py
+++ b/lib/sqlalchemy/sql/expression.py
@@ -1,5 +1,5 @@
# sql/expression.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -16,12 +16,14 @@ class.
__all__ = [
'Alias', 'Any', 'All', 'ClauseElement', 'ColumnCollection', 'ColumnElement',
- 'CompoundSelect', 'Delete', 'FromClause', 'Insert', 'Join', 'Select',
+ 'CompoundSelect', 'Delete', 'FromClause', 'Insert', 'Join', 'Lateral',
+ 'Select',
'Selectable', 'TableClause', 'Update', 'alias', 'and_', 'asc', 'between',
'bindparam', 'case', 'cast', 'column', 'delete', 'desc', 'distinct',
'except_', 'except_all', 'exists', 'extract', 'func', 'modifier',
'collate', 'insert', 'intersect', 'intersect_all', 'join', 'label',
- 'literal', 'literal_column', 'not_', 'null', 'nullsfirst', 'nullslast',
+ 'lateral', 'literal', 'literal_column', 'not_', 'null', 'nullsfirst',
+ 'nullslast',
'or_', 'outparam', 'outerjoin', 'over', 'select', 'subquery',
'table', 'text',
'tuple_', 'type_coerce', 'union', 'union_all', 'update', 'within_group']
@@ -45,9 +47,9 @@ from .base import ColumnCollection, Generative, Executable, \
PARSE_AUTOCOMMIT
from .selectable import Alias, Join, Select, Selectable, TableClause, \
- CompoundSelect, CTE, FromClause, FromGrouping, SelectBase, \
- alias, GenerativeSelect, \
- subquery, HasPrefixes, HasSuffixes, Exists, ScalarSelect, TextAsFrom
+ CompoundSelect, CTE, FromClause, FromGrouping, Lateral, SelectBase, \
+ alias, GenerativeSelect, subquery, HasCTE, HasPrefixes, HasSuffixes, \
+ lateral, Exists, ScalarSelect, TextAsFrom
from .dml import Insert, Update, Delete, UpdateBase, ValuesBase
diff --git a/lib/sqlalchemy/sql/functions.py b/lib/sqlalchemy/sql/functions.py
index 3c654bf67..e6df07056 100644
--- a/lib/sqlalchemy/sql/functions.py
+++ b/lib/sqlalchemy/sql/functions.py
@@ -1,5 +1,5 @@
# sql/functions.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -189,22 +189,24 @@ class FunctionElement(Executable, ColumnElement, FromClause):
:class:`.FunctionElement`.
This construct wraps the function in a named alias which
- is suitable for the FROM clause.
+        is suitable for the FROM clause, in the style accepted, for
+        example, by Postgresql.
e.g.::
from sqlalchemy.sql import column
- stmt = select([column('data')]).select_from(
- func.unnest(Table.data).alias('data_view')
+ stmt = select([column('data_view')]).\\
+ select_from(SomeTable).\\
+ select_from(func.unnest(SomeTable.data).alias('data_view')
)
Would produce:
.. sourcecode:: sql
- SELECT data
- FROM unnest(sometable.data) AS data_view
+ SELECT data_view
+ FROM sometable, unnest(sometable.data) AS data_view
.. versionadded:: 0.9.8 The :meth:`.FunctionElement.alias` method
is now supported. Previously, this method's behavior was
diff --git a/lib/sqlalchemy/sql/naming.py b/lib/sqlalchemy/sql/naming.py
index bc13835ed..2a1a832a4 100644
--- a/lib/sqlalchemy/sql/naming.py
+++ b/lib/sqlalchemy/sql/naming.py
@@ -1,5 +1,5 @@
# sqlalchemy/naming.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/sql/operators.py b/lib/sqlalchemy/sql/operators.py
index f4f90b664..80f08a97c 100644
--- a/lib/sqlalchemy/sql/operators.py
+++ b/lib/sqlalchemy/sql/operators.py
@@ -1,5 +1,5 @@
# sql/operators.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -14,7 +14,7 @@ from .. import util
from operator import (
and_, or_, inv, add, mul, sub, mod, truediv, lt, le, ne, gt, ge, eq, neg,
- getitem, lshift, rshift
+ getitem, lshift, rshift, contains
)
if util.py2k:
@@ -335,6 +335,9 @@ class ColumnOperators(Operators):
"""
return self.operate(neg)
+ def __contains__(self, other):
+ return self.operate(contains, other)
+
def __getitem__(self, index):
"""Implement the [] operator.
diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py
index 0626cb2b4..5e709b1e3 100644
--- a/lib/sqlalchemy/sql/schema.py
+++ b/lib/sqlalchemy/sql/schema.py
@@ -1,5 +1,5 @@
# sql/schema.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py
index 1955fc934..e299f067e 100644
--- a/lib/sqlalchemy/sql/selectable.py
+++ b/lib/sqlalchemy/sql/selectable.py
@@ -1,5 +1,5 @@
# sql/selectable.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -161,6 +161,28 @@ def alias(selectable, name=None, flat=False):
return selectable.alias(name=name, flat=flat)
+def lateral(selectable, name=None):
+ """Return a :class:`.Lateral` object.
+
+ :class:`.Lateral` is an :class:`.Alias` subclass that represents
+ a subquery with the LATERAL keyword applied to it.
+
+ The special behavior of a LATERAL subquery is that it appears in the
+ FROM clause of an enclosing SELECT, but may correlate to other
+ FROM clauses of that SELECT. It is a special case of subquery
+ only supported by a small number of backends, currently more recent
+ Postgresql versions.
+
+ .. versionadded:: 1.1
+
+ .. seealso::
+
+ :ref:`lateral_selects` - overview of usage.
+
+ """
+ return selectable.lateral(name=name)
+
+
class Selectable(ClauseElement):
"""mark a class as being selectable"""
__visit_name__ = 'selectable'
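A short sketch of the construct in use (table names are illustrative); the
lateral subquery may refer to "people" even though both are FROM elements of
the same enclosing SELECT::

    from sqlalchemy import table, column, select, true

    people = table('people', column('id'), column('name'))
    books = table('books', column('id'), column('owner_id'))

    subq = select([books.c.id]).\
        where(books.c.owner_id == people.c.id).\
        lateral('books_subq')

    stmt = select([people.c.name, subq.c.id]).\
        select_from(people.join(subq, true()))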
@@ -313,7 +335,7 @@ class FromClause(Selectable):
return Select([self], whereclause, **params)
- def join(self, right, onclause=None, isouter=False):
+ def join(self, right, onclause=None, isouter=False, full=False):
"""Return a :class:`.Join` from this :class:`.FromClause`
to another :class:`FromClause`.
@@ -341,6 +363,11 @@ class FromClause(Selectable):
:param isouter: if True, render a LEFT OUTER JOIN, instead of JOIN.
+ :param full: if True, render a FULL OUTER JOIN, instead of LEFT OUTER
+ JOIN. Implies :paramref:`.FromClause.join.isouter`.
+
+ .. versionadded:: 1.1
+
.. seealso::
:func:`.join` - standalone function
@@ -349,9 +376,9 @@ class FromClause(Selectable):
"""
- return Join(self, right, onclause, isouter)
+ return Join(self, right, onclause, isouter, full)
- def outerjoin(self, right, onclause=None):
+ def outerjoin(self, right, onclause=None, full=False):
"""Return a :class:`.Join` from this :class:`.FromClause`
to another :class:`FromClause`, with the "isouter" flag set to
True.
@@ -379,6 +406,11 @@ class FromClause(Selectable):
join. If left at ``None``, :meth:`.FromClause.join` will attempt to
join the two tables based on a foreign key relationship.
+ :param full: if True, render a FULL OUTER JOIN, instead of
+ LEFT OUTER JOIN.
+
+ .. versionadded:: 1.1
+
.. seealso::
:meth:`.FromClause.join`
@@ -387,7 +419,7 @@ class FromClause(Selectable):
"""
- return Join(self, right, onclause, True)
+ return Join(self, right, onclause, True, full)
def alias(self, name=None, flat=False):
"""return an alias of this :class:`.FromClause`.
@@ -403,6 +435,21 @@ class FromClause(Selectable):
return Alias(self, name)
+ def lateral(self, name=None):
+ """Return a LATERAL alias of this :class:`.FromClause`.
+
+ The return value is the :class:`.Lateral` construct also
+ provided by the top-level :func:`~.expression.lateral` function.
+
+ .. versionadded:: 1.1
+
+ .. seealso::
+
+ :ref:`lateral_selects` - overview of usage.
+
+ """
+ return Lateral(self, name)
+
def is_derived_from(self, fromclause):
"""Return True if this FromClause is 'derived' from the given
FromClause.
@@ -648,7 +695,7 @@ class Join(FromClause):
_is_join = True
- def __init__(self, left, right, onclause=None, isouter=False):
+ def __init__(self, left, right, onclause=None, isouter=False, full=False):
"""Construct a new :class:`.Join`.
The usual entrypoint here is the :func:`~.expression.join`
@@ -665,9 +712,10 @@ class Join(FromClause):
self.onclause = onclause
self.isouter = isouter
+ self.full = full
@classmethod
- def _create_outerjoin(cls, left, right, onclause=None):
+ def _create_outerjoin(cls, left, right, onclause=None, full=False):
"""Return an ``OUTER JOIN`` clause element.
The returned object is an instance of :class:`.Join`.
@@ -689,10 +737,11 @@ class Join(FromClause):
:class:`.Join` object.
"""
- return cls(left, right, onclause, isouter=True)
+ return cls(left, right, onclause, isouter=True, full=full)
@classmethod
- def _create_join(cls, left, right, onclause=None, isouter=False):
+ def _create_join(cls, left, right, onclause=None, isouter=False,
+ full=False):
"""Produce a :class:`.Join` object, given two :class:`.FromClause`
expressions.
@@ -724,6 +773,10 @@ class Join(FromClause):
:param isouter: if True, render a LEFT OUTER JOIN, instead of JOIN.
+ :param full: if True, render a FULL OUTER JOIN, instead of JOIN.
+
+ .. versionadded:: 1.1
+
.. seealso::
:meth:`.FromClause.join` - method form, based on a given left side
@@ -732,7 +785,7 @@ class Join(FromClause):
"""
- return cls(left, right, onclause, isouter)
+ return cls(left, right, onclause, isouter, full)
@property
def description(self):
@@ -1050,7 +1103,7 @@ class Join(FromClause):
chain(sqlutil.ClauseAdapter(right_a))
return left_a.join(right_a, adapter.traverse(self.onclause),
- isouter=self.isouter)
+ isouter=self.isouter, full=self.full)
else:
return self.select(use_labels=True, correlate=False).alias(name)
@@ -1170,6 +1223,27 @@ class Alias(FromClause):
return self.element.bind
+class Lateral(Alias):
+ """Represent a LATERAL subquery.
+
+ This object is constructed from the :func:`~.expression.lateral` module
+ level function as well as the :meth:`.FromClause.lateral` method available
+ on all :class:`.FromClause` subclasses.
+
+    While LATERAL is part of the SQL standard, currently only more recent
+ Postgresql versions provide support for this keyword.
+
+ .. versionadded:: 1.1
+
+ .. seealso::
+
+ :ref:`lateral_selects` - overview of usage.
+
+ """
+
+ __visit_name__ = 'lateral'
+
+
class CTE(Generative, HasSuffixes, Alias):
"""Represent a Common Table Expression.
@@ -1195,6 +1269,15 @@ class CTE(Generative, HasSuffixes, Alias):
self._suffixes = _suffixes
super(CTE, self).__init__(selectable, name=name)
+ @util.dependencies("sqlalchemy.sql.dml")
+ def _populate_column_collection(self, dml):
+ if isinstance(self.element, dml.UpdateBase):
+ for col in self.element._returning:
+ col._make_proxy(self)
+ else:
+ for col in self.element.columns._all_columns:
+ col._make_proxy(self)
+
def alias(self, name=None, flat=False):
return CTE(
self.original,
@@ -1223,6 +1306,164 @@ class CTE(Generative, HasSuffixes, Alias):
)
+class HasCTE(object):
+ """Mixin that declares a class to include CTE support.
+
+ .. versionadded:: 1.1
+
+ """
+
+ def cte(self, name=None, recursive=False):
+ """Return a new :class:`.CTE`, or Common Table Expression instance.
+
+ Common table expressions are a SQL standard whereby SELECT
+ statements can draw upon secondary statements specified along
+ with the primary statement, using a clause called "WITH".
+ Special semantics regarding UNION can also be employed to
+ allow "recursive" queries, where a SELECT statement can draw
+ upon the set of rows that have previously been selected.
+
+ CTEs can also be applied to DML constructs UPDATE, INSERT
+ and DELETE on some databases, both as a source of CTE rows
+ when combined with RETURNING, as well as a consumer of
+ CTE rows.
+
+ SQLAlchemy detects :class:`.CTE` objects, which are treated
+ similarly to :class:`.Alias` objects, as special elements
+ to be delivered to the FROM clause of the statement as well
+ as to a WITH clause at the top of the statement.
+
+        .. versionchanged:: 1.1 Added support for using UPDATE/INSERT/DELETE
+           as a CTE, and for adding CTEs to UPDATE/INSERT/DELETE statements.
+
+ :param name: name given to the common table expression. Like
+ :meth:`._FromClause.alias`, the name can be left as ``None``
+ in which case an anonymous symbol will be used at query
+ compile time.
+ :param recursive: if ``True``, will render ``WITH RECURSIVE``.
+ A recursive common table expression is intended to be used in
+ conjunction with UNION ALL in order to derive rows
+ from those already selected.
+
+ The following examples include two from Postgresql's documentation at
+ http://www.postgresql.org/docs/current/static/queries-with.html,
+ as well as additional examples.
+
+ Example 1, non recursive::
+
+ from sqlalchemy import (Table, Column, String, Integer,
+ MetaData, select, func)
+
+ metadata = MetaData()
+
+ orders = Table('orders', metadata,
+ Column('region', String),
+ Column('amount', Integer),
+ Column('product', String),
+ Column('quantity', Integer)
+ )
+
+ regional_sales = select([
+ orders.c.region,
+ func.sum(orders.c.amount).label('total_sales')
+ ]).group_by(orders.c.region).cte("regional_sales")
+
+
+ top_regions = select([regional_sales.c.region]).\\
+ where(
+ regional_sales.c.total_sales >
+ select([
+ func.sum(regional_sales.c.total_sales)/10
+ ])
+ ).cte("top_regions")
+
+ statement = select([
+ orders.c.region,
+ orders.c.product,
+ func.sum(orders.c.quantity).label("product_units"),
+ func.sum(orders.c.amount).label("product_sales")
+ ]).where(orders.c.region.in_(
+ select([top_regions.c.region])
+ )).group_by(orders.c.region, orders.c.product)
+
+ result = conn.execute(statement).fetchall()
+
+ Example 2, WITH RECURSIVE::
+
+ from sqlalchemy import (Table, Column, String, Integer,
+ MetaData, select, func)
+
+ metadata = MetaData()
+
+ parts = Table('parts', metadata,
+ Column('part', String),
+ Column('sub_part', String),
+ Column('quantity', Integer),
+ )
+
+ included_parts = select([
+ parts.c.sub_part,
+ parts.c.part,
+ parts.c.quantity]).\\
+ where(parts.c.part=='our part').\\
+ cte(recursive=True)
+
+
+ incl_alias = included_parts.alias()
+ parts_alias = parts.alias()
+ included_parts = included_parts.union_all(
+ select([
+ parts_alias.c.sub_part,
+ parts_alias.c.part,
+ parts_alias.c.quantity
+ ]).
+ where(parts_alias.c.part==incl_alias.c.sub_part)
+ )
+
+ statement = select([
+ included_parts.c.sub_part,
+ func.sum(included_parts.c.quantity).
+ label('total_quantity')
+ ]).\\
+ group_by(included_parts.c.sub_part)
+
+ result = conn.execute(statement).fetchall()
+
+ Example 3, an upsert using UPDATE and INSERT with CTEs::
+
+ orders = table(
+ 'orders',
+ column('region'),
+ column('amount'),
+ column('product'),
+ column('quantity')
+ )
+
+ upsert = (
+ orders.update()
+ .where(orders.c.region == 'Region1')
+ .values(amount=1.0, product='Product1', quantity=1)
+ .returning(*(orders.c._all_columns)).cte('upsert'))
+
+ insert = orders.insert().from_select(
+ orders.c.keys(),
+ select([
+ literal('Region1'), literal(1.0),
+ literal('Product1'), literal(1)
+            ]).where(exists(upsert.select()))
+ )
+
+ connection.execute(insert)
+
+ .. seealso::
+
+ :meth:`.orm.query.Query.cte` - ORM version of
+ :meth:`.HasCTE.cte`.
+
+ """
+ return CTE(self, name=name, recursive=recursive)
+
+
class FromGrouping(FromClause):
"""Represent a grouping of a FROM clause"""
__visit_name__ = 'grouping'
@@ -1497,7 +1738,7 @@ class ForUpdateArg(ClauseElement):
self.of = None
-class SelectBase(Executable, FromClause):
+class SelectBase(HasCTE, Executable, FromClause):
"""Base class for SELECT statements.
@@ -1531,125 +1772,6 @@ class SelectBase(Executable, FromClause):
"""
return self.as_scalar().label(name)
- def cte(self, name=None, recursive=False):
- """Return a new :class:`.CTE`, or Common Table Expression instance.
-
- Common table expressions are a SQL standard whereby SELECT
- statements can draw upon secondary statements specified along
- with the primary statement, using a clause called "WITH".
- Special semantics regarding UNION can also be employed to
- allow "recursive" queries, where a SELECT statement can draw
- upon the set of rows that have previously been selected.
-
- SQLAlchemy detects :class:`.CTE` objects, which are treated
- similarly to :class:`.Alias` objects, as special elements
- to be delivered to the FROM clause of the statement as well
- as to a WITH clause at the top of the statement.
-
- .. versionadded:: 0.7.6
-
- :param name: name given to the common table expression. Like
- :meth:`._FromClause.alias`, the name can be left as ``None``
- in which case an anonymous symbol will be used at query
- compile time.
- :param recursive: if ``True``, will render ``WITH RECURSIVE``.
- A recursive common table expression is intended to be used in
- conjunction with UNION ALL in order to derive rows
- from those already selected.
-
- The following examples illustrate two examples from
- Postgresql's documentation at
- http://www.postgresql.org/docs/8.4/static/queries-with.html.
-
- Example 1, non recursive::
-
- from sqlalchemy import (Table, Column, String, Integer,
- MetaData, select, func)
-
- metadata = MetaData()
-
- orders = Table('orders', metadata,
- Column('region', String),
- Column('amount', Integer),
- Column('product', String),
- Column('quantity', Integer)
- )
-
- regional_sales = select([
- orders.c.region,
- func.sum(orders.c.amount).label('total_sales')
- ]).group_by(orders.c.region).cte("regional_sales")
-
-
- top_regions = select([regional_sales.c.region]).\\
- where(
- regional_sales.c.total_sales >
- select([
- func.sum(regional_sales.c.total_sales)/10
- ])
- ).cte("top_regions")
-
- statement = select([
- orders.c.region,
- orders.c.product,
- func.sum(orders.c.quantity).label("product_units"),
- func.sum(orders.c.amount).label("product_sales")
- ]).where(orders.c.region.in_(
- select([top_regions.c.region])
- )).group_by(orders.c.region, orders.c.product)
-
- result = conn.execute(statement).fetchall()
-
- Example 2, WITH RECURSIVE::
-
- from sqlalchemy import (Table, Column, String, Integer,
- MetaData, select, func)
-
- metadata = MetaData()
-
- parts = Table('parts', metadata,
- Column('part', String),
- Column('sub_part', String),
- Column('quantity', Integer),
- )
-
- included_parts = select([
- parts.c.sub_part,
- parts.c.part,
- parts.c.quantity]).\\
- where(parts.c.part=='our part').\\
- cte(recursive=True)
-
-
- incl_alias = included_parts.alias()
- parts_alias = parts.alias()
- included_parts = included_parts.union_all(
- select([
- parts_alias.c.sub_part,
- parts_alias.c.part,
- parts_alias.c.quantity
- ]).
- where(parts_alias.c.part==incl_alias.c.sub_part)
- )
-
- statement = select([
- included_parts.c.sub_part,
- func.sum(included_parts.c.quantity).
- label('total_quantity')
- ]).\\
- group_by(included_parts.c.sub_part)
-
- result = conn.execute(statement).fetchall()
-
-
- .. seealso::
-
- :meth:`.orm.query.Query.cte` - ORM version of
- :meth:`.SelectBase.cte`.
-
- """
- return CTE(self, name=name, recursive=recursive)
-
@_generative
@util.deprecated('0.6',
message="``autocommit()`` is deprecated. Use "
diff --git a/lib/sqlalchemy/sql/sqltypes.py b/lib/sqlalchemy/sql/sqltypes.py
index 84bfca026..81630fe4f 100644
--- a/lib/sqlalchemy/sql/sqltypes.py
+++ b/lib/sqlalchemy/sql/sqltypes.py
@@ -1,5 +1,5 @@
# sql/sqltypes.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -1082,11 +1082,52 @@ class Enum(String, SchemaType):
"""Generic Enum Type.
- The Enum type provides a set of possible string values which the
- column is constrained towards.
+ The :class:`.Enum` type provides a set of possible string values
+ which the column is constrained towards.
+
+ The :class:`.Enum` type will make use of the backend's native "ENUM"
+ type if one is available; otherwise, it uses a VARCHAR datatype and
+ produces a CHECK constraint. Use of the backend-native enum type
+ can be disabled using the :paramref:`.Enum.native_enum` flag, and
+ the production of the CHECK constraint is configurable using the
+ :paramref:`.Enum.create_constraint` flag.
+
+ The :class:`.Enum` type also provides in-Python validation of both
+ input values and database-returned values. A ``LookupError`` is raised
+ for any Python value that's not located in the given list of possible
+ values.
+
+ .. versionchanged:: 1.1 the :class:`.Enum` type now provides in-Python
+ validation of input values as well as on data being returned by
+ the database.
+
+ The source of enumerated values may be a list of string values, or
+ alternatively a PEP-435-compliant enumerated class. For the purposes
+ of the :class:`.Enum` datatype, this class need only provide a
+    ``__members__`` attribute.
+
+ When using an enumerated class, the enumerated objects are used
+ both for input and output, rather than strings as is the case with
+ a plain-string enumerated type::
+
+ import enum
+ class MyEnum(enum.Enum):
+ one = "one"
+ two = "two"
+ three = "three"
+
+
+ t = Table(
+ 'data', MetaData(),
+ Column('value', Enum(MyEnum))
+ )
+
+ connection.execute(t.insert(), {"value": MyEnum.two})
+ assert connection.scalar(t.select()) is MyEnum.two
+
+ .. versionadded:: 1.1 - support for PEP-435-style enumerated
+ classes.
- By default, uses the backend's native ENUM type if available,
- else uses VARCHAR + a CHECK constraint.
.. seealso::
@@ -1103,13 +1144,25 @@ class Enum(String, SchemaType):
Keyword arguments which don't apply to a specific backend are ignored
by that backend.
- :param \*enums: string or unicode enumeration labels. If unicode
+ :param \*enums: either exactly one PEP-435 compliant enumerated type
+ or one or more string or unicode enumeration labels. If unicode
labels are present, the `convert_unicode` flag is auto-enabled.
+ .. versionadded:: 1.1 a PEP-435 style enumerated class may be
+ passed.
+
:param convert_unicode: Enable unicode-aware bind parameter and
result-set processing for this Enum's data. This is set
automatically based on the presence of unicode label strings.
+ :param create_constraint: defaults to True. When creating a non-native
+ enumerated type, also build a CHECK constraint on the database
+ against the valid values.
+
+ .. versionadded:: 1.1 - added :paramref:`.Enum.create_constraint`
+ which provides the option to disable the production of the
+ CHECK constraint for a non-native enumerated type.
+
:param metadata: Associate this type directly with a ``MetaData``
object. For types that exist on the target database as an
independent schema construct (Postgresql), this type will be
@@ -1124,7 +1177,9 @@ class Enum(String, SchemaType):
:param name: The name of this type. This is required for Postgresql
and any future supported database which requires an explicitly
named type, or an explicitly named constraint in order to generate
- the type and/or a table that uses it.
+ the type and/or a table that uses it. If a PEP-435 enumerated
+ class was used, its name (converted to lower case) is used by
+ default.
:param native_enum: Use the database's native ENUM type when
available. Defaults to True. When False, uses VARCHAR + check
@@ -1150,14 +1205,16 @@ class Enum(String, SchemaType):
``schema`` attribute. This also takes effect when using the
:meth:`.Table.tometadata` operation.
- .. versionadded:: 0.8
-
"""
- self.enums = enums
+
+ values, objects = self._parse_into_values(enums, kw)
+ self._setup_for_values(values, objects, kw)
+
self.native_enum = kw.pop('native_enum', True)
convert_unicode = kw.pop('convert_unicode', None)
+ self.create_constraint = kw.pop('create_constraint', True)
if convert_unicode is None:
- for e in enums:
+ for e in self.enums:
if isinstance(e, util.text_type):
convert_unicode = True
break
@@ -1168,12 +1225,53 @@ class Enum(String, SchemaType):
length = max(len(x) for x in self.enums)
else:
length = 0
+ self._valid_lookup[None] = self._object_lookup[None] = None
+
String.__init__(self,
length=length,
convert_unicode=convert_unicode,
)
SchemaType.__init__(self, **kw)
+ def _parse_into_values(self, enums, kw):
+ if len(enums) == 1 and hasattr(enums[0], '__members__'):
+ self.enum_class = enums[0]
+ values = list(self.enum_class.__members__)
+ objects = [self.enum_class.__members__[k] for k in values]
+ kw.setdefault('name', self.enum_class.__name__.lower())
+
+ return values, objects
+ else:
+ self.enum_class = None
+ return enums, enums
+
+ def _setup_for_values(self, values, objects, kw):
+ self.enums = list(values)
+
+ self._valid_lookup = dict(
+ zip(objects, values)
+ )
+ self._object_lookup = dict(
+ (value, key) for key, value in self._valid_lookup.items()
+ )
+ self._valid_lookup.update(
+ [(value, value) for value in self._valid_lookup.values()]
+ )
+
+ def _db_value_for_elem(self, elem):
+ try:
+ return self._valid_lookup[elem]
+ except KeyError:
+ raise LookupError(
+ '"%s" is not among the defined enum values' % elem)
+
+ def _object_value_for_elem(self, elem):
+ try:
+ return self._object_lookup[elem]
+ except KeyError:
+ raise LookupError(
+ '"%s" is not among the defined enum values' % elem)
+
def __repr__(self):
return util.generic_repr(self,
additional_kw=[('native_enum', True)],
@@ -1189,6 +1287,9 @@ class Enum(String, SchemaType):
if self.native_enum:
SchemaType._set_table(self, column, table)
+ if not self.create_constraint:
+ return
+
e = schema.CheckConstraint(
type_coerce(column, self).in_(self.enums),
name=_defer_name(self.name),
@@ -1203,6 +1304,10 @@ class Enum(String, SchemaType):
metadata = kw.pop('metadata', self.metadata)
_create_events = kw.pop('_create_events', False)
if issubclass(impltype, Enum):
+ if self.enum_class is not None:
+ args = [self.enum_class]
+ else:
+ args = self.enums
return impltype(name=self.name,
schema=schema,
metadata=metadata,
@@ -1210,15 +1315,54 @@ class Enum(String, SchemaType):
native_enum=self.native_enum,
inherit_schema=self.inherit_schema,
_create_events=_create_events,
- *self.enums,
+ *args,
**kw)
else:
# TODO: why would we be here?
return super(Enum, self).adapt(impltype, **kw)
+ def literal_processor(self, dialect):
+ parent_processor = super(Enum, self).literal_processor(dialect)
-class PickleType(TypeDecorator):
+ def process(value):
+ value = self._db_value_for_elem(value)
+ if parent_processor:
+ value = parent_processor(value)
+ return value
+ return process
+
+ def bind_processor(self, dialect):
+ def process(value):
+ value = self._db_value_for_elem(value)
+ if parent_processor:
+ value = parent_processor(value)
+ return value
+
+ parent_processor = super(Enum, self).bind_processor(dialect)
+ return process
+ def result_processor(self, dialect, coltype):
+ parent_processor = super(Enum, self).result_processor(
+ dialect, coltype)
+
+ def process(value):
+ if parent_processor:
+ value = parent_processor(value)
+
+ value = self._object_value_for_elem(value)
+ return value
+
+ return process
+
+ @property
+ def python_type(self):
+ if self.enum_class:
+ return self.enum_class
+ else:
+ return super(Enum, self).python_type
+
+
+class PickleType(TypeDecorator):
"""Holds Python objects, which are serialized using pickle.
PickleType builds upon the Binary type to apply Python's
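A small sketch of the PEP-435 handling and the in-Python validation added
above; the ``_db_value_for_elem`` / ``_object_value_for_elem`` helpers are
the internal hooks used by the bind and result processors::

    import enum
    from sqlalchemy import Enum

    class Color(enum.Enum):
        red = "red"
        green = "green"

    e = Enum(Color)          # name defaults to "color"

    assert e._db_value_for_elem(Color.red) == "red"
    assert e._object_value_for_elem("green") is Color.green

    try:
        e._db_value_for_elem("blue")   # not a defined value
    except LookupError:
        pass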
diff --git a/lib/sqlalchemy/sql/type_api.py b/lib/sqlalchemy/sql/type_api.py
index c367bc73e..2a7adf8af 100644
--- a/lib/sqlalchemy/sql/type_api.py
+++ b/lib/sqlalchemy/sql/type_api.py
@@ -1,5 +1,5 @@
# sql/types_api.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/sql/util.py b/lib/sqlalchemy/sql/util.py
index f5aa9f228..5f180646c 100644
--- a/lib/sqlalchemy/sql/util.py
+++ b/lib/sqlalchemy/sql/util.py
@@ -1,5 +1,5 @@
# sql/util.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -176,6 +176,28 @@ def unwrap_order_by(clause):
return result
+def expand_column_list_from_order_by(collist, order_by):
+ """Given the columns clause and ORDER BY of a selectable,
+ return a list of column expressions that can be added to the collist
+ corresponding to the ORDER BY, without repeating those already
+ in the collist.
+
+ """
+ cols_already_present = set([
+ col.element if col._order_by_label_element is not None
+ else col for col in collist
+ ])
+
+ return [
+ col for col in
+ chain(*[
+ unwrap_order_by(o)
+ for o in order_by
+ ])
+ if col not in cols_already_present
+ ]
+
+
def clause_is_present(clause, search):
"""Given a target clause and a second to search within, return True
if the target is plainly present in the search without any
@@ -257,28 +279,128 @@ def _quote_ddl_expr(element):
return repr(element)
-class _repr_params(object):
- """A string view of bound parameters, truncating
- display to the given number of 'multi' parameter sets.
+class _repr_base(object):
+ _LIST = 0
+ _TUPLE = 1
+ _DICT = 2
+
+ __slots__ = 'max_chars',
+
+ def trunc(self, value):
+ rep = repr(value)
+ lenrep = len(rep)
+ if lenrep > self.max_chars:
+ segment_length = self.max_chars // 2
+ rep = (
+ rep[0:segment_length] +
+ (" ... (%d characters truncated) ... "
+ % (lenrep - self.max_chars)) +
+ rep[-segment_length:]
+ )
+ return rep
+
+
+class _repr_row(_repr_base):
+ """Provide a string view of a row."""
+
+ __slots__ = 'row',
+
+ def __init__(self, row, max_chars=300):
+ self.row = row
+ self.max_chars = max_chars
+
+ def __repr__(self):
+ trunc = self.trunc
+ return "(%s%s)" % (
+ ", ".join(trunc(value) for value in self.row),
+ "," if len(self.row) == 1 else ""
+ )
+
+
+class _repr_params(_repr_base):
+ """Provide a string view of bound parameters.
+
+    Truncates display to a given number of 'multi' parameter sets,
+ as well as long values to a given number of characters.
"""
- def __init__(self, params, batches):
+ __slots__ = 'params', 'batches',
+
+ def __init__(self, params, batches, max_chars=300):
self.params = params
self.batches = batches
+ self.max_chars = max_chars
def __repr__(self):
- if isinstance(self.params, (list, tuple)) and \
- len(self.params) > self.batches and \
- isinstance(self.params[0], (list, dict, tuple)):
+ if isinstance(self.params, list):
+ typ = self._LIST
+ ismulti = self.params and isinstance(
+ self.params[0], (list, dict, tuple))
+ elif isinstance(self.params, tuple):
+ typ = self._TUPLE
+ ismulti = self.params and isinstance(
+ self.params[0], (list, dict, tuple))
+ elif isinstance(self.params, dict):
+ typ = self._DICT
+ ismulti = False
+ else:
+ return self.trunc(self.params)
+
+ if ismulti and len(self.params) > self.batches:
msg = " ... displaying %i of %i total bound parameter sets ... "
return ' '.join((
- repr(self.params[:self.batches - 2])[0:-1],
+ self._repr_multi(self.params[:self.batches - 2], typ)[0:-1],
msg % (self.batches, len(self.params)),
- repr(self.params[-2:])[1:]
+ self._repr_multi(self.params[-2:], typ)[1:]
))
+ elif ismulti:
+ return self._repr_multi(self.params, typ)
+ else:
+ return self._repr_params(self.params, typ)
+
+ def _repr_multi(self, multi_params, typ):
+ if multi_params:
+ if isinstance(multi_params[0], list):
+ elem_type = self._LIST
+ elif isinstance(multi_params[0], tuple):
+ elem_type = self._TUPLE
+ elif isinstance(multi_params[0], dict):
+ elem_type = self._DICT
+ else:
+ assert False, \
+ "Unknown parameter type %s" % (type(multi_params[0]))
+
+ elements = ", ".join(
+ self._repr_params(params, elem_type)
+ for params in multi_params)
+ else:
+ elements = ""
+
+ if typ == self._LIST:
+ return "[%s]" % elements
+ else:
+ return "(%s)" % elements
+
+ def _repr_params(self, params, typ):
+ trunc = self.trunc
+ if typ is self._DICT:
+ return "{%s}" % (
+ ", ".join(
+ "%r: %s" % (key, trunc(value))
+ for key, value in params.items()
+ )
+ )
+ elif typ is self._TUPLE:
+ return "(%s%s)" % (
+ ", ".join(trunc(value) for value in params),
+ "," if len(params) == 1 else ""
+
+ )
else:
- return repr(self.params)
+ return "[%s]" % (
+ ", ".join(trunc(value) for value in params)
+ )
def adapt_criterion_to_null(crit, nulls):
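A quick sketch of the new truncating repr; the rendered output is abridged
here and the exact character count depends on ``max_chars``::

    from sqlalchemy.sql.util import _repr_params

    params = {"data": "x" * 500, "id": 1}
    print(_repr_params(params, batches=10, max_chars=80))
    # roughly: {'data': "'xxxx ... (422 characters truncated) ... xxxx'", 'id': 1}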
diff --git a/lib/sqlalchemy/sql/visitors.py b/lib/sqlalchemy/sql/visitors.py
index 0540ac5d3..d12213e35 100644
--- a/lib/sqlalchemy/sql/visitors.py
+++ b/lib/sqlalchemy/sql/visitors.py
@@ -1,5 +1,5 @@
# sql/visitors.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/testing/__init__.py b/lib/sqlalchemy/testing/__init__.py
index d24f31321..f4a23d238 100644
--- a/lib/sqlalchemy/testing/__init__.py
+++ b/lib/sqlalchemy/testing/__init__.py
@@ -1,5 +1,5 @@
# testing/__init__.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -22,7 +22,7 @@ from .assertions import emits_warning, emits_warning_on, uses_deprecated, \
eq_, ne_, le_, is_, is_not_, startswith_, assert_raises, \
assert_raises_message, AssertsCompiledSQL, ComparesTables, \
AssertsExecutionResults, expect_deprecated, expect_warnings, \
- in_, not_in_, eq_ignore_whitespace
+ in_, not_in_, eq_ignore_whitespace, eq_regex
from .util import run_as_contextmanager, rowset, fail, \
provide_metadata, adict, force_drop_names, \
diff --git a/lib/sqlalchemy/testing/assertions.py b/lib/sqlalchemy/testing/assertions.py
index 8c962d7a3..ea50c0738 100644
--- a/lib/sqlalchemy/testing/assertions.py
+++ b/lib/sqlalchemy/testing/assertions.py
@@ -1,5 +1,5 @@
# testing/assertions.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -204,6 +204,10 @@ def _assert_no_stray_pool_connections():
_STRAY_CONNECTION_FAILURES = 0
+def eq_regex(a, b, msg=None):
+ assert re.match(b, a), msg or "%r !~ %r" % (a, b)
+
+
def eq_(a, b, msg=None):
"""Assert a == b, with repr messaging on failure."""
assert a == b, msg or "%r != %r" % (a, b)
@@ -296,6 +300,8 @@ class AssertsCompiledSQL(object):
dialect = config.db.dialect
elif dialect == 'default':
dialect = default.DefaultDialect()
+ elif dialect == 'default_enhanced':
+ dialect = default.StrCompileDialect()
elif isinstance(dialect, util.string_types):
dialect = url.URL(dialect).get_dialect()()
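The new ``eq_regex`` assertion compares a string against a regular expression
via ``re.match``; a hypothetical usage in a test::

    from sqlalchemy.testing import eq_regex

    eq_regex(
        "SELECT users.id FROM users",
        r"SELECT users\.id FROM \w+",
    )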
diff --git a/lib/sqlalchemy/testing/assertsql.py b/lib/sqlalchemy/testing/assertsql.py
index 56c422cf1..0aae12dcc 100644
--- a/lib/sqlalchemy/testing/assertsql.py
+++ b/lib/sqlalchemy/testing/assertsql.py
@@ -1,5 +1,5 @@
# testing/assertsql.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/testing/config.py b/lib/sqlalchemy/testing/config.py
index a3d6e1690..da5997661 100644
--- a/lib/sqlalchemy/testing/config.py
+++ b/lib/sqlalchemy/testing/config.py
@@ -1,5 +1,5 @@
# testing/config.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/testing/engines.py b/lib/sqlalchemy/testing/engines.py
index 1eaf62960..def9f3c14 100644
--- a/lib/sqlalchemy/testing/engines.py
+++ b/lib/sqlalchemy/testing/engines.py
@@ -1,5 +1,5 @@
# testing/engines.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/testing/entities.py b/lib/sqlalchemy/testing/entities.py
index 65178ea5b..a5d04decc 100644
--- a/lib/sqlalchemy/testing/entities.py
+++ b/lib/sqlalchemy/testing/entities.py
@@ -1,5 +1,5 @@
# testing/entities.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/testing/exclusions.py b/lib/sqlalchemy/testing/exclusions.py
index 5d7baeb9c..b672656a0 100644
--- a/lib/sqlalchemy/testing/exclusions.py
+++ b/lib/sqlalchemy/testing/exclusions.py
@@ -1,5 +1,5 @@
# testing/exclusions.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/testing/fixtures.py b/lib/sqlalchemy/testing/fixtures.py
index 5cd0244ef..d6712b461 100644
--- a/lib/sqlalchemy/testing/fixtures.py
+++ b/lib/sqlalchemy/testing/fixtures.py
@@ -1,5 +1,5 @@
# testing/fixtures.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/testing/mock.py b/lib/sqlalchemy/testing/mock.py
index c836bb407..674f0851a 100644
--- a/lib/sqlalchemy/testing/mock.py
+++ b/lib/sqlalchemy/testing/mock.py
@@ -1,5 +1,5 @@
# testing/mock.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/testing/pickleable.py b/lib/sqlalchemy/testing/pickleable.py
index 7b696ad67..d6814a13a 100644
--- a/lib/sqlalchemy/testing/pickleable.py
+++ b/lib/sqlalchemy/testing/pickleable.py
@@ -1,5 +1,5 @@
# testing/pickleable.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/testing/plugin/noseplugin.py b/lib/sqlalchemy/testing/plugin/noseplugin.py
index 4c390d409..9fc5848fb 100644
--- a/lib/sqlalchemy/testing/plugin/noseplugin.py
+++ b/lib/sqlalchemy/testing/plugin/noseplugin.py
@@ -1,5 +1,5 @@
# plugin/noseplugin.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/testing/plugin/plugin_base.py b/lib/sqlalchemy/testing/plugin/plugin_base.py
index 6cdec05ad..fd33c56a0 100644
--- a/lib/sqlalchemy/testing/plugin/plugin_base.py
+++ b/lib/sqlalchemy/testing/plugin/plugin_base.py
@@ -1,5 +1,5 @@
# plugin/plugin_base.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -174,6 +174,7 @@ def post_begin():
warnings.setup_filters()
+
def _log(opt_str, value, parser):
global logging
if not logging:
@@ -427,15 +428,23 @@ def stop_test_class(cls):
#from sqlalchemy import inspect
#assert not inspect(testing.db).get_table_names()
engines.testing_reaper._stop_test_ctx()
- if not options.low_connections:
- assertions.global_cleanup_assertions()
- _restore_engine()
+ try:
+ if not options.low_connections:
+ assertions.global_cleanup_assertions()
+ finally:
+ _restore_engine()
def _restore_engine():
config._current.reset(testing)
+def final_process_cleanup():
+ engines.testing_reaper._stop_test_ctx_aggressive()
+ assertions.global_cleanup_assertions()
+ _restore_engine()
+
+
def _setup_engine(cls):
if getattr(cls, '__engine_options__', None):
eng = engines.testing_engine(options=cls.__engine_options__)
diff --git a/lib/sqlalchemy/testing/plugin/pytestplugin.py b/lib/sqlalchemy/testing/plugin/pytestplugin.py
index 5bb6b966d..0bd79eea0 100644
--- a/lib/sqlalchemy/testing/plugin/pytestplugin.py
+++ b/lib/sqlalchemy/testing/plugin/pytestplugin.py
@@ -54,6 +54,10 @@ def pytest_configure(config):
def pytest_sessionstart(session):
plugin_base.post_begin()
+
+def pytest_sessionfinish(session):
+ plugin_base.final_process_cleanup()
+
if has_xdist:
import uuid
diff --git a/lib/sqlalchemy/testing/profiling.py b/lib/sqlalchemy/testing/profiling.py
index a152d5e93..a88cd21ad 100644
--- a/lib/sqlalchemy/testing/profiling.py
+++ b/lib/sqlalchemy/testing/profiling.py
@@ -1,5 +1,5 @@
# testing/profiling.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/testing/provision.py b/lib/sqlalchemy/testing/provision.py
index 3f9ddae73..b3812cf04 100644
--- a/lib/sqlalchemy/testing/provision.py
+++ b/lib/sqlalchemy/testing/provision.py
@@ -1,8 +1,12 @@
from sqlalchemy.engine import url as sa_url
from sqlalchemy import text
+from sqlalchemy import exc
from sqlalchemy.util import compat
from . import config, engines
import os
+import time
+import logging
+log = logging.getLogger(__name__)
FOLLOWER_IDENT = None
@@ -158,7 +162,18 @@ def _pg_create_db(cfg, eng, ident):
except Exception:
pass
currentdb = conn.scalar("select current_database()")
- conn.execute("CREATE DATABASE %s TEMPLATE %s" % (ident, currentdb))
+ for attempt in range(3):
+ try:
+ conn.execute(
+ "CREATE DATABASE %s TEMPLATE %s" % (ident, currentdb))
+ except exc.OperationalError as err:
+ if attempt != 2 and "accessed by other users" in str(err):
+ time.sleep(.2)
+ continue
+ else:
+ raise
+ else:
+ break
@_create_db.for_db("mysql")
@@ -208,19 +223,86 @@ def _sqlite_drop_db(cfg, eng, ident):
@_drop_db.for_db("mysql")
def _mysql_drop_db(cfg, eng, ident):
with eng.connect() as conn:
- try:
- conn.execute("DROP DATABASE %s_test_schema" % ident)
- except Exception:
- pass
- try:
- conn.execute("DROP DATABASE %s_test_schema_2" % ident)
- except Exception:
- pass
- try:
- conn.execute("DROP DATABASE %s" % ident)
- except Exception:
- pass
+ conn.execute("DROP DATABASE %s_test_schema" % ident)
+ conn.execute("DROP DATABASE %s_test_schema_2" % ident)
+ conn.execute("DROP DATABASE %s" % ident)
+@_create_db.for_db("oracle")
+def _oracle_create_db(cfg, eng, ident):
+ # NOTE: make sure you've run "ALTER DATABASE default tablespace users" or
+ # similar, so that the default tablespace is not "system"; reflection will
+ # fail otherwise
+ with eng.connect() as conn:
+ conn.execute("create user %s identified by xe" % ident)
+ conn.execute("create user %s_ts1 identified by xe" % ident)
+ conn.execute("create user %s_ts2 identified by xe" % ident)
+ conn.execute("grant dba to %s" % (ident, ))
+ conn.execute("grant unlimited tablespace to %s" % ident)
+ conn.execute("grant unlimited tablespace to %s_ts1" % ident)
+ conn.execute("grant unlimited tablespace to %s_ts2" % ident)
+
+@_configure_follower.for_db("oracle")
+def _oracle_configure_follower(config, ident):
+ config.test_schema = "%s_ts1" % ident
+ config.test_schema_2 = "%s_ts2" % ident
+
+
+def _ora_drop_ignore(conn, dbname):
+ try:
+ conn.execute("drop user %s cascade" % dbname)
+ log.info("Reaped db: %s" % dbname)
+ return True
+ except exc.DatabaseError as err:
+ log.warn("couldn't drop db: %s" % err)
+ return False
+
+
+@_drop_db.for_db("oracle")
+def _oracle_drop_db(cfg, eng, ident):
+ with eng.connect() as conn:
+ # cx_Oracle seems to occasionally leak open connections when a large
+        # suite is run, even if we confirm we have zero references to
+ # connection objects.
+ # while there is a "kill session" command in Oracle,
+ # it unfortunately does not release the connection sufficiently.
+ _ora_drop_ignore(conn, ident)
+ _ora_drop_ignore(conn, "%s_ts1" % ident)
+ _ora_drop_ignore(conn, "%s_ts2" % ident)
+
+
+def reap_oracle_dbs(eng):
+ log.info("Reaping Oracle dbs...")
+ with eng.connect() as conn:
+ to_reap = conn.execute(
+ "select u.username from all_users u where username "
+ "like 'TEST_%' and not exists (select username "
+ "from v$session where username=u.username)")
+ all_names = set([username.lower() for (username, ) in to_reap])
+ to_drop = set()
+ for name in all_names:
+ if name.endswith("_ts1") or name.endswith("_ts2"):
+ continue
+ else:
+ to_drop.add(name)
+ if "%s_ts1" % name in all_names:
+ to_drop.add("%s_ts1" % name)
+ if "%s_ts2" % name in all_names:
+ to_drop.add("%s_ts2" % name)
+
+ dropped = total = 0
+ for total, username in enumerate(to_drop, 1):
+ if _ora_drop_ignore(conn, username):
+ dropped += 1
+ log.info(
+ "Dropped %d out of %d stale databases detected", dropped, total)
+
+
+@_follower_url_from_main.for_db("oracle")
+def _oracle_follower_url_from_main(url, ident):
+ url = sa_url.make_url(url)
+ url.username = ident
+ url.password = 'xe'
+ return url
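
Note: the retry loop added to _pg_create_db above works around transient "source database is being accessed by other users" errors that PostgreSQL raises while another session is still attached to the template database. A minimal standalone sketch of the same pattern, assuming an already-open, autocommitting Connection is passed in (the function name is illustrative, not part of the patch):

    import time
    from sqlalchemy import exc

    def create_db_with_retry(conn, ident, template, attempts=3):
        # CREATE DATABASE ... TEMPLATE fails while another session is
        # connected to the template; retry a few times with a short pause.
        for attempt in range(attempts):
            try:
                conn.execute(
                    "CREATE DATABASE %s TEMPLATE %s" % (ident, template))
            except exc.OperationalError as err:
                if attempt == attempts - 1 or \
                        "accessed by other users" not in str(err):
                    raise
                time.sleep(.2)
            else:
                break
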
diff --git a/lib/sqlalchemy/testing/requirements.py b/lib/sqlalchemy/testing/requirements.py
index 1b5d6e883..d4c0dff8f 100644
--- a/lib/sqlalchemy/testing/requirements.py
+++ b/lib/sqlalchemy/testing/requirements.py
@@ -1,5 +1,5 @@
# testing/requirements.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/testing/runner.py b/lib/sqlalchemy/testing/runner.py
index 92a03061e..b58aa019d 100644
--- a/lib/sqlalchemy/testing/runner.py
+++ b/lib/sqlalchemy/testing/runner.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# testing/runner.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/testing/schema.py b/lib/sqlalchemy/testing/schema.py
index 257578668..c8526fbed 100644
--- a/lib/sqlalchemy/testing/schema.py
+++ b/lib/sqlalchemy/testing/schema.py
@@ -1,5 +1,5 @@
# testing/schema.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/testing/suite/test_reflection.py b/lib/sqlalchemy/testing/suite/test_reflection.py
index 288a85973..1874f6210 100644
--- a/lib/sqlalchemy/testing/suite/test_reflection.py
+++ b/lib/sqlalchemy/testing/suite/test_reflection.py
@@ -40,6 +40,15 @@ class ComponentReflectionTest(fixtures.TablesTest):
__backend__ = True
@classmethod
+ def setup_bind(cls):
+ if config.requirements.independent_connections.enabled:
+ from sqlalchemy import pool
+ return engines.testing_engine(
+ options=dict(poolclass=pool.StaticPool))
+ else:
+ return config.db
+
+ @classmethod
def define_tables(cls, metadata):
cls.define_reflected_tables(metadata, None)
if testing.requires.schemas.enabled:
@@ -202,7 +211,7 @@ class ComponentReflectionTest(fixtures.TablesTest):
@testing.requires.temp_table_names
def test_get_temp_table_names(self):
- insp = inspect(testing.db)
+ insp = inspect(self.bind)
temp_table_names = insp.get_temp_table_names()
eq_(sorted(temp_table_names), ['user_tmp'])
@@ -210,7 +219,7 @@ class ComponentReflectionTest(fixtures.TablesTest):
@testing.requires.temp_table_names
@testing.requires.temporary_views
def test_get_temp_view_names(self):
- insp = inspect(self.metadata.bind)
+ insp = inspect(self.bind)
temp_table_names = insp.get_temp_view_names()
eq_(sorted(temp_table_names), ['user_tmp_v'])
@@ -348,7 +357,7 @@ class ComponentReflectionTest(fixtures.TablesTest):
@testing.requires.temp_table_reflection
def test_get_temp_table_columns(self):
- meta = MetaData(testing.db)
+ meta = MetaData(self.bind)
user_tmp = self.tables.user_tmp
insp = inspect(meta.bind)
cols = insp.get_columns('user_tmp')
@@ -361,7 +370,7 @@ class ComponentReflectionTest(fixtures.TablesTest):
@testing.requires.view_column_reflection
@testing.requires.temporary_views
def test_get_temp_view_columns(self):
- insp = inspect(self.metadata.bind)
+ insp = inspect(self.bind)
cols = insp.get_columns('user_tmp_v')
eq_(
[col['name'] for col in cols],
@@ -503,7 +512,7 @@ class ComponentReflectionTest(fixtures.TablesTest):
@testing.requires.temp_table_reflection
@testing.requires.unique_constraint_reflection
def test_get_temp_table_unique_constraints(self):
- insp = inspect(self.metadata.bind)
+ insp = inspect(self.bind)
reflected = insp.get_unique_constraints('user_tmp')
for refl in reflected:
# Different dialects handle duplicate index and constraints
@@ -513,7 +522,7 @@ class ComponentReflectionTest(fixtures.TablesTest):
@testing.requires.temp_table_reflection
def test_get_temp_table_indexes(self):
- insp = inspect(self.metadata.bind)
+ insp = inspect(self.bind)
indexes = insp.get_indexes('user_tmp')
for ind in indexes:
ind.pop('dialect_options', None)
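
Note: the new setup_bind() hook hands the reflection tests an engine backed by a single reusable connection, so temporary tables created during setup remain visible when they are later reflected. Roughly the same effect outside the test harness, sketched against an in-memory SQLite database:

    from sqlalchemy import create_engine, inspect, pool

    # StaticPool hands out the same DBAPI connection for every checkout,
    # so temporary tables created earlier stay visible to reflection.
    eng = create_engine("sqlite://", poolclass=pool.StaticPool)
    eng.execute("CREATE TEMPORARY TABLE user_tmp (id INTEGER)")
    insp = inspect(eng)
    print(insp.get_temp_table_names())   # ['user_tmp']
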
diff --git a/lib/sqlalchemy/testing/suite/test_sequence.py b/lib/sqlalchemy/testing/suite/test_sequence.py
index bbb4ba65c..b2d52f27c 100644
--- a/lib/sqlalchemy/testing/suite/test_sequence.py
+++ b/lib/sqlalchemy/testing/suite/test_sequence.py
@@ -86,11 +86,11 @@ class HasSequenceTest(fixtures.TestBase):
@testing.requires.schemas
def test_has_sequence_schema(self):
- s1 = Sequence('user_id_seq', schema="test_schema")
+ s1 = Sequence('user_id_seq', schema=config.test_schema)
testing.db.execute(schema.CreateSequence(s1))
try:
eq_(testing.db.dialect.has_sequence(
- testing.db, 'user_id_seq', schema="test_schema"), True)
+ testing.db, 'user_id_seq', schema=config.test_schema), True)
finally:
testing.db.execute(schema.DropSequence(s1))
@@ -101,7 +101,7 @@ class HasSequenceTest(fixtures.TestBase):
@testing.requires.schemas
def test_has_sequence_schemas_neg(self):
eq_(testing.db.dialect.has_sequence(testing.db, 'user_id_seq',
- schema="test_schema"),
+ schema=config.test_schema),
False)
@testing.requires.schemas
@@ -110,14 +110,14 @@ class HasSequenceTest(fixtures.TestBase):
testing.db.execute(schema.CreateSequence(s1))
try:
eq_(testing.db.dialect.has_sequence(testing.db, 'user_id_seq',
- schema="test_schema"),
+ schema=config.test_schema),
False)
finally:
testing.db.execute(schema.DropSequence(s1))
@testing.requires.schemas
def test_has_sequence_remote_not_in_default(self):
- s1 = Sequence('user_id_seq', schema="test_schema")
+ s1 = Sequence('user_id_seq', schema=config.test_schema)
testing.db.execute(schema.CreateSequence(s1))
try:
eq_(testing.db.dialect.has_sequence(testing.db, 'user_id_seq'),
diff --git a/lib/sqlalchemy/testing/util.py b/lib/sqlalchemy/testing/util.py
index e9437948a..754e2ad92 100644
--- a/lib/sqlalchemy/testing/util.py
+++ b/lib/sqlalchemy/testing/util.py
@@ -1,5 +1,5 @@
# testing/util.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -248,7 +248,7 @@ def drop_all_tables(engine, inspector, schema=None, include_names=None):
if include_names is not None and tname not in include_names:
continue
conn.execute(DropTable(
- Table(tname, MetaData())
+ Table(tname, MetaData(), schema=schema)
))
elif fkcs:
if not engine.dialect.supports_alter:
diff --git a/lib/sqlalchemy/testing/warnings.py b/lib/sqlalchemy/testing/warnings.py
index 19b632d34..de372dcc4 100644
--- a/lib/sqlalchemy/testing/warnings.py
+++ b/lib/sqlalchemy/testing/warnings.py
@@ -1,5 +1,5 @@
# testing/warnings.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/types.py b/lib/sqlalchemy/types.py
index ac6d3b439..e054cc857 100644
--- a/lib/sqlalchemy/types.py
+++ b/lib/sqlalchemy/types.py
@@ -1,5 +1,5 @@
# types.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/util/__init__.py b/lib/sqlalchemy/util/__init__.py
index a15ca8efa..927adf722 100644
--- a/lib/sqlalchemy/util/__init__.py
+++ b/lib/sqlalchemy/util/__init__.py
@@ -1,5 +1,5 @@
# util/__init__.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/util/_collections.py b/lib/sqlalchemy/util/_collections.py
index 3869775cf..c29b81f6a 100644
--- a/lib/sqlalchemy/util/_collections.py
+++ b/lib/sqlalchemy/util/_collections.py
@@ -1,5 +1,5 @@
# util/_collections.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -10,7 +10,8 @@
from __future__ import absolute_import
import weakref
import operator
-from .compat import threading, itertools_filterfalse, string_types
+from .compat import threading, itertools_filterfalse, string_types, \
+ binary_types
from . import py2k
import types
import collections
@@ -794,7 +795,8 @@ def coerce_generator_arg(arg):
def to_list(x, default=None):
if x is None:
return default
- if not isinstance(x, collections.Iterable) or isinstance(x, string_types):
+ if not isinstance(x, collections.Iterable) or \
+ isinstance(x, string_types + binary_types):
return [x]
elif isinstance(x, list):
return x
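
Note: with binary_types added to the scalar check, util.to_list() now wraps a bytes object as a single list element instead of iterating over its bytes; for example:

    from sqlalchemy import util

    print(util.to_list(b'abc'))            # [b'abc'], not [97, 98, 99]
    print(util.to_list(['abc', 'def']))    # lists pass through unchanged
    print(util.to_list('abc'))             # ['abc']; strings were already wrapped
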
diff --git a/lib/sqlalchemy/util/compat.py b/lib/sqlalchemy/util/compat.py
index 737b8a087..ee4a20f9b 100644
--- a/lib/sqlalchemy/util/compat.py
+++ b/lib/sqlalchemy/util/compat.py
@@ -1,5 +1,5 @@
# util/compat.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -62,6 +62,7 @@ if py3k:
)
string_types = str,
+ binary_types = bytes,
binary_type = bytes
text_type = str
int_types = int,
@@ -115,6 +116,7 @@ else:
from cStringIO import StringIO as byte_buffer
string_types = basestring,
+ binary_types = bytes,
binary_type = str
text_type = unicode
int_types = int, long
diff --git a/lib/sqlalchemy/util/deprecations.py b/lib/sqlalchemy/util/deprecations.py
index 4c7ea47e3..12fa63602 100644
--- a/lib/sqlalchemy/util/deprecations.py
+++ b/lib/sqlalchemy/util/deprecations.py
@@ -1,5 +1,5 @@
# util/deprecations.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/util/langhelpers.py b/lib/sqlalchemy/util/langhelpers.py
index 11aa9384d..4675f7cdb 100644
--- a/lib/sqlalchemy/util/langhelpers.py
+++ b/lib/sqlalchemy/util/langhelpers.py
@@ -1,5 +1,5 @@
# util/langhelpers.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -59,6 +59,13 @@ class safe_reraise(object):
self._exc_info = None # remove potential circular references
compat.reraise(exc_type, exc_value, exc_tb)
else:
+ if not compat.py3k and self._exc_info and self._exc_info[1]:
+ # emulate Py3K's behavior of telling us when an exception
+ # occurs in an exception handler.
+ warn(
+ "An exception has occurred during handling of a "
+ "previous exception. The previous exception "
+ "is:\n %s %s\n" % (self._exc_info[0], self._exc_info[1]))
self._exc_info = None # remove potential circular references
compat.reraise(type_, value, traceback)
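
Note: the added branch makes safe_reraise() warn under Python 2 when the exception raised inside the handler replaces the one originally in flight, mirroring Python 3's implicit exception chaining. A small sketch, with a hypothetical failing cleanup step:

    from sqlalchemy import util

    def cleanup():
        # hypothetical cleanup that itself fails
        raise RuntimeError("cleanup failed")

    def do_work():
        try:
            raise ValueError("original failure")
        except ValueError:
            # safe_reraise re-raises whichever exception wins; with this
            # change, Python 2 also warns that the ValueError was in
            # flight when the RuntimeError occurred.
            with util.safe_reraise():
                cleanup()

    try:
        do_work()
    except RuntimeError as err:
        print(err)   # "cleanup failed"; a warning precedes it on Python 2
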
diff --git a/lib/sqlalchemy/util/queue.py b/lib/sqlalchemy/util/queue.py
index 29e00a434..221347158 100644
--- a/lib/sqlalchemy/util/queue.py
+++ b/lib/sqlalchemy/util/queue.py
@@ -1,5 +1,5 @@
# util/queue.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/util/topological.py b/lib/sqlalchemy/util/topological.py
index 0cd2bae29..5c5c54c53 100644
--- a/lib/sqlalchemy/util/topological.py
+++ b/lib/sqlalchemy/util/topological.py
@@ -1,5 +1,5 @@
# util/topological.py
-# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
diff --git a/reap_oracle_dbs.py b/reap_oracle_dbs.py
new file mode 100644
index 000000000..ff638a01e
--- /dev/null
+++ b/reap_oracle_dbs.py
@@ -0,0 +1,24 @@
+"""Drop Oracle databases that are left over from a
+multiprocessing test run.
+
+Currently the cx_Oracle driver sometimes does not release a
+TCP connection even if close() is called, which prevents the provisioning
+system from dropping a database in-process.
+
+"""
+from sqlalchemy.testing.plugin import plugin_base
+from sqlalchemy.testing import engines
+from sqlalchemy.testing import provision
+import logging
+
+logging.basicConfig()
+logging.getLogger(provision.__name__).setLevel(logging.INFO)
+
+plugin_base.read_config()
+oracle = plugin_base.file_config.get('db', 'oracle')
+from sqlalchemy.testing import provision
+
+engine = engines.testing_engine(oracle, {})
+provision.reap_oracle_dbs(engine)
+
+
diff --git a/setup.cfg b/setup.cfg
index 3881f2b99..e8195cf5b 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -24,7 +24,7 @@ profile_file=test/profiles.txt
# name of a "loopback" link set up on the oracle database.
# to create this, suppose your DB is scott/tiger@xe. You'd create it
# like:
-# create database link test_link connect to scott identified by tiger using 'xe';
+# create public database link test_link connect to scott identified by tiger using 'xe';
oracle_db_link = test_link
# host name of a postgres database that has the postgres_fdw extension.
diff --git a/test/base/test_utils.py b/test/base/test_utils.py
index 6d162ff4d..fcb9a59a3 100644
--- a/test/base/test_utils.py
+++ b/test/base/test_utils.py
@@ -3,11 +3,11 @@ import sys
from sqlalchemy import util, sql, exc, testing
from sqlalchemy.testing import assert_raises, assert_raises_message, fixtures
-from sqlalchemy.testing import eq_, is_, ne_, fails_if, mock
+from sqlalchemy.testing import eq_, is_, ne_, fails_if, mock, expect_warnings
from sqlalchemy.testing.util import picklers, gc_collect
from sqlalchemy.util import classproperty, WeakSequence, get_callable_argspec
from sqlalchemy.sql import column
-from sqlalchemy.util import langhelpers
+from sqlalchemy.util import langhelpers, compat
import inspect
@@ -412,6 +412,20 @@ class ToListTest(fixtures.TestBase):
[1, 2, 3]
)
+ def test_from_bytes(self):
+
+ eq_(
+ util.to_list(compat.b('abc')),
+ [compat.b('abc')]
+ )
+
+ eq_(
+ util.to_list([
+ compat.b('abc'), compat.b('def')]),
+ [compat.b('abc'), compat.b('def')]
+ )
+
+
class ColumnCollectionTest(fixtures.TestBase):
def test_in(self):
@@ -2192,6 +2206,38 @@ class ReraiseTest(fixtures.TestBase):
if testing.requires.python3.enabled:
is_(moe.__cause__, me)
+ @testing.requires.python2
+ def test_safe_reraise_py2k_warning(self):
+ class MyException(Exception):
+ pass
+
+ class MyOtherException(Exception):
+ pass
+
+ m1 = MyException("exc one")
+ m2 = MyOtherException("exc two")
+
+ def go2():
+ raise m2
+
+ def go():
+ try:
+ raise m1
+ except:
+ with util.safe_reraise():
+ go2()
+
+ with expect_warnings(
+ "An exception has occurred during handling of a previous "
+ "exception. The previous exception "
+ "is:.*MyException.*exc one"
+ ):
+ try:
+ go()
+ assert False
+ except MyOtherException:
+ pass
+
class TestClassProperty(fixtures.TestBase):
diff --git a/test/dialect/mssql/test_compiler.py b/test/dialect/mssql/test_compiler.py
index d91c79db2..b59ca4fd1 100644
--- a/test/dialect/mssql/test_compiler.py
+++ b/test/dialect/mssql/test_compiler.py
@@ -1,5 +1,5 @@
# -*- encoding: utf-8
-from sqlalchemy.testing import eq_
+from sqlalchemy.testing import eq_, is_
from sqlalchemy import schema
from sqlalchemy.sql import table, column
from sqlalchemy.databases import mssql
@@ -521,6 +521,30 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
assert t.c.x in set(c._create_result_map()['x'][1])
assert t.c.y in set(c._create_result_map()['y'][1])
+ def test_limit_offset_w_ambiguous_cols(self):
+ t = table('t', column('x', Integer), column('y', Integer))
+
+ cols = [t.c.x, t.c.x.label('q'), t.c.x.label('p'), t.c.y]
+ s = select(cols).where(t.c.x == 5).order_by(t.c.y).limit(10).offset(20)
+
+ self.assert_compile(
+ s,
+ "SELECT anon_1.x, anon_1.q, anon_1.p, anon_1.y "
+ "FROM (SELECT t.x AS x, t.x AS q, t.x AS p, t.y AS y, "
+ "ROW_NUMBER() OVER (ORDER BY t.y) AS mssql_rn "
+ "FROM t "
+ "WHERE t.x = :x_1) AS anon_1 "
+ "WHERE mssql_rn > :param_1 AND mssql_rn <= :param_2 + :param_1",
+ checkparams={'param_1': 20, 'param_2': 10, 'x_1': 5}
+ )
+ c = s.compile(dialect=mssql.MSDialect())
+ eq_(len(c._result_columns), 4)
+
+ result_map = c._create_result_map()
+
+ for col in cols:
+ is_(result_map[col.key][1][0], col)
+
def test_limit_offset_with_correlated_order_by(self):
t1 = table('t1', column('x', Integer), column('y', Integer))
t2 = table('t2', column('x', Integer), column('y', Integer))
diff --git a/test/dialect/mssql/test_query.py b/test/dialect/mssql/test_query.py
index 32edfd7eb..81fa2fb1e 100644
--- a/test/dialect/mssql/test_query.py
+++ b/test/dialect/mssql/test_query.py
@@ -134,6 +134,7 @@ class LegacySchemaAliasingTest(fixtures.TestBase, AssertsCompiledSQL):
class IdentityInsertTest(fixtures.TestBase, AssertsCompiledSQL):
__only_on__ = 'mssql'
__dialect__ = mssql.MSDialect()
+ __backend__ = True
@classmethod
def setup_class(cls):
@@ -187,6 +188,7 @@ class IdentityInsertTest(fixtures.TestBase, AssertsCompiledSQL):
class QueryUnicodeTest(fixtures.TestBase):
__only_on__ = 'mssql'
+ __backend__ = True
def test_convert_unicode(self):
meta = MetaData(testing.db)
@@ -214,6 +216,7 @@ class QueryUnicodeTest(fixtures.TestBase):
class QueryTest(testing.AssertsExecutionResults, fixtures.TestBase):
__only_on__ = 'mssql'
+ __backend__ = True
def test_fetchid_trigger(self):
"""
@@ -484,6 +487,7 @@ class MatchTest(fixtures.TestBase, AssertsCompiledSQL):
__only_on__ = 'mssql'
__skip_if__ = full_text_search_missing,
+ __backend__ = True
@classmethod
def setup_class(cls):
diff --git a/test/dialect/mssql/test_reflection.py b/test/dialect/mssql/test_reflection.py
index e016a6e41..ccde93ba1 100644
--- a/test/dialect/mssql/test_reflection.py
+++ b/test/dialect/mssql/test_reflection.py
@@ -12,6 +12,7 @@ from sqlalchemy import util
class ReflectionTest(fixtures.TestBase, ComparesTables):
__only_on__ = 'mssql'
+ __backend__ = True
@testing.provide_metadata
def test_basic_reflection(self):
@@ -229,6 +230,7 @@ class InfoCoerceUnicodeTest(fixtures.TestBase, AssertsCompiledSQL):
class ReflectHugeViewTest(fixtures.TestBase):
__only_on__ = 'mssql'
+ __backend__ = True
# crashes on freetds 0.91, not worth it
__skip_if__ = (
diff --git a/test/dialect/mssql/test_types.py b/test/dialect/mssql/test_types.py
index 100e4e0ed..37c0e7060 100644
--- a/test/dialect/mssql/test_types.py
+++ b/test/dialect/mssql/test_types.py
@@ -1,5 +1,6 @@
# -*- encoding: utf-8
from sqlalchemy.testing import eq_, engines, pickleable, assert_raises_message
+from sqlalchemy.testing import is_, is_not_
import datetime
import os
from sqlalchemy import Table, Column, MetaData, Float, \
@@ -725,44 +726,37 @@ class TypeRoundTripTest(
def test_autoincrement(self):
Table(
'ai_1', metadata,
- Column('int_y', Integer, primary_key=True),
+ Column('int_y', Integer, primary_key=True, autoincrement=True),
Column(
- 'int_n', Integer, DefaultClause('0'),
- primary_key=True, autoincrement=False))
+ 'int_n', Integer, DefaultClause('0'), primary_key=True))
Table(
'ai_2', metadata,
- Column('int_y', Integer, primary_key=True),
- Column('int_n', Integer, DefaultClause('0'),
- primary_key=True, autoincrement=False))
+ Column('int_y', Integer, primary_key=True, autoincrement=True),
+ Column('int_n', Integer, DefaultClause('0'), primary_key=True))
Table(
'ai_3', metadata,
- Column('int_n', Integer, DefaultClause('0'),
- primary_key=True, autoincrement=False),
- Column('int_y', Integer, primary_key=True))
+ Column('int_n', Integer, DefaultClause('0'), primary_key=True),
+ Column('int_y', Integer, primary_key=True, autoincrement=True))
Table(
'ai_4', metadata,
- Column('int_n', Integer, DefaultClause('0'),
- primary_key=True, autoincrement=False),
- Column('int_n2', Integer, DefaultClause('0'),
- primary_key=True, autoincrement=False))
+ Column('int_n', Integer, DefaultClause('0'), primary_key=True),
+ Column('int_n2', Integer, DefaultClause('0'), primary_key=True))
Table(
'ai_5', metadata,
- Column('int_y', Integer, primary_key=True),
- Column('int_n', Integer, DefaultClause('0'),
- primary_key=True, autoincrement=False))
+ Column('int_y', Integer, primary_key=True, autoincrement=True),
+ Column('int_n', Integer, DefaultClause('0'), primary_key=True))
Table(
'ai_6', metadata,
- Column('o1', String(1), DefaultClause('x'),
- primary_key=True),
- Column('int_y', Integer, primary_key=True))
+ Column('o1', String(1), DefaultClause('x'), primary_key=True),
+ Column('int_y', Integer, primary_key=True, autoincrement=True))
Table(
'ai_7', metadata,
Column('o1', String(1), DefaultClause('x'),
primary_key=True),
Column('o2', String(1), DefaultClause('x'),
primary_key=True),
- Column('int_y', Integer, primary_key=True))
+ Column('int_y', Integer, autoincrement=True, primary_key=True))
Table(
'ai_8', metadata,
Column('o1', String(1), DefaultClause('x'),
@@ -778,13 +772,15 @@ class TypeRoundTripTest(
for name in table_names:
tbl = Table(name, mr, autoload=True)
tbl = metadata.tables[name]
- for c in tbl.c:
- if c.name.startswith('int_y'):
- assert c.autoincrement, name
- assert tbl._autoincrement_column is c, name
- elif c.name.startswith('int_n'):
- assert not c.autoincrement, name
- assert tbl._autoincrement_column is not c, name
+
+ # test that the flag itself reflects appropriately
+ for col in tbl.c:
+ if 'int_y' in col.name:
+ is_(col.autoincrement, True)
+ is_(tbl._autoincrement_column, col)
+ else:
+ eq_(col.autoincrement, 'auto')
+ is_not_(tbl._autoincrement_column, col)
# mxodbc can't handle scope_identity() with DEFAULT VALUES
diff --git a/test/dialect/mysql/test_compiler.py b/test/dialect/mysql/test_compiler.py
index 0571ce526..8a7893445 100644
--- a/test/dialect/mysql/test_compiler.py
+++ b/test/dialect/mysql/test_compiler.py
@@ -576,3 +576,30 @@ class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
'PRIMARY KEY (id, other_id)'
')PARTITION BY HASH(other_id) PARTITIONS 2'
)
+
+ def test_inner_join(self):
+ t1 = table('t1', column('x'))
+ t2 = table('t2', column('y'))
+
+ self.assert_compile(
+ t1.join(t2, t1.c.x == t2.c.y),
+ "t1 INNER JOIN t2 ON t1.x = t2.y"
+ )
+
+ def test_outer_join(self):
+ t1 = table('t1', column('x'))
+ t2 = table('t2', column('y'))
+
+ self.assert_compile(
+ t1.outerjoin(t2, t1.c.x == t2.c.y),
+ "t1 LEFT OUTER JOIN t2 ON t1.x = t2.y"
+ )
+
+ def test_full_outer_join(self):
+ t1 = table('t1', column('x'))
+ t2 = table('t2', column('y'))
+
+ self.assert_compile(
+ t1.outerjoin(t2, t1.c.x == t2.c.y, full=True),
+ "t1 FULL OUTER JOIN t2 ON t1.x = t2.y"
+ ) \ No newline at end of file
diff --git a/test/dialect/mysql/test_dialect.py b/test/dialect/mysql/test_dialect.py
index 1014cad25..ab719b368 100644
--- a/test/dialect/mysql/test_dialect.py
+++ b/test/dialect/mysql/test_dialect.py
@@ -11,6 +11,7 @@ import datetime
class DialectTest(fixtures.TestBase):
__backend__ = True
+ __only_on__ = 'mysql'
def test_ssl_arguments_mysqldb(self):
from sqlalchemy.dialects.mysql import mysqldb
@@ -97,6 +98,29 @@ class DialectTest(fixtures.TestBase):
conn = eng.connect()
eq_(conn.dialect._connection_charset, enc)
+ def test_autocommit_isolation_level(self):
+ c = testing.db.connect().execution_options(
+ isolation_level='AUTOCOMMIT'
+ )
+ assert c.execute('SELECT @@autocommit;').scalar()
+
+ c = c.execution_options(isolation_level='READ COMMITTED')
+ assert not c.execute('SELECT @@autocommit;').scalar()
+
+ def test_isolation_level(self):
+ values = {
+ # sqlalchemy -> mysql
+ 'READ UNCOMMITTED': 'READ-UNCOMMITTED',
+ 'READ COMMITTED': 'READ-COMMITTED',
+ 'REPEATABLE READ': 'REPEATABLE-READ',
+ 'SERIALIZABLE': 'SERIALIZABLE'
+ }
+ for sa_value, mysql_value in values.items():
+ c = testing.db.connect().execution_options(
+ isolation_level=sa_value
+ )
+ assert c.execute('SELECT @@tx_isolation;').scalar() == mysql_value
+
class SQLModeDetectionTest(fixtures.TestBase):
__only_on__ = 'mysql'
__backend__ = True
@@ -162,3 +186,4 @@ class ExecutionTest(fixtures.TestBase):
def test_sysdate(self):
d = testing.db.scalar(func.sysdate())
assert isinstance(d, datetime.datetime)
+
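
Note: these tests cover the AUTOCOMMIT isolation level now accepted by the MySQL dialect via execution_options(). A usage sketch, with a hypothetical connection URL:

    from sqlalchemy import create_engine

    eng = create_engine("mysql+pymysql://scott:tiger@localhost/test")  # illustrative URL
    conn = eng.connect().execution_options(isolation_level="AUTOCOMMIT")
    print(conn.execute("SELECT @@autocommit").scalar())       # 1

    conn = conn.execution_options(isolation_level="READ COMMITTED")
    print(conn.execute("SELECT @@tx_isolation").scalar())     # READ-COMMITTED
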
diff --git a/test/dialect/mysql/test_types.py b/test/dialect/mysql/test_types.py
index 1fb152377..e570e0db1 100644
--- a/test/dialect/mysql/test_types.py
+++ b/test/dialect/mysql/test_types.py
@@ -976,12 +976,12 @@ class EnumSetTest(
eq_(
t2.c.value.type.enums[0:2],
- (u('réveillé'), u('drôle')) # u'S’il') # eh ?
+ [u('réveillé'), u('drôle')] # u'S’il') # eh ?
)
eq_(
t2.c.value2.type.enums[0:2],
- (u('réveillé'), u('drôle')) # u'S’il') # eh ?
+ [u('réveillé'), u('drôle')] # u'S’il') # eh ?
)
def test_enum_compile(self):
@@ -1019,13 +1019,13 @@ class EnumSetTest(
reflected = Table('mysql_enum', MetaData(testing.db),
autoload=True)
for t in enum_table, reflected:
- eq_(t.c.e1.type.enums, ("a",))
- eq_(t.c.e2.type.enums, ("",))
- eq_(t.c.e3.type.enums, ("a",))
- eq_(t.c.e4.type.enums, ("",))
- eq_(t.c.e5.type.enums, ("a", ""))
- eq_(t.c.e6.type.enums, ("", "a"))
- eq_(t.c.e7.type.enums, ("", "'a'", "b'b", "'"))
+ eq_(t.c.e1.type.enums, ["a"])
+ eq_(t.c.e2.type.enums, [""])
+ eq_(t.c.e3.type.enums, ["a"])
+ eq_(t.c.e4.type.enums, [""])
+ eq_(t.c.e5.type.enums, ["a", ""])
+ eq_(t.c.e6.type.enums, ["", "a"])
+ eq_(t.c.e7.type.enums, ["", "'a'", "b'b", "'"])
@testing.provide_metadata
@testing.exclude('mysql', '<', (5,))
diff --git a/test/dialect/postgresql/test_dialect.py b/test/dialect/postgresql/test_dialect.py
index 52620bb78..c0e1819d6 100644
--- a/test/dialect/postgresql/test_dialect.py
+++ b/test/dialect/postgresql/test_dialect.py
@@ -15,6 +15,9 @@ import logging
import logging.handlers
from sqlalchemy.testing.mock import Mock
from sqlalchemy.engine import engine_from_config
+from sqlalchemy.engine import url
+from sqlalchemy.testing import is_
+from sqlalchemy.testing import expect_deprecated
class MiscTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
@@ -79,6 +82,13 @@ class MiscTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
psycopg2.Error)
assert isinstance(exception, exc.OperationalError)
+ def test_deprecated_dialect_name_still_loads(self):
+ with expect_deprecated(
+ "The 'postgres' dialect name "
+ "has been renamed to 'postgresql'"):
+ dialect = url.URL("postgres").get_dialect()
+ is_(dialect, postgresql.dialect)
+
# currently not passing with pg 9.3 that does not seem to generate
# any notices here, would rather find a way to mock this
@testing.requires.no_coverage
diff --git a/test/dialect/postgresql/test_reflection.py b/test/dialect/postgresql/test_reflection.py
index 851facd2a..8da18108f 100644
--- a/test/dialect/postgresql/test_reflection.py
+++ b/test/dialect/postgresql/test_reflection.py
@@ -202,7 +202,7 @@ class DomainReflectionTest(fixtures.TestBase, AssertsExecutionResults):
table = Table('enum_test', metadata, autoload=True)
eq_(
table.c.data.type.enums,
- ('test', )
+ ['test']
)
def test_table_is_reflected_test_schema(self):
diff --git a/test/dialect/postgresql/test_types.py b/test/dialect/postgresql/test_types.py
index 50b66f290..8818a9941 100644
--- a/test/dialect/postgresql/test_types.py
+++ b/test/dialect/postgresql/test_types.py
@@ -170,7 +170,7 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults):
t2 = Table('table', m2, autoload=True)
eq_(
t2.c.value.type.enums,
- (util.u('réveillé'), util.u('drôle'), util.u('S’il'))
+ [util.u('réveillé'), util.u('drôle'), util.u('S’il')]
)
@testing.provide_metadata
@@ -445,8 +445,9 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults):
e.connect()
assert not dialect.supports_native_enum
+ @testing.provide_metadata
def test_reflection(self):
- metadata = MetaData(testing.db)
+ metadata = self.metadata
etype = Enum('four', 'five', 'six', name='fourfivesixtype',
metadata=metadata)
t1 = Table(
@@ -458,18 +459,16 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults):
'one', 'two', 'three', name='onetwothreetype')),
Column('value2', etype))
metadata.create_all()
- try:
- m2 = MetaData(testing.db)
- t2 = Table('table', m2, autoload=True)
- assert t2.c.value.type.enums == ('one', 'two', 'three')
- assert t2.c.value.type.name == 'onetwothreetype'
- assert t2.c.value2.type.enums == ('four', 'five', 'six')
- assert t2.c.value2.type.name == 'fourfivesixtype'
- finally:
- metadata.drop_all()
+ m2 = MetaData(testing.db)
+ t2 = Table('table', m2, autoload=True)
+ eq_(t2.c.value.type.enums, ['one', 'two', 'three'])
+ eq_(t2.c.value.type.name, 'onetwothreetype')
+ eq_(t2.c.value2.type.enums, ['four', 'five', 'six'])
+ eq_(t2.c.value2.type.name, 'fourfivesixtype')
+ @testing.provide_metadata
def test_schema_reflection(self):
- metadata = MetaData(testing.db)
+ metadata = self.metadata
etype = Enum(
'four',
'five',
@@ -478,7 +477,7 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults):
schema='test_schema',
metadata=metadata,
)
- t1 = Table(
+ Table(
'table', metadata,
Column(
'id', Integer, primary_key=True),
@@ -488,16 +487,13 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults):
name='onetwothreetype', schema='test_schema')),
Column('value2', etype))
metadata.create_all()
- try:
- m2 = MetaData(testing.db)
- t2 = Table('table', m2, autoload=True)
- assert t2.c.value.type.enums == ('one', 'two', 'three')
- assert t2.c.value.type.name == 'onetwothreetype'
- assert t2.c.value2.type.enums == ('four', 'five', 'six')
- assert t2.c.value2.type.name == 'fourfivesixtype'
- assert t2.c.value2.type.schema == 'test_schema'
- finally:
- metadata.drop_all()
+ m2 = MetaData(testing.db)
+ t2 = Table('table', m2, autoload=True)
+ eq_(t2.c.value.type.enums, ['one', 'two', 'three'])
+ eq_(t2.c.value.type.name, 'onetwothreetype')
+ eq_(t2.c.value2.type.enums, ['four', 'five', 'six'])
+ eq_(t2.c.value2.type.name, 'fourfivesixtype')
+ eq_(t2.c.value2.type.schema, 'test_schema')
@testing.provide_metadata
def test_custom_subclass(self):
@@ -772,6 +768,15 @@ class ArrayTest(AssertsCompiledSQL, fixtures.TestBase):
checkparams={'param_1': 4, 'param_3': 6, 'param_2': 5}
)
+ def test_contains_override_raises(self):
+ col = column('x', postgresql.ARRAY(Integer))
+
+ assert_raises_message(
+ NotImplementedError,
+ "Operator 'contains' is not supported on this expression",
+ lambda: 'foo' in col
+ )
+
def test_array_contained_by(self):
col = column('x', postgresql.ARRAY(Integer))
self.assert_compile(
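
Note: several of the expectations in this file change because Enum.enums is now a plain Python list rather than a tuple, both as constructed and as reflected. A short sketch, assuming a PostgreSQL database containing a table with a native ENUM column such as the enum_test fixture above (the URL is illustrative):

    from sqlalchemy import create_engine, MetaData, Table

    eng = create_engine("postgresql://scott:tiger@localhost/test")  # illustrative URL
    t = Table('enum_test', MetaData(), autoload=True, autoload_with=eng)
    print(t.c.data.type.enums)        # a Python list of the type's labels
    print(type(t.c.data.type.enums))  # <class 'list'>, no longer a tuple
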
diff --git a/test/dialect/test_oracle.py b/test/dialect/test_oracle.py
index dd4a888ff..1f3e63040 100644
--- a/test/dialect/test_oracle.py
+++ b/test/dialect/test_oracle.py
@@ -23,6 +23,7 @@ from sqlalchemy.testing.mock import Mock
class OutParamTest(fixtures.TestBase, AssertsExecutionResults):
__only_on__ = 'oracle+cx_oracle'
+ __backend__ = True
@classmethod
def setup_class(cls):
@@ -55,6 +56,7 @@ class OutParamTest(fixtures.TestBase, AssertsExecutionResults):
class CXOracleArgsTest(fixtures.TestBase):
__only_on__ = 'oracle+cx_oracle'
+ __backend__ = True
def test_autosetinputsizes(self):
dialect = cx_oracle.dialect()
@@ -76,6 +78,7 @@ class CXOracleArgsTest(fixtures.TestBase):
class QuotedBindRoundTripTest(fixtures.TestBase):
__only_on__ = 'oracle'
+ __backend__ = True
@testing.provide_metadata
def test_table_round_trip(self):
@@ -834,6 +837,7 @@ class CompatFlagsTest(fixtures.TestBase, AssertsCompiledSQL):
class MultiSchemaTest(fixtures.TestBase, AssertsCompiledSQL):
__only_on__ = 'oracle'
+ __backend__ = True
@classmethod
def setup_class(cls):
@@ -861,7 +865,7 @@ create table local_table(
create synonym %(test_schema)s.ptable for %(test_schema)s.parent;
create synonym %(test_schema)s.ctable for %(test_schema)s.child;
-create synonym %(test_schema)s_ptable for %(test_schema)s.parent;
+create synonym %(test_schema)s_pt for %(test_schema)s.parent;
create synonym %(test_schema)s.local_table for local_table;
@@ -883,7 +887,7 @@ drop table %(test_schema)s.parent;
drop table local_table;
drop synonym %(test_schema)s.ctable;
drop synonym %(test_schema)s.ptable;
-drop synonym %(test_schema)s_ptable;
+drop synonym %(test_schema)s_pt;
drop synonym %(test_schema)s.local_table;
""" % {"test_schema": testing.config.test_schema}).split(";"):
@@ -910,11 +914,12 @@ drop synonym %(test_schema)s.local_table;
def test_reflect_alt_table_owner_local_synonym(self):
meta = MetaData(testing.db)
- parent = Table('test_schema_ptable', meta, autoload=True,
+ parent = Table('%s_pt' % testing.config.test_schema, meta, autoload=True,
oracle_resolve_synonyms=True)
self.assert_compile(parent.select(),
- "SELECT test_schema_ptable.id, "
- "test_schema_ptable.data FROM test_schema_ptable")
+ "SELECT %(test_schema)s_pt.id, "
+ "%(test_schema)s_pt.data FROM %(test_schema)s_pt"
+ % {"test_schema": testing.config.test_schema})
select([parent]).execute().fetchall()
def test_reflect_alt_synonym_owner_local_table(self):
@@ -1045,6 +1050,7 @@ drop synonym %(test_schema)s.local_table;
class ConstraintTest(fixtures.TablesTest):
__only_on__ = 'oracle'
+ __backend__ = True
run_deletes = None
@classmethod
@@ -1071,6 +1077,7 @@ class TwoPhaseTest(fixtures.TablesTest):
so requires a carefully written test."""
__only_on__ = 'oracle+cx_oracle'
+ __backend__ = True
@classmethod
def define_tables(cls, metadata):
@@ -1235,6 +1242,7 @@ class DialectTypesTest(fixtures.TestBase, AssertsCompiledSQL):
class TypesTest(fixtures.TestBase):
__only_on__ = 'oracle'
__dialect__ = oracle.OracleDialect()
+ __backend__ = True
@testing.fails_on('+zxjdbc', 'zxjdbc lacks the FIXED_CHAR dbapi type')
@@ -1692,6 +1700,7 @@ class EuroNumericTest(fixtures.TestBase):
"""test the numeric output_type_handler when using non-US locale for NLS_LANG."""
__only_on__ = 'oracle+cx_oracle'
+ __backend__ = True
def setup(self):
self.old_nls_lang = os.environ.get('NLS_LANG', False)
@@ -1729,6 +1738,7 @@ class DontReflectIOTTest(fixtures.TestBase):
table_names."""
__only_on__ = 'oracle'
+ __backend__ = True
def setup(self):
testing.db.execute("""
@@ -1757,6 +1767,7 @@ class DontReflectIOTTest(fixtures.TestBase):
class BufferedColumnTest(fixtures.TestBase, AssertsCompiledSQL):
__only_on__ = 'oracle'
+ __backend__ = True
@classmethod
def setup_class(cls):
@@ -1794,6 +1805,7 @@ class BufferedColumnTest(fixtures.TestBase, AssertsCompiledSQL):
class UnsupportedIndexReflectTest(fixtures.TestBase):
__only_on__ = 'oracle'
+ __backend__ = True
@testing.emits_warning("No column names")
@testing.provide_metadata
@@ -1813,6 +1825,9 @@ class UnsupportedIndexReflectTest(fixtures.TestBase):
def all_tables_compression_missing():
try:
testing.db.execute('SELECT compression FROM all_tables')
+ if "Enterprise Edition" not in testing.db.scalar(
+ "select * from v$version"):
+ return True
return False
except:
return True
@@ -1821,6 +1836,9 @@ def all_tables_compression_missing():
def all_tables_compress_for_missing():
try:
testing.db.execute('SELECT compress_for FROM all_tables')
+ if "Enterprise Edition" not in testing.db.scalar(
+ "select * from v$version"):
+ return True
return False
except:
return True
@@ -1828,6 +1846,7 @@ def all_tables_compress_for_missing():
class TableReflectionTest(fixtures.TestBase):
__only_on__ = 'oracle'
+ __backend__ = True
@testing.provide_metadata
@testing.fails_if(all_tables_compression_missing)
@@ -1888,6 +1907,7 @@ class TableReflectionTest(fixtures.TestBase):
class RoundTripIndexTest(fixtures.TestBase):
__only_on__ = 'oracle'
+ __backend__ = True
@testing.provide_metadata
def test_basic(self):
@@ -1983,6 +2003,7 @@ class SequenceTest(fixtures.TestBase, AssertsCompiledSQL):
class ExecuteTest(fixtures.TestBase):
__only_on__ = 'oracle'
+ __backend__ = True
def test_basic(self):
eq_(testing.db.execute('/*+ this is a comment */ SELECT 1 FROM '
@@ -2035,6 +2056,7 @@ class ExecuteTest(fixtures.TestBase):
class UnicodeSchemaTest(fixtures.TestBase):
__only_on__ = 'oracle'
+ __backend__ = True
@testing.provide_metadata
def test_quoted_column_non_unicode(self):
@@ -2072,12 +2094,16 @@ class UnicodeSchemaTest(fixtures.TestBase):
class DBLinkReflectionTest(fixtures.TestBase):
__requires__ = 'oracle_test_dblink',
__only_on__ = 'oracle'
+ __backend__ = True
@classmethod
def setup_class(cls):
from sqlalchemy.testing import config
cls.dblink = config.file_config.get('sqla_testing', 'oracle_db_link')
+ # note that the synonym here is still not totally functional
+ # when accessing via a different username as we do with the multiprocess
+ # test suite, so testing here is minimal
with testing.db.connect() as conn:
conn.execute(
"create table test_table "
@@ -2091,15 +2117,6 @@ class DBLinkReflectionTest(fixtures.TestBase):
conn.execute("drop synonym test_table_syn")
conn.execute("drop table test_table")
- def test_hello_world(self):
- """test that the synonym/dblink is functional."""
- testing.db.execute("insert into test_table_syn (id, data) "
- "values (1, 'some data')")
- eq_(
- testing.db.execute("select * from test_table_syn").first(),
- (1, 'some data')
- )
-
def test_reflection(self):
"""test the resolution of the synonym/dblink. """
m = MetaData()
@@ -2112,6 +2129,7 @@ class DBLinkReflectionTest(fixtures.TestBase):
class ServiceNameTest(fixtures.TestBase):
__only_on__ = 'oracle+cx_oracle'
+ __backend__ = True
def test_cx_oracle_service_name(self):
url_string = 'oracle+cx_oracle://scott:tiger@host/?service_name=hr'
diff --git a/test/dialect/test_sqlite.py b/test/dialect/test_sqlite.py
index 33903ff89..580950b12 100644
--- a/test/dialect/test_sqlite.py
+++ b/test/dialect/test_sqlite.py
@@ -8,7 +8,7 @@ from sqlalchemy.testing import eq_, assert_raises, \
assert_raises_message, is_
from sqlalchemy import Table, select, bindparam, Column,\
MetaData, func, extract, ForeignKey, text, DefaultClause, and_, \
- create_engine, UniqueConstraint, Index
+ create_engine, UniqueConstraint, Index, PrimaryKeyConstraint
from sqlalchemy.types import Integer, String, Boolean, DateTime, Date, Time
from sqlalchemy import types as sqltypes
from sqlalchemy import event, inspect
@@ -1130,6 +1130,18 @@ class ConstraintReflectionTest(fixtures.TestBase):
prefixes=['TEMPORARY']
)
+ Table(
+ 'p', meta,
+ Column('id', Integer),
+ PrimaryKeyConstraint('id', name='pk_name'),
+ )
+
+ Table(
+ 'q', meta,
+ Column('id', Integer),
+ PrimaryKeyConstraint('id'),
+ )
+
meta.create_all(conn)
# will contain an "autoindex"
@@ -1224,8 +1236,6 @@ class ConstraintReflectionTest(fixtures.TestBase):
def test_unnamed_inline_foreign_key_quoted(self):
inspector = Inspector(testing.db)
-
- inspector = Inspector(testing.db)
fks = inspector.get_foreign_keys('e1')
eq_(
fks,
@@ -1342,6 +1352,27 @@ class ConstraintReflectionTest(fixtures.TestBase):
[{'column_names': ['x'], 'name': None}]
)
+ def test_primary_key_constraint_named(self):
+ inspector = Inspector(testing.db)
+ eq_(
+ inspector.get_pk_constraint("p"),
+ {'constrained_columns': ['id'], 'name': 'pk_name'}
+ )
+
+ def test_primary_key_constraint_unnamed(self):
+ inspector = Inspector(testing.db)
+ eq_(
+ inspector.get_pk_constraint("q"),
+ {'constrained_columns': ['id'], 'name': None}
+ )
+
+ def test_primary_key_constraint_no_pk(self):
+ inspector = Inspector(testing.db)
+ eq_(
+ inspector.get_pk_constraint("d"),
+ {'constrained_columns': [], 'name': None}
+ )
+
class SavepointTest(fixtures.TablesTest):
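
Note: the new tests exercise reflection of named (and unnamed) PRIMARY KEY constraints on SQLite. A minimal sketch against an in-memory database:

    from sqlalchemy import create_engine, inspect

    eng = create_engine("sqlite://")
    eng.execute(
        "CREATE TABLE p (id INTEGER, CONSTRAINT pk_name PRIMARY KEY (id))")
    print(inspect(eng).get_pk_constraint("p"))
    # {'constrained_columns': ['id'], 'name': 'pk_name'}
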
diff --git a/test/engine/test_logging.py b/test/engine/test_logging.py
index 180ea9388..51ebc5250 100644
--- a/test/engine/test_logging.py
+++ b/test/engine/test_logging.py
@@ -1,4 +1,4 @@
-from sqlalchemy.testing import eq_, assert_raises_message
+from sqlalchemy.testing import eq_, assert_raises_message, eq_regex
from sqlalchemy import select
import sqlalchemy as tsa
from sqlalchemy.testing import engines
@@ -6,6 +6,7 @@ import logging.handlers
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import mock
from sqlalchemy.testing.util import lazy_gc
+from sqlalchemy import util
class LogParamsTest(fixtures.TestBase):
@@ -53,6 +54,133 @@ class LogParamsTest(fixtures.TestBase):
"bound parameter sets ... ('98',), ('99',)]"
)
+ def test_log_large_parameter_single(self):
+ import random
+ largeparam = ''.join(chr(random.randint(52, 85)) for i in range(5000))
+
+ self.eng.execute(
+ "INSERT INTO foo (data) values (?)",
+ (largeparam, )
+ )
+
+ eq_(
+ self.buf.buffer[1].message,
+ "('%s ... (4702 characters truncated) ... %s',)" % (
+ largeparam[0:149], largeparam[-149:]
+ )
+ )
+
+ def test_log_large_multi_parameter(self):
+ import random
+ lp1 = ''.join(chr(random.randint(52, 85)) for i in range(5))
+ lp2 = ''.join(chr(random.randint(52, 85)) for i in range(8))
+ lp3 = ''.join(chr(random.randint(52, 85)) for i in range(670))
+
+ self.eng.execute(
+ "SELECT ?, ?, ?",
+ (lp1, lp2, lp3)
+ )
+
+ eq_(
+ self.buf.buffer[1].message,
+ "('%s', '%s', '%s ... (372 characters truncated) ... %s')" % (
+ lp1, lp2, lp3[0:149], lp3[-149:]
+ )
+ )
+
+ def test_log_large_parameter_multiple(self):
+ import random
+ lp1 = ''.join(chr(random.randint(52, 85)) for i in range(5000))
+ lp2 = ''.join(chr(random.randint(52, 85)) for i in range(200))
+ lp3 = ''.join(chr(random.randint(52, 85)) for i in range(670))
+
+ self.eng.execute(
+ "INSERT INTO foo (data) values (?)",
+ [(lp1, ), (lp2, ), (lp3, )]
+ )
+
+ eq_(
+ self.buf.buffer[1].message,
+ "[('%s ... (4702 characters truncated) ... %s',), ('%s',), "
+ "('%s ... (372 characters truncated) ... %s',)]" % (
+ lp1[0:149], lp1[-149:], lp2, lp3[0:149], lp3[-149:]
+ )
+ )
+
+ def test_exception_format_dict_param(self):
+ exception = tsa.exc.IntegrityError("foo", {"x": "y"}, None)
+ eq_regex(
+ str(exception),
+ r"\(.*.NoneType\) None \[SQL: 'foo'\] \[parameters: {'x': 'y'}\]"
+ )
+
+ def test_exception_format_unexpected_parameter(self):
+ # test that if the parameters aren't any known type, we just
+ # run through repr()
+ exception = tsa.exc.IntegrityError("foo", "bar", "bat")
+ eq_regex(
+ str(exception),
+ r"\(.*.str\) bat \[SQL: 'foo'\] \[parameters: 'bar'\]"
+ )
+
+ def test_exception_format_unexpected_member_parameter(self):
+ # test that if the parameters aren't any known type, we just
+ # run through repr()
+ exception = tsa.exc.IntegrityError("foo", ["bar", "bat"], "hoho")
+ eq_regex(
+ str(exception),
+ r"\(.*.str\) hoho \[SQL: 'foo'\] \[parameters: \['bar', 'bat'\]\]"
+ )
+
+ def test_result_large_param(self):
+ import random
+ largeparam = ''.join(chr(random.randint(52, 85)) for i in range(5000))
+
+ self.eng.echo = 'debug'
+ result = self.eng.execute(
+ "SELECT ?",
+ (largeparam, )
+ )
+
+ row = result.first()
+
+ eq_(
+ self.buf.buffer[1].message,
+ "('%s ... (4702 characters truncated) ... %s',)" % (
+ largeparam[0:149], largeparam[-149:]
+ )
+ )
+
+ if util.py3k:
+ eq_(
+ self.buf.buffer[3].message,
+ "Row ('%s ... (4702 characters truncated) ... %s',)" % (
+ largeparam[0:149], largeparam[-149:]
+ )
+ )
+ else:
+ eq_(
+ self.buf.buffer[3].message,
+ "Row (u'%s ... (4703 characters truncated) ... %s',)" % (
+ largeparam[0:148], largeparam[-149:]
+ )
+ )
+
+ if util.py3k:
+ eq_(
+ repr(row),
+ "('%s ... (4702 characters truncated) ... %s',)" % (
+ largeparam[0:149], largeparam[-149:]
+ )
+ )
+ else:
+ eq_(
+ repr(row),
+ "(u'%s ... (4703 characters truncated) ... %s',)" % (
+ largeparam[0:148], largeparam[-149:]
+ )
+ )
+
def test_error_large_dict(self):
assert_raises_message(
tsa.exc.DBAPIError,
diff --git a/test/engine/test_pool.py b/test/engine/test_pool.py
index 8551e1fcb..4547984ab 100644
--- a/test/engine/test_pool.py
+++ b/test/engine/test_pool.py
@@ -345,6 +345,66 @@ class PoolEventsTest(PoolTestBase):
return p, canary
+ def _close_event_fixture(self):
+ p = self._queuepool_fixture()
+ canary = Mock()
+ event.listen(p, 'close', canary)
+
+ return p, canary
+
+ def _detach_event_fixture(self):
+ p = self._queuepool_fixture()
+ canary = Mock()
+ event.listen(p, 'detach', canary)
+
+ return p, canary
+
+ def _close_detached_event_fixture(self):
+ p = self._queuepool_fixture()
+ canary = Mock()
+ event.listen(p, 'close_detached', canary)
+
+ return p, canary
+
+ def test_close(self):
+ p, canary = self._close_event_fixture()
+
+ c1 = p.connect()
+
+ connection = c1.connection
+ rec = c1._connection_record
+
+ c1.close()
+
+ eq_(canary.mock_calls, [])
+
+ p.dispose()
+ eq_(canary.mock_calls, [call(connection, rec)])
+
+ def test_detach(self):
+ p, canary = self._detach_event_fixture()
+
+ c1 = p.connect()
+
+ connection = c1.connection
+ rec = c1._connection_record
+
+ c1.detach()
+
+ eq_(canary.mock_calls, [call(connection, rec)])
+
+ def test_detach_close(self):
+ p, canary = self._close_detached_event_fixture()
+
+ c1 = p.connect()
+
+ connection = c1.connection
+
+ c1.detach()
+
+ c1.close()
+ eq_(canary.mock_calls, [call(connection)])
+
def test_first_connect_event(self):
p, canary = self._first_connect_event_fixture()
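
Note: the fixtures above correspond to the new pool-level 'close', 'detach', and 'close_detached' events. A sketch of listening for them on an engine's pool:

    from sqlalchemy import create_engine, event

    eng = create_engine("sqlite://")

    @event.listens_for(eng.pool, "close")
    def receive_close(dbapi_connection, connection_record):
        # fired when a pooled DBAPI connection is actually closed,
        # e.g. during pool.dispose()
        print("closed", dbapi_connection)

    @event.listens_for(eng.pool, "detach")
    def receive_detach(dbapi_connection, connection_record):
        # fired when a connection is detached from its pool record
        print("detached", dbapi_connection)

    @event.listens_for(eng.pool, "close_detached")
    def receive_close_detached(dbapi_connection):
        # fired when a previously detached connection is closed
        print("closed detached", dbapi_connection)
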
diff --git a/test/engine/test_transaction.py b/test/engine/test_transaction.py
index 7f8a7c97c..7eda6e1b4 100644
--- a/test/engine/test_transaction.py
+++ b/test/engine/test_transaction.py
@@ -218,6 +218,27 @@ class TransactionTest(fixtures.TestBase):
finally:
connection.close()
+ @testing.requires.python2
+ @testing.requires.savepoints_w_release
+ def test_savepoint_release_fails_warning(self):
+ with testing.db.connect() as connection:
+ connection.begin()
+
+ with expect_warnings(
+ "An exception has occurred during handling of a previous "
+ "exception. The previous exception "
+ "is:.*..SQL\:.*RELEASE SAVEPOINT"
+ ):
+ def go():
+ with connection.begin_nested() as savepoint:
+ connection.dialect.do_release_savepoint(
+ connection, savepoint._savepoint)
+ assert_raises_message(
+ exc.DBAPIError,
+ ".*SQL\:.*ROLLBACK TO SAVEPOINT",
+ go
+ )
+
def test_retains_through_options(self):
connection = testing.db.connect()
try:
@@ -1226,6 +1247,8 @@ class IsolationLevelTest(fixtures.TestBase):
return 'READ COMMITTED'
elif testing.against('mysql'):
return "REPEATABLE READ"
+ elif testing.against('mssql'):
+ return "READ COMMITTED"
else:
assert False, "default isolation level not known"
@@ -1236,6 +1259,8 @@ class IsolationLevelTest(fixtures.TestBase):
return 'SERIALIZABLE'
elif testing.against('mysql'):
return "SERIALIZABLE"
+ elif testing.against('mssql'):
+ return "SERIALIZABLE"
else:
assert False, "non default isolation level not known"
diff --git a/test/ext/test_mutable.py b/test/ext/test_mutable.py
index 602ff911a..1e1a75e7e 100644
--- a/test/ext/test_mutable.py
+++ b/test/ext/test_mutable.py
@@ -8,7 +8,7 @@ from sqlalchemy.testing import eq_, assert_raises_message, assert_raises
from sqlalchemy.testing.util import picklers
from sqlalchemy.testing import fixtures
from sqlalchemy.ext.mutable import MutableComposite
-from sqlalchemy.ext.mutable import MutableDict
+from sqlalchemy.ext.mutable import MutableDict, MutableList, MutableSet
class Foo(fixtures.BasicEntity):
@@ -261,6 +261,383 @@ class _MutableDictTestBase(_MutableDictTestFixture):
eq_(f1.non_mutable_data, {'a': 'b'})
+class _MutableListTestFixture(object):
+ @classmethod
+ def _type_fixture(cls):
+ return MutableList
+
+ def teardown(self):
+ # clear out mapper events
+ Mapper.dispatch._clear()
+ ClassManager.dispatch._clear()
+ super(_MutableListTestFixture, self).teardown()
+
+
+class _MutableListTestBase(_MutableListTestFixture):
+ run_define_tables = 'each'
+
+ def setup_mappers(cls):
+ foo = cls.tables.foo
+
+ mapper(Foo, foo)
+
+ def test_coerce_none(self):
+ sess = Session()
+ f1 = Foo(data=None)
+ sess.add(f1)
+ sess.commit()
+ eq_(f1.data, None)
+
+ def test_coerce_raise(self):
+ assert_raises_message(
+ ValueError,
+ "Attribute 'data' does not accept objects of type",
+ Foo, data=set([1, 2, 3])
+ )
+
+ def test_in_place_mutation(self):
+ sess = Session()
+
+ f1 = Foo(data=[1, 2])
+ sess.add(f1)
+ sess.commit()
+
+ f1.data[0] = 3
+ sess.commit()
+
+ eq_(f1.data, [3, 2])
+
+ def test_in_place_slice_mutation(self):
+ sess = Session()
+
+ f1 = Foo(data=[1, 2, 3, 4])
+ sess.add(f1)
+ sess.commit()
+
+ f1.data[1:3] = 5, 6
+ sess.commit()
+
+ eq_(f1.data, [1, 5, 6, 4])
+
+ def test_del_slice(self):
+ sess = Session()
+
+ f1 = Foo(data=[1, 2, 3, 4])
+ sess.add(f1)
+ sess.commit()
+
+ del f1.data[1:3]
+ sess.commit()
+
+ eq_(f1.data, [1, 4])
+
+ def test_clear(self):
+ if not hasattr(list, 'clear'):
+ # py2 list doesn't have 'clear'
+ return
+ sess = Session()
+
+ f1 = Foo(data=[1, 2])
+ sess.add(f1)
+ sess.commit()
+
+ f1.data.clear()
+ sess.commit()
+
+ eq_(f1.data, [])
+
+ def test_pop(self):
+ sess = Session()
+
+ f1 = Foo(data=[1, 2, 3])
+ sess.add(f1)
+ sess.commit()
+
+ eq_(f1.data.pop(), 3)
+ eq_(f1.data.pop(0), 1)
+ sess.commit()
+
+ assert_raises(IndexError, f1.data.pop, 5)
+
+ eq_(f1.data, [2])
+
+ def test_append(self):
+ sess = Session()
+
+ f1 = Foo(data=[1, 2])
+ sess.add(f1)
+ sess.commit()
+
+ f1.data.append(5)
+ sess.commit()
+
+ eq_(f1.data, [1, 2, 5])
+
+ def test_extend(self):
+ sess = Session()
+
+ f1 = Foo(data=[1, 2])
+ sess.add(f1)
+ sess.commit()
+
+ f1.data.extend([5])
+ sess.commit()
+
+ eq_(f1.data, [1, 2, 5])
+
+ def test_insert(self):
+ sess = Session()
+
+ f1 = Foo(data=[1, 2])
+ sess.add(f1)
+ sess.commit()
+
+ f1.data.insert(1, 5)
+ sess.commit()
+
+ eq_(f1.data, [1, 5, 2])
+
+ def test_remove(self):
+ sess = Session()
+
+ f1 = Foo(data=[1, 2, 3])
+ sess.add(f1)
+ sess.commit()
+
+ f1.data.remove(2)
+ sess.commit()
+
+ eq_(f1.data, [1, 3])
+
+ def test_sort(self):
+ sess = Session()
+
+ f1 = Foo(data=[1, 3, 2])
+ sess.add(f1)
+ sess.commit()
+
+ f1.data.sort()
+ sess.commit()
+
+ eq_(f1.data, [1, 2, 3])
+
+ def test_reverse(self):
+ sess = Session()
+
+ f1 = Foo(data=[1, 3, 2])
+ sess.add(f1)
+ sess.commit()
+
+ f1.data.reverse()
+ sess.commit()
+
+ eq_(f1.data, [2, 3, 1])
+
+ def test_pickle_parent(self):
+ sess = Session()
+
+ f1 = Foo(data=[1, 2])
+ sess.add(f1)
+ sess.commit()
+ f1.data
+ sess.close()
+
+ for loads, dumps in picklers():
+ sess = Session()
+ f2 = loads(dumps(f1))
+ sess.add(f2)
+ f2.data[0] = 3
+ assert f2 in sess.dirty
+
+ def test_unrelated_flush(self):
+ sess = Session()
+ f1 = Foo(data=[1, 2], unrelated_data="unrelated")
+ sess.add(f1)
+ sess.flush()
+ f1.unrelated_data = "unrelated 2"
+ sess.flush()
+ f1.data[0] = 3
+ sess.commit()
+ eq_(f1.data[0], 3)
+
+
+class _MutableSetTestFixture(object):
+ @classmethod
+ def _type_fixture(cls):
+ return MutableSet
+
+ def teardown(self):
+ # clear out mapper events
+ Mapper.dispatch._clear()
+ ClassManager.dispatch._clear()
+ super(_MutableSetTestFixture, self).teardown()
+
+
+class _MutableSetTestBase(_MutableSetTestFixture):
+ run_define_tables = 'each'
+
+ def setup_mappers(cls):
+ foo = cls.tables.foo
+
+ mapper(Foo, foo)
+
+ def test_coerce_none(self):
+ sess = Session()
+ f1 = Foo(data=None)
+ sess.add(f1)
+ sess.commit()
+ eq_(f1.data, None)
+
+ def test_coerce_raise(self):
+ assert_raises_message(
+ ValueError,
+ "Attribute 'data' does not accept objects of type",
+ Foo, data=[1, 2, 3]
+ )
+
+ def test_clear(self):
+ sess = Session()
+
+ f1 = Foo(data=set([1, 2]))
+ sess.add(f1)
+ sess.commit()
+
+ f1.data.clear()
+ sess.commit()
+
+ eq_(f1.data, set())
+
+ def test_pop(self):
+ sess = Session()
+
+ f1 = Foo(data=set([1]))
+ sess.add(f1)
+ sess.commit()
+
+ eq_(f1.data.pop(), 1)
+ sess.commit()
+
+ assert_raises(KeyError, f1.data.pop)
+
+ eq_(f1.data, set())
+
+ def test_add(self):
+ sess = Session()
+
+ f1 = Foo(data=set([1, 2]))
+ sess.add(f1)
+ sess.commit()
+
+ f1.data.add(5)
+ sess.commit()
+
+ eq_(f1.data, set([1, 2, 5]))
+
+ def test_update(self):
+ sess = Session()
+
+ f1 = Foo(data=set([1, 2]))
+ sess.add(f1)
+ sess.commit()
+
+ f1.data.update(set([2, 5]))
+ sess.commit()
+
+ eq_(f1.data, set([1, 2, 5]))
+
+ def test_intersection_update(self):
+ sess = Session()
+
+ f1 = Foo(data=set([1, 2]))
+ sess.add(f1)
+ sess.commit()
+
+ f1.data.intersection_update(set([2, 5]))
+ sess.commit()
+
+ eq_(f1.data, set([2]))
+
+ def test_difference_update(self):
+ sess = Session()
+
+ f1 = Foo(data=set([1, 2]))
+ sess.add(f1)
+ sess.commit()
+
+ f1.data.difference_update(set([2, 5]))
+ sess.commit()
+
+ eq_(f1.data, set([1]))
+
+ def test_symmetric_difference_update(self):
+ sess = Session()
+
+ f1 = Foo(data=set([1, 2]))
+ sess.add(f1)
+ sess.commit()
+
+ f1.data.symmetric_difference_update(set([2, 5]))
+ sess.commit()
+
+ eq_(f1.data, set([1, 5]))
+
+ def test_remove(self):
+ sess = Session()
+
+ f1 = Foo(data=set([1, 2, 3]))
+ sess.add(f1)
+ sess.commit()
+
+ f1.data.remove(2)
+ sess.commit()
+
+ eq_(f1.data, set([1, 3]))
+
+ def test_discard(self):
+ sess = Session()
+
+ f1 = Foo(data=set([1, 2, 3]))
+ sess.add(f1)
+ sess.commit()
+
+ f1.data.discard(2)
+ sess.commit()
+
+ eq_(f1.data, set([1, 3]))
+
+ f1.data.discard(2)
+ sess.commit()
+
+ eq_(f1.data, set([1, 3]))
+
+ def test_pickle_parent(self):
+ sess = Session()
+
+ f1 = Foo(data=set([1, 2]))
+ sess.add(f1)
+ sess.commit()
+ f1.data
+ sess.close()
+
+ for loads, dumps in picklers():
+ sess = Session()
+ f2 = loads(dumps(f1))
+ sess.add(f2)
+ f2.data.add(3)
+ assert f2 in sess.dirty
+
+ def test_unrelated_flush(self):
+ sess = Session()
+ f1 = Foo(data=set([1, 2]), unrelated_data="unrelated")
+ sess.add(f1)
+ sess.flush()
+ f1.unrelated_data = "unrelated 2"
+ sess.flush()
+ f1.data.add(3)
+ sess.commit()
+ eq_(f1.data, set([1, 2, 3]))
+
+
class MutableColumnDefaultTest(_MutableDictTestFixture, fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
@@ -349,6 +726,40 @@ class MutableWithScalarJSONTest(_MutableDictTestBase, fixtures.MappedTest):
self._test_non_mutable()
+class MutableListWithScalarPickleTest(_MutableListTestBase, fixtures.MappedTest):
+
+ @classmethod
+ def define_tables(cls, metadata):
+ MutableList = cls._type_fixture()
+
+ mutable_pickle = MutableList.as_mutable(PickleType)
+ Table('foo', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('skip', mutable_pickle),
+ Column('data', mutable_pickle),
+ Column('non_mutable_data', PickleType),
+ Column('unrelated_data', String(50))
+ )
+
+
+class MutableSetWithScalarPickleTest(_MutableSetTestBase, fixtures.MappedTest):
+
+ @classmethod
+ def define_tables(cls, metadata):
+ MutableSet = cls._type_fixture()
+
+ mutable_pickle = MutableSet.as_mutable(PickleType)
+ Table('foo', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('skip', mutable_pickle),
+ Column('data', mutable_pickle),
+ Column('non_mutable_data', PickleType),
+ Column('unrelated_data', String(50))
+ )
+
+
class MutableAssocWithAttrInheritTest(_MutableDictTestBase,
fixtures.MappedTest):
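
Note: the new MutableList and MutableSet extension types used by these fixtures track in-place changes on list- and set-valued columns. A minimal declarative sketch, with a hypothetical Record class:

    from sqlalchemy import Column, Integer, PickleType
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.ext.mutable import MutableList

    Base = declarative_base()

    class Record(Base):
        __tablename__ = 'record'
        id = Column(Integer, primary_key=True)
        data = Column(MutableList.as_mutable(PickleType), default=list)

    # operations such as record.data.append(5), record.data[0] = 3 or
    # del record.data[1:3] now flag the attribute as changed, so the ORM
    # includes it in the next flush.
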
diff --git a/test/orm/inheritance/test_basic.py b/test/orm/inheritance/test_basic.py
index 5f03a613b..3717fafa0 100644
--- a/test/orm/inheritance/test_basic.py
+++ b/test/orm/inheritance/test_basic.py
@@ -2334,7 +2334,7 @@ class OptimizedLoadTest(fixtures.MappedTest):
)
class TransientInheritingGCTest(fixtures.TestBase):
- __requires__ = ('cpython',)
+ __requires__ = ('cpython', 'no_coverage')
def _fixture(self):
Base = declarative_base()
diff --git a/test/orm/inheritance/test_polymorphic_rel.py b/test/orm/inheritance/test_polymorphic_rel.py
index 29fbcff85..c82c30d59 100644
--- a/test/orm/inheritance/test_polymorphic_rel.py
+++ b/test/orm/inheritance/test_polymorphic_rel.py
@@ -1,7 +1,7 @@
from sqlalchemy import func, desc
from sqlalchemy.orm import interfaces, create_session, joinedload, joinedload_all, \
subqueryload, subqueryload_all, aliased,\
- class_mapper
+ class_mapper, with_polymorphic
from sqlalchemy import exc as sa_exc
from sqlalchemy import testing
@@ -1250,6 +1250,44 @@ class _PolymorphicTestBase(object):
assert row.name == 'dilbert'
assert row.primary_language == 'java'
+ def test_correlation_one(self):
+ sess = create_session()
+
+ # unfortunately this pattern can't yet work for PolymorphicAliased
+ # and PolymorphicUnions, because the subquery does not compile
+ # out including the polymorphic selectable; only if Person is in
+ # the query() list does that happen.
+ eq_(sess.query(Person.name)
+ .filter(
+ sess.query(Company.name).
+ filter(Company.company_id == Person.company_id).
+ correlate(Person).as_scalar() == "Elbonia, Inc.").all(),
+ [(e3.name, )])
+
+ def test_correlation_two(self):
+ sess = create_session()
+
+ paliased = aliased(Person)
+
+ eq_(sess.query(paliased.name)
+ .filter(
+ sess.query(Company.name).
+ filter(Company.company_id == paliased.company_id).
+ correlate(paliased).as_scalar() == "Elbonia, Inc.").all(),
+ [(e3.name, )])
+
+ def test_correlation_three(self):
+ sess = create_session()
+
+ paliased = aliased(Person, flat=True)
+
+ eq_(sess.query(paliased.name)
+ .filter(
+ sess.query(Company.name).
+ filter(Company.company_id == paliased.company_id).
+ correlate(paliased).as_scalar() == "Elbonia, Inc.").all(),
+ [(e3.name, )])
+
class PolymorphicTest(_PolymorphicTestBase, _Polymorphic):
def test_join_to_subclass_four(self):
sess = create_session()
@@ -1266,6 +1304,31 @@ class PolymorphicTest(_PolymorphicTestBase, _Polymorphic):
.filter(Machine.name.ilike("%ibm%")).all(),
[e1, e3])
+ def test_correlation_w_polymorphic(self):
+
+ sess = create_session()
+
+ p_poly = with_polymorphic(Person, '*')
+
+ eq_(sess.query(p_poly.name)
+ .filter(
+ sess.query(Company.name).
+ filter(Company.company_id == p_poly.company_id).
+ correlate(p_poly).as_scalar() == "Elbonia, Inc.").all(),
+ [(e3.name, )])
+
+ def test_correlation_w_polymorphic_flat(self):
+
+ sess = create_session()
+
+ p_poly = with_polymorphic(Person, '*', flat=True)
+
+ eq_(sess.query(p_poly.name)
+ .filter(
+ sess.query(Company.name).
+ filter(Company.company_id == p_poly.company_id).
+ correlate(p_poly).as_scalar() == "Elbonia, Inc.").all(),
+ [(e3.name, )])
def test_join_to_subclass_ten(self):
pass
@@ -1377,10 +1440,16 @@ class PolymorphicPolymorphicTest(_PolymorphicTestBase, _PolymorphicPolymorphic):
class PolymorphicUnionsTest(_PolymorphicTestBase, _PolymorphicUnions):
- pass
+
+ @testing.fails()
+ def test_correlation_one(self):
+ super(PolymorphicUnionsTest, self).test_correlation_one()
+
class PolymorphicAliasedJoinsTest(_PolymorphicTestBase, _PolymorphicAliasedJoins):
- pass
+ @testing.fails()
+ def test_correlation_one(self):
+ super(PolymorphicAliasedJoinsTest, self).test_correlation_one()
class PolymorphicJoinsTest(_PolymorphicTestBase, _PolymorphicJoins):
pass
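
The correlation tests added above reduce to one pattern: a scalar subquery correlated against an aliased() (or with_polymorphic()) entity rather than the base class. A condensed sketch, assuming the Person / Company fixtures and the session from this test module:

    from sqlalchemy.orm import aliased

    paliased = aliased(Person, flat=True)

    company_name = (
        session.query(Company.name)
        .filter(Company.company_id == paliased.company_id)
        .correlate(paliased)
        .as_scalar()
    )

    result = (
        session.query(paliased.name)
        .filter(company_name == "Elbonia, Inc.")
        .all()
    )
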
diff --git a/test/orm/test_eager_relations.py b/test/orm/test_eager_relations.py
index 1c3b57690..3ad641b8f 100644
--- a/test/orm/test_eager_relations.py
+++ b/test/orm/test_eager_relations.py
@@ -1,11 +1,11 @@
"""tests of joined-eager loaded attributes"""
-from sqlalchemy.testing import eq_, is_, is_not_
+from sqlalchemy.testing import eq_, is_, is_not_, in_
import sqlalchemy as sa
from sqlalchemy import testing
from sqlalchemy.orm import joinedload, deferred, undefer, \
joinedload_all, backref, Session,\
- defaultload, Load, load_only
+ defaultload, Load, load_only, contains_eager
from sqlalchemy import Integer, String, Date, ForeignKey, and_, select, \
func, text
from sqlalchemy.testing.schema import Table, Column
@@ -4178,3 +4178,148 @@ class EnsureColumnsAddedTest(
"ON m2mchild_1.id = parent_to_m2m_1.child_id) "
"ON parent.arb = parent_to_m2m_1.parent_id"
)
+
+
+class EntityViaMultiplePathTestOne(fixtures.DeclarativeMappedTest):
+ """test for [ticket:3431]"""
+
+ @classmethod
+ def setup_classes(cls):
+ Base = cls.DeclarativeBasic
+
+ class A(Base):
+ __tablename__ = 'a'
+ id = Column(Integer, primary_key=True)
+ b_id = Column(ForeignKey('b.id'))
+ c_id = Column(ForeignKey('c.id'))
+
+ b = relationship("B")
+ c = relationship("C")
+
+ class B(Base):
+ __tablename__ = 'b'
+ id = Column(Integer, primary_key=True)
+ c_id = Column(ForeignKey('c.id'))
+
+ c = relationship("C")
+
+ class C(Base):
+ __tablename__ = 'c'
+ id = Column(Integer, primary_key=True)
+ d_id = Column(ForeignKey('d.id'))
+ d = relationship("D")
+
+ class D(Base):
+ __tablename__ = 'd'
+ id = Column(Integer, primary_key=True)
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'a', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('bid', ForeignKey('b.id'))
+ )
+
+ def test_multi_path_load(self):
+ A, B, C, D = self.classes('A', 'B', 'C', 'D')
+
+ s = Session()
+
+ c = C(d=D())
+
+ s.add(
+ A(b=B(c=c), c=c)
+ )
+ s.commit()
+
+ c_alias_1 = aliased(C)
+ c_alias_2 = aliased(C)
+
+ q = s.query(A)
+ q = q.join(A.b).join(c_alias_1, B.c).join(c_alias_1.d)
+ q = q.options(
+ contains_eager(A.b).
+ contains_eager(B.c, alias=c_alias_1).
+ contains_eager(C.d))
+ q = q.join(c_alias_2, A.c)
+ q = q.options(contains_eager(A.c, alias=c_alias_2))
+
+ a1 = q.all()[0]
+
+ # ensure 'd' key was populated in dict. Varies based on
+ # PYTHONHASHSEED
+ in_('d', a1.c.__dict__)
+
+
+class EntityViaMultiplePathTestTwo(fixtures.DeclarativeMappedTest):
+ """test for [ticket:3431]"""
+
+ @classmethod
+ def setup_classes(cls):
+ Base = cls.DeclarativeBasic
+
+ class User(Base):
+ __tablename__ = 'cs_user'
+
+ id = Column(Integer, primary_key=True)
+ data = Column(Integer)
+
+ class LD(Base):
+ """Child. The column we reference 'A' with is an integer."""
+
+ __tablename__ = 'cs_ld'
+
+ id = Column(Integer, primary_key=True)
+ user_id = Column(Integer, ForeignKey('cs_user.id'))
+ user = relationship(User, primaryjoin=user_id == User.id)
+
+ class A(Base):
+ """Child. The column we reference 'A' with is an integer."""
+
+ __tablename__ = 'cs_a'
+
+ id = Column(Integer, primary_key=True)
+ ld_id = Column(Integer, ForeignKey('cs_ld.id'))
+ ld = relationship(LD, primaryjoin=ld_id == LD.id)
+
+ class LDA(Base):
+ """Child. The column we reference 'A' with is an integer."""
+
+ __tablename__ = 'cs_lda'
+
+ id = Column(Integer, primary_key=True)
+ ld_id = Column(Integer, ForeignKey('cs_ld.id'))
+ a_id = Column(Integer, ForeignKey('cs_a.id'))
+ a = relationship(A, primaryjoin=a_id == A.id)
+ ld = relationship(LD, primaryjoin=ld_id == LD.id)
+
+ def test_multi_path_load(self):
+ User, LD, A, LDA = self.classes('User', 'LD', 'A', 'LDA')
+
+ s = Session()
+
+ u0 = User(data=42)
+ l0 = LD(user=u0)
+ z0 = A(ld=l0)
+ lz0 = LDA(ld=l0, a=z0)
+ s.add_all([
+ u0, l0, z0, lz0
+ ])
+ s.commit()
+
+ l_ac = aliased(LD)
+ u_ac = aliased(User)
+
+ lz_test = (s.query(LDA)
+ .join('ld')
+ .options(contains_eager('ld'))
+ .join('a', (l_ac, 'ld'), (u_ac, 'user'))
+ .options(contains_eager('a')
+ .contains_eager('ld', alias=l_ac)
+ .contains_eager('user', alias=u_ac))
+ .first())
+
+ in_(
+ 'user', lz_test.a.ld.__dict__
+ )
diff --git a/test/orm/test_joins.py b/test/orm/test_joins.py
index 540056dae..e7e943e8d 100644
--- a/test/orm/test_joins.py
+++ b/test/orm/test_joins.py
@@ -455,6 +455,15 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
"FROM users LEFT OUTER JOIN orders ON users.id = orders.user_id"
)
+ def test_full_flag(self):
+ User = self.classes.User
+
+ self.assert_compile(
+ create_session().query(User).outerjoin('orders', full=True),
+ "SELECT users.id AS users_id, users.name AS users_name "
+ "FROM users FULL OUTER JOIN orders ON users.id = orders.user_id"
+ )
+
def test_multi_tuple_form(self):
"""test the 'tuple' form of join, now superseded
diff --git a/test/orm/test_loading.py b/test/orm/test_loading.py
index f86477ec2..6f3f6a016 100644
--- a/test/orm/test_loading.py
+++ b/test/orm/test_loading.py
@@ -1,8 +1,11 @@
from . import _fixtures
from sqlalchemy.orm import loading, Session, aliased
-from sqlalchemy.testing.assertions import eq_, assert_raises
+from sqlalchemy.testing.assertions import eq_, \
+ assert_raises, assert_raises_message
from sqlalchemy.util import KeyedTuple
from sqlalchemy.testing import mock
+from sqlalchemy import select
+from sqlalchemy import exc
# class GetFromIdentityTest(_fixtures.FixtureTest):
# class LoadOnIdentTest(_fixtures.FixtureTest):
# class InstanceProcessorTest(_fixture.FixtureTest):
@@ -34,6 +37,19 @@ class InstancesTest(_fixtures.FixtureTest):
)
assert cursor.close.called, "Cursor wasn't closed"
+ def test_row_proc_not_created(self):
+ User = self.classes.User
+ s = Session()
+
+ q = s.query(User.id, User.name)
+ stmt = select([User.id])
+
+ assert_raises_message(
+ exc.NoSuchColumnError,
+ "Could not locate column in row for column 'users.name'",
+ q.from_statement(stmt).all
+ )
+
class MergeResultTest(_fixtures.FixtureTest):
run_setup_mappers = 'once'
diff --git a/test/orm/test_merge.py b/test/orm/test_merge.py
index dab9f4305..f69b07fe8 100644
--- a/test/orm/test_merge.py
+++ b/test/orm/test_merge.py
@@ -6,7 +6,7 @@ from sqlalchemy import testing
from sqlalchemy.util import OrderedSet
from sqlalchemy.orm import mapper, relationship, create_session, \
PropComparator, synonym, comparable_property, sessionmaker, \
- attributes, Session, backref, configure_mappers
+ attributes, Session, backref, configure_mappers, foreign
from sqlalchemy.orm.collections import attribute_mapped_collection
from sqlalchemy.orm.interfaces import MapperOption
from sqlalchemy.testing import eq_, ne_
@@ -451,6 +451,55 @@ class MergeTest(_fixtures.FixtureTest):
eq_(u2.addresses[1].email_address, 'afafds')
eq_(load.called, 21)
+ def test_dont_send_neverset_to_get(self):
+ # test issue #3647
+ CompositePk, composite_pk_table = (
+ self.classes.CompositePk, self.tables.composite_pk_table
+ )
+ mapper(CompositePk, composite_pk_table)
+ cp1 = CompositePk(j=1, k=1)
+
+ sess = Session()
+
+ rec = []
+
+ def go():
+ rec.append(sess.merge(cp1))
+ self.assert_sql_count(testing.db, go, 0)
+ rec[0].i = 5
+ sess.commit()
+ eq_(rec[0].i, 5)
+
+ def test_dont_send_neverset_to_get_w_relationship(self):
+ # test issue #3647
+ CompositePk, composite_pk_table = (
+ self.classes.CompositePk, self.tables.composite_pk_table
+ )
+ User, users = (
+ self.classes.User, self.tables.users
+ )
+ mapper(User, users, properties={
+ 'elements': relationship(
+ CompositePk,
+ primaryjoin=users.c.id == foreign(composite_pk_table.c.i))
+ })
+ mapper(CompositePk, composite_pk_table)
+
+ u1 = User(id=5, name='some user')
+ cp1 = CompositePk(j=1, k=1)
+ u1.elements.append(cp1)
+ sess = Session()
+
+ rec = []
+
+ def go():
+ rec.append(sess.merge(u1))
+ self.assert_sql_count(testing.db, go, 1)
+ u2 = rec[0]
+ sess.commit()
+ eq_(u2.elements[0].i, 5)
+ eq_(u2.id, 5)
+
def test_no_relationship_cascade(self):
"""test that merge doesn't interfere with a relationship()
target that specifically doesn't include 'merge' cascade.
diff --git a/test/orm/test_query.py b/test/orm/test_query.py
index 6445ffefd..cdc4ac2c2 100644
--- a/test/orm/test_query.py
+++ b/test/orm/test_query.py
@@ -484,7 +484,7 @@ class RawSelectTest(QueryTest, AssertsCompiledSQL):
self.assert_compile(
select([Foo]).where(Foo.foob == 'somename').order_by(Foo.foob),
"SELECT users.id, users.name FROM users "
- "WHERE coalesce(users.name) = :coalesce_1 "
+ "WHERE coalesce(users.name) = :param_1 "
"ORDER BY coalesce(users.name)"
)
@@ -1593,6 +1593,63 @@ class ColumnPropertyTest(_fixtures.FixtureTest, AssertsCompiledSQL):
}, with_polymorphic="*" if polymorphic else None)
mapper(Address, addresses)
+ def _func_fixture(self, label=False):
+ User = self.classes.User
+ users = self.tables.users
+
+ if label:
+ mapper(User, users, properties={
+ "foobar": column_property(
+ func.foob(users.c.name).label(None)
+ )
+ })
+ else:
+ mapper(User, users, properties={
+ "foobar": column_property(
+ func.foob(users.c.name)
+ )
+ })
+
+ def test_anon_label_function_auto(self):
+ self._func_fixture()
+ User = self.classes.User
+
+ s = Session()
+
+ u1 = aliased(User)
+ self.assert_compile(
+ s.query(User.foobar, u1.foobar),
+ "SELECT foob(users.name) AS foob_1, foob(users_1.name) AS foob_2 "
+ "FROM users, users AS users_1"
+ )
+
+ def test_anon_label_function_manual(self):
+ self._func_fixture(label=True)
+ User = self.classes.User
+
+ s = Session()
+
+ u1 = aliased(User)
+ self.assert_compile(
+ s.query(User.foobar, u1.foobar),
+ "SELECT foob(users.name) AS foob_1, foob(users_1.name) AS foob_2 "
+ "FROM users, users AS users_1"
+ )
+
+ def test_anon_label_ad_hoc_labeling(self):
+ self._func_fixture()
+ User = self.classes.User
+
+ s = Session()
+
+ u1 = aliased(User)
+ self.assert_compile(
+ s.query(User.foobar.label('x'), u1.foobar.label('y')),
+ "SELECT foob(users.name) AS x, foob(users_1.name) AS y "
+ "FROM users, users AS users_1"
+ )
+
+
def test_order_by_column_prop_string(self):
User, Address = self.classes("User", "Address")
self._fixture(label=True)
@@ -2695,7 +2752,9 @@ class CountTest(QueryTest):
eq_(q.distinct().count(), 3)
-class DistinctTest(QueryTest):
+class DistinctTest(QueryTest, AssertsCompiledSQL):
+ __dialect__ = 'default'
+
def test_basic(self):
User = self.classes.User
@@ -2709,19 +2768,22 @@ class DistinctTest(QueryTest):
order_by(desc(User.name)).all()
)
- def test_joined(self):
- """test that orderbys from a joined table get placed into the columns
- clause when DISTINCT is used"""
-
+ def test_columns_augmented_roundtrip_one(self):
User, Address = self.classes.User, self.classes.Address
sess = create_session()
q = sess.query(User).join('addresses').distinct(). \
order_by(desc(Address.email_address))
- assert [User(id=7), User(id=9), User(id=8)] == q.all()
+ eq_(
+ [User(id=7), User(id=9), User(id=8)],
+ q.all()
+ )
- sess.expunge_all()
+ def test_columns_augmented_roundtrip_two(self):
+ User, Address = self.classes.User, self.classes.Address
+
+ sess = create_session()
# test that it works on embedded joinedload/LIMIT subquery
q = sess.query(User).join('addresses').distinct(). \
@@ -2739,6 +2801,131 @@ class DistinctTest(QueryTest):
] == q.all()
self.assert_sql_count(testing.db, go, 1)
+ def test_columns_augmented_roundtrip_three(self):
+ User, Address = self.classes.User, self.classes.Address
+
+ sess = create_session()
+
+ q = sess.query(User.id, User.name.label('foo'), Address.id).\
+ filter(User.name == 'jack').\
+ distinct().\
+ order_by(User.id, User.name, Address.email_address)
+
+ # even though columns are added, they aren't in the result
+ eq_(
+ q.all(),
+ [(7, 'jack', 3), (7, 'jack', 4), (7, 'jack', 2),
+ (7, 'jack', 5), (7, 'jack', 1)]
+ )
+ for row in q:
+ eq_(row.keys(), ['id', 'foo', 'id'])
+
+ def test_columns_augmented_sql_one(self):
+ User, Address = self.classes.User, self.classes.Address
+
+ sess = create_session()
+
+ q = sess.query(User.id, User.name.label('foo'), Address.id).\
+ distinct().\
+ order_by(User.id, User.name, Address.email_address)
+
+ # Address.email_address is added because of DISTINCT,
+ # however User.id and User.name are not, because they're already
+ # there, even though User.name is labeled
+ self.assert_compile(
+ q,
+ "SELECT DISTINCT users.id AS users_id, users.name AS foo, "
+ "addresses.id AS addresses_id, "
+ "addresses.email_address AS addresses_email_address FROM users, "
+ "addresses ORDER BY users.id, users.name, addresses.email_address"
+ )
+
+ def test_columns_augmented_sql_two(self):
+ User, Address = self.classes.User, self.classes.Address
+
+ sess = create_session()
+
+ q = sess.query(User).\
+ options(joinedload(User.addresses)).\
+ distinct().\
+ order_by(User.name, Address.email_address).\
+ limit(5)
+
+ # addresses.email_address is added to the inner query so that
+ # it is available in the ORDER BY
+ self.assert_compile(
+ q,
+ "SELECT anon_1.users_id AS anon_1_users_id, "
+ "anon_1.users_name AS anon_1_users_name, "
+ "anon_1.addresses_email_address AS "
+ "anon_1_addresses_email_address, "
+ "addresses_1.id AS addresses_1_id, "
+ "addresses_1.user_id AS addresses_1_user_id, "
+ "addresses_1.email_address AS addresses_1_email_address "
+ "FROM (SELECT DISTINCT users.id AS users_id, "
+ "users.name AS users_name, "
+ "addresses.email_address AS addresses_email_address "
+ "FROM users, addresses "
+ "ORDER BY users.name, addresses.email_address "
+ "LIMIT :param_1) AS anon_1 LEFT OUTER JOIN "
+ "addresses AS addresses_1 "
+ "ON anon_1.users_id = addresses_1.user_id "
+ "ORDER BY anon_1.users_name, "
+ "anon_1.addresses_email_address, addresses_1.id"
+ )
+
+ def test_columns_augmented_sql_three(self):
+ User, Address = self.classes.User, self.classes.Address
+
+ sess = create_session()
+
+ q = sess.query(User.id, User.name.label('foo'), Address.id).\
+ distinct(User.name).\
+ order_by(User.id, User.name, Address.email_address)
+
+ # no columns are added when DISTINCT ON is used
+ self.assert_compile(
+ q,
+ "SELECT DISTINCT ON (users.name) users.id AS users_id, "
+ "users.name AS foo, addresses.id AS addresses_id FROM users, "
+ "addresses ORDER BY users.id, users.name, addresses.email_address",
+ dialect='postgresql'
+ )
+
+ def test_columns_augmented_sql_four(self):
+ User, Address = self.classes.User, self.classes.Address
+
+ sess = create_session()
+
+ q = sess.query(User).join('addresses').\
+ distinct(Address.email_address). \
+ options(joinedload('addresses')).\
+ order_by(desc(Address.email_address)).limit(2)
+
+ # but for the subquery / eager load case, we still need to make
+ # the inner columns available for the ORDER BY even though it's
+ # a DISTINCT ON
+ self.assert_compile(
+ q,
+ "SELECT anon_1.users_id AS anon_1_users_id, "
+ "anon_1.users_name AS anon_1_users_name, "
+ "anon_1.addresses_email_address AS "
+ "anon_1_addresses_email_address, "
+ "addresses_1.id AS addresses_1_id, "
+ "addresses_1.user_id AS addresses_1_user_id, "
+ "addresses_1.email_address AS addresses_1_email_address "
+ "FROM (SELECT DISTINCT ON (addresses.email_address) "
+ "users.id AS users_id, users.name AS users_name, "
+ "addresses.email_address AS addresses_email_address "
+ "FROM users JOIN addresses ON users.id = addresses.user_id "
+ "ORDER BY addresses.email_address DESC "
+ "LIMIT %(param_1)s) AS anon_1 "
+ "LEFT OUTER JOIN addresses AS addresses_1 "
+ "ON anon_1.users_id = addresses_1.user_id "
+ "ORDER BY anon_1.addresses_email_address DESC, addresses_1.id",
+ dialect='postgresql'
+ )
+
class PrefixWithTest(QueryTest, AssertsCompiledSQL):
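
The new DistinctTest assertions pin down two behaviors: with plain DISTINCT, ORDER BY expressions not already present get appended to the columns clause, while with DISTINCT ON (query.distinct(expr), PostgreSQL only) nothing extra is added. A compile-level sketch, assuming the User / Address fixtures:

    # plain DISTINCT: addresses.email_address is appended to the SELECT list
    q1 = (
        session.query(User.id, User.name)
        .join(User.addresses)
        .distinct()
        .order_by(Address.email_address)
    )

    # DISTINCT ON (users.name): no extra columns are appended
    q2 = (
        session.query(User.id, User.name)
        .join(User.addresses)
        .distinct(User.name)
        .order_by(User.name, User.id)
    )
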
diff --git a/test/orm/test_transaction.py b/test/orm/test_transaction.py
index 7efb5942b..e4ada2292 100644
--- a/test/orm/test_transaction.py
+++ b/test/orm/test_transaction.py
@@ -3,13 +3,13 @@ from sqlalchemy import (
testing, exc as sa_exc, event, String, Column, Table, select, func)
from sqlalchemy.testing import (
fixtures, engines, eq_, assert_raises, assert_raises_message,
- assert_warnings, mock, expect_warnings)
+ assert_warnings, mock, expect_warnings, is_, is_not_)
from sqlalchemy.orm import (
exc as orm_exc, Session, mapper, sessionmaker, create_session,
- relationship, attributes)
+ relationship, attributes, session as _session)
from sqlalchemy.testing.util import gc_collect
from test.orm._fixtures import FixtureTest
-
+from sqlalchemy import inspect
class SessionTransactionTest(FixtureTest):
run_inserts = None
@@ -657,8 +657,8 @@ class SessionTransactionTest(FixtureTest):
assert session.transaction is not None, \
'autocommit=False should start a new transaction'
- @testing.skip_if("oracle", "oracle doesn't support release of savepoint")
- @testing.requires.savepoints
+ @testing.requires.python2
+ @testing.requires.savepoints_w_release
def test_report_primary_error_when_rollback_fails(self):
User, users = self.classes.User, self.tables.users
@@ -666,7 +666,7 @@ class SessionTransactionTest(FixtureTest):
session = Session(testing.db)
- with expect_warnings(".*due to an additional ROLLBACK.*INSERT INTO"):
+ with expect_warnings(".*during handling of a previous exception.*"):
session.begin_nested()
savepoint = session.\
connection()._Connection__transaction._savepoint
@@ -1290,6 +1290,35 @@ class SavepointTest(_LocalFixture):
assert u1 in s
assert u1 not in s.deleted
+ @testing.requires.savepoints_w_release
+ def test_savepoint_lost_still_runs(self):
+ User = self.classes.User
+ s = self.session(bind=self.bind)
+ trans = s.begin_nested()
+ s.connection()
+ u1 = User(name='ed')
+ s.add(u1)
+
+ # kill off the transaction
+ nested_trans = trans._connections[self.bind][1]
+ nested_trans._do_commit()
+
+ is_(s.transaction, trans)
+ assert_raises(
+ sa_exc.DBAPIError,
+ s.rollback
+ )
+
+ assert u1 not in s.new
+
+ is_(trans._state, _session.CLOSED)
+ is_not_(s.transaction, trans)
+ is_(s.transaction._state, _session.ACTIVE)
+
+ is_(s.transaction.nested, False)
+
+ is_(s.transaction._parent, None)
+
class AccountingFlagsTest(_LocalFixture):
__backend__ = True
@@ -1518,6 +1547,30 @@ class NaturalPKRollbackTest(fixtures.MappedTest):
session.rollback()
+ def test_reloaded_deleted_checked_for_expiry(self):
+ """test issue #3677"""
+ users, User = self.tables.users, self.classes.User
+
+ mapper(User, users)
+
+ u1 = User(name='u1')
+
+ s = Session()
+ s.add(u1)
+ s.flush()
+ del u1
+ gc_collect()
+
+ u1 = s.query(User).first() # noqa
+
+ s.rollback()
+
+ u2 = User(name='u1')
+ s.add(u2)
+ s.commit()
+
+ assert inspect(u2).persistent
+
def test_key_replaced_by_update(self):
users, User = self.tables.users, self.classes.User
diff --git a/test/requirements.py b/test/requirements.py
index 522a376e0..9e041709c 100644
--- a/test/requirements.py
+++ b/test/requirements.py
@@ -225,7 +225,7 @@ class DefaultRequirements(SuiteRequirements):
@property
def isolation_level(self):
return only_on(
- ('postgresql', 'sqlite', 'mysql'),
+ ('postgresql', 'sqlite', 'mysql', 'mssql'),
"DBAPI has no isolation level support"
) + fails_on('postgresql+pypostgresql',
'pypostgresql bombs on multiple isolation level calls')
@@ -286,6 +286,10 @@ class DefaultRequirements(SuiteRequirements):
("mysql", "<", (5, 0, 3)),
], "savepoints not supported")
+ @property
+ def savepoints_w_release(self):
+ return self.savepoints + skip_if(
+ "oracle", "oracle doesn't support release of savepoint")
@property
def schemas(self):
@@ -528,7 +532,7 @@ class DefaultRequirements(SuiteRequirements):
"""Target driver reflects the name of primary key constraints."""
return fails_on_everything_except('postgresql', 'oracle', 'mssql',
- 'sybase')
+ 'sybase', 'sqlite')
@property
def json_type(self):
diff --git a/test/sql/test_compiler.py b/test/sql/test_compiler.py
index 85a9f77bc..dae178d31 100644
--- a/test/sql/test_compiler.py
+++ b/test/sql/test_compiler.py
@@ -638,6 +638,21 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
"myothertable.otherid = :otherid_2)) AS anon_1"
)
+ self.assert_compile(
+ select([exists([1])]),
+ "SELECT EXISTS (SELECT 1) AS anon_1"
+ )
+
+ self.assert_compile(
+ select([~exists([1])]),
+ "SELECT NOT (EXISTS (SELECT 1)) AS anon_1"
+ )
+
+ self.assert_compile(
+ select([~(~exists([1]))]),
+ "SELECT NOT (NOT (EXISTS (SELECT 1))) AS anon_1"
+ )
+
def test_where_subquery(self):
s = select([addresses.c.street], addresses.c.user_id
== users.c.user_id, correlate=True).alias('s')
@@ -1538,6 +1553,26 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
"mytable.myid = :myid_1 OR myothertable.othername != :othername_1 "
"OR EXISTS (select yay from foo where boo = lar)", )
+ def test_full_outer_join(self):
+ for spec in [
+ join(table1, table2, table1.c.myid == table2.c.otherid, full=True),
+ outerjoin(
+ table1, table2,
+ table1.c.myid == table2.c.otherid, full=True),
+ table1.join(
+ table2,
+ table1.c.myid == table2.c.otherid, full=True),
+ table1.outerjoin(
+ table2,
+ table1.c.myid == table2.c.otherid, full=True),
+ ]:
+ stmt = select([table1]).select_from(spec)
+ self.assert_compile(
+ stmt,
+ "SELECT mytable.myid, mytable.name, mytable.description FROM "
+ "mytable FULL OUTER JOIN myothertable "
+ "ON mytable.myid = myothertable.otherid")
+
def test_compound_selects(self):
assert_raises_message(
exc.ArgumentError,
@@ -2714,6 +2749,31 @@ class KwargPropagationTest(fixtures.TestBase):
class CRUDTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = 'default'
+ def test_insert_literal_binds(self):
+ stmt = table1.insert().values(myid=3, name='jack')
+
+ self.assert_compile(
+ stmt,
+ "INSERT INTO mytable (myid, name) VALUES (3, 'jack')",
+ literal_binds=True)
+
+ def test_update_literal_binds(self):
+ stmt = table1.update().values(name='jack').\
+ where(table1.c.name == 'jill')
+
+ self.assert_compile(
+ stmt,
+ "UPDATE mytable SET name='jack' WHERE mytable.name = 'jill'",
+ literal_binds=True)
+
+ def test_delete_literal_binds(self):
+ stmt = table1.delete().where(table1.c.name == 'jill')
+
+ self.assert_compile(
+ stmt,
+ "DELETE FROM mytable WHERE mytable.name = 'jill'",
+ literal_binds=True)
+
def test_correlated_update(self):
# test against a straight text subquery
u = update(
@@ -2873,48 +2933,6 @@ class CRUDTest(fixtures.TestBase, AssertsCompiledSQL):
'x2': 1,
'y': 2})
- def test_unconsumed_names(self):
- t = table("t", column("x"), column("y"))
- t2 = table("t2", column("q"), column("z"))
- assert_raises_message(
- exc.CompileError,
- "Unconsumed column names: z",
- t.insert().values(x=5, z=5).compile,
- )
- assert_raises_message(
- exc.CompileError,
- "Unconsumed column names: z",
- t.update().values(x=5, z=5).compile,
- )
-
- assert_raises_message(
- exc.CompileError,
- "Unconsumed column names: j",
- t.update().values(x=5, j=7).values({t2.c.z: 5}).
- where(t.c.x == t2.c.q).compile,
- )
-
- # bindparam names don't get counted
- i = t.insert().values(x=3 + bindparam('x2'))
- self.assert_compile(
- i,
- "INSERT INTO t (x) VALUES ((:param_1 + :x2))"
- )
-
- # even if in the params list
- i = t.insert().values(x=3 + bindparam('x2'))
- self.assert_compile(
- i,
- "INSERT INTO t (x) VALUES ((:param_1 + :x2))",
- params={"x2": 1}
- )
-
- assert_raises_message(
- exc.CompileError,
- "Unconsumed column names: j",
- t.update().values(x=5, j=7).compile,
- column_keys=['j']
- )
def test_labels_no_collision(self):
@@ -3830,3 +3848,66 @@ class ResultMapTest(fixtures.TestBase):
(table1.c.description, 'description', 'description'),
table1.c.description.type)}
)
+
+ def test_select_wraps_for_translate_ambiguity(self):
+ # test for issue #3657
+ t = table('a', column('x'), column('y'), column('z'))
+
+ l1, l2, l3 = t.c.z.label('a'), t.c.x.label('b'), t.c.x.label('c')
+ orig = [t.c.x, t.c.y, l1, l2, l3]
+ stmt = select(orig)
+ wrapped = stmt._generate()
+ wrapped = wrapped.column(
+ func.ROW_NUMBER().over(order_by=t.c.z)).alias()
+
+ wrapped_again = select([c for c in wrapped.c])
+
+ compiled = wrapped_again.compile(
+ compile_kwargs={'select_wraps_for': stmt})
+
+ proxied = [obj[0] for (k, n, obj, type_) in compiled._result_columns]
+ for orig_obj, proxied_obj in zip(
+ orig,
+ proxied
+ ):
+ is_(orig_obj, proxied_obj)
+
+ def test_select_wraps_for_translate_ambiguity_dupe_cols(self):
+ # test for issue #3657
+ t = table('a', column('x'), column('y'), column('z'))
+
+ l1, l2, l3 = t.c.z.label('a'), t.c.x.label('b'), t.c.x.label('c')
+ orig = [t.c.x, t.c.y, l1, l2, l3]
+
+ # create the statement with some duplicate columns. right now
+ # the behavior is that these redundant columns are deduped.
+ stmt = select([t.c.x, t.c.y, l1, t.c.y, l2, t.c.x, l3])
+
+ # so the statement has 7 inner columns...
+ eq_(len(list(stmt.inner_columns)), 7)
+
+ # but only exposes 5 of them, the other two are dupes of x and y
+ eq_(len(stmt.c), 5)
+
+ # and when it generates a SELECT it will also render only 5
+ eq_(len(stmt._columns_plus_names), 5)
+
+ wrapped = stmt._generate()
+ wrapped = wrapped.column(
+ func.ROW_NUMBER().over(order_by=t.c.z)).alias()
+
+ # so when we wrap here we're going to have only 5 columns
+ wrapped_again = select([c for c in wrapped.c])
+
+ # so the compiler logic that matches up the "wrapper" to the
+ # "select_wraps_for" can't use inner_columns to match because
+ # these collections are not the same
+ compiled = wrapped_again.compile(
+ compile_kwargs={'select_wraps_for': stmt})
+
+ proxied = [obj[0] for (k, n, obj, type_) in compiled._result_columns]
+ for orig_obj, proxied_obj in zip(
+ orig,
+ proxied
+ ):
+ is_(orig_obj, proxied_obj)
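
The literal_binds assertions in CRUDTest can be reproduced outside the test harness through compile_kwargs; a self-contained sketch with an illustrative table:

    from sqlalchemy import Column, Integer, MetaData, String, Table

    metadata = MetaData()
    mytable = Table(
        'mytable', metadata,
        Column('myid', Integer),
        Column('name', String(30)),
    )

    stmt = mytable.insert().values(myid=3, name='jack')
    print(stmt.compile(compile_kwargs={"literal_binds": True}))
    # INSERT INTO mytable (myid, name) VALUES (3, 'jack')
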
diff --git a/test/sql/test_cte.py b/test/sql/test_cte.py
index b59914afc..aa674403e 100644
--- a/test/sql/test_cte.py
+++ b/test/sql/test_cte.py
@@ -1,6 +1,6 @@
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import AssertsCompiledSQL, assert_raises_message
-from sqlalchemy.sql import table, column, select, func, literal
+from sqlalchemy.sql import table, column, select, func, literal, exists, and_
from sqlalchemy.dialects import mssql
from sqlalchemy.engine import default
from sqlalchemy.exc import CompileError
@@ -8,7 +8,7 @@ from sqlalchemy.exc import CompileError
class CTETest(fixtures.TestBase, AssertsCompiledSQL):
- __dialect__ = 'default'
+ __dialect__ = 'default_enhanced'
def test_nonrecursive(self):
orders = table('orders',
@@ -492,3 +492,151 @@ class CTETest(fixtures.TestBase, AssertsCompiledSQL):
'regional_sales WHERE "order"."order" > regional_sales."order"',
dialect='postgresql'
)
+
+ def test_upsert_from_select(self):
+ orders = table(
+ 'orders',
+ column('region'),
+ column('amount'),
+ column('product'),
+ column('quantity')
+ )
+
+ upsert = (
+ orders.update()
+ .where(orders.c.region == 'Region1')
+ .values(amount=1.0, product='Product1', quantity=1)
+ .returning(*(orders.c._all_columns)).cte('upsert'))
+
+ insert = orders.insert().from_select(
+ orders.c.keys(),
+ select([
+ literal('Region1'), literal(1.0),
+ literal('Product1'), literal(1)
+ ]).where(~exists(upsert.select()))
+ )
+
+ self.assert_compile(
+ insert,
+ "WITH upsert AS (UPDATE orders SET amount=:amount, "
+ "product=:product, quantity=:quantity "
+ "WHERE orders.region = :region_1 "
+ "RETURNING orders.region, orders.amount, "
+ "orders.product, orders.quantity) "
+ "INSERT INTO orders (region, amount, product, quantity) "
+ "SELECT :param_1 AS anon_1, :param_2 AS anon_2, "
+ ":param_3 AS anon_3, :param_4 AS anon_4 WHERE NOT (EXISTS "
+ "(SELECT upsert.region, upsert.amount, upsert.product, "
+ "upsert.quantity FROM upsert))"
+ )
+
+ def test_pg_example_one(self):
+ products = table('products', column('id'), column('date'))
+ products_log = table('products_log', column('id'), column('date'))
+
+ moved_rows = products.delete().where(and_(
+ products.c.date >= 'dateone',
+ products.c.date < 'datetwo')).returning(*products.c).\
+ cte('moved_rows')
+
+ stmt = products_log.insert().from_select(
+ products_log.c, moved_rows.select())
+ self.assert_compile(
+ stmt,
+ "WITH moved_rows AS "
+ "(DELETE FROM products WHERE products.date >= :date_1 "
+ "AND products.date < :date_2 "
+ "RETURNING products.id, products.date) "
+ "INSERT INTO products_log (id, date) "
+ "SELECT moved_rows.id, moved_rows.date FROM moved_rows"
+ )
+
+ def test_pg_example_two(self):
+ products = table('products', column('id'), column('price'))
+
+ t = products.update().values(price='someprice').\
+ returning(*products.c).cte('t')
+ stmt = t.select()
+
+ self.assert_compile(
+ stmt,
+ "WITH t AS "
+ "(UPDATE products SET price=:price "
+ "RETURNING products.id, products.price) "
+ "SELECT t.id, t.price "
+ "FROM t"
+ )
+
+ def test_pg_example_three(self):
+
+ parts = table(
+ 'parts',
+ column('part'),
+ column('sub_part'),
+ )
+
+ included_parts = select([
+ parts.c.sub_part,
+ parts.c.part]).\
+ where(parts.c.part == 'our part').\
+ cte("included_parts", recursive=True)
+
+ pr = included_parts.alias('pr')
+ p = parts.alias('p')
+ included_parts = included_parts.union_all(
+ select([
+ p.c.sub_part,
+ p.c.part]).
+ where(p.c.part == pr.c.sub_part)
+ )
+ stmt = parts.delete().where(
+ parts.c.part.in_(select([included_parts.c.part]))).returning(
+ parts.c.part)
+
+ # the outer RETURNING is a bonus over what PG's docs have
+ self.assert_compile(
+ stmt,
+ "WITH RECURSIVE included_parts(sub_part, part) AS "
+ "(SELECT parts.sub_part AS sub_part, parts.part AS part "
+ "FROM parts "
+ "WHERE parts.part = :part_1 "
+ "UNION ALL SELECT p.sub_part AS sub_part, p.part AS part "
+ "FROM parts AS p, included_parts AS pr "
+ "WHERE p.part = pr.sub_part) "
+ "DELETE FROM parts WHERE parts.part IN "
+ "(SELECT included_parts.part FROM included_parts) "
+ "RETURNING parts.part"
+ )
+
+ def test_insert_in_the_cte(self):
+ products = table('products', column('id'), column('price'))
+
+ cte = products.insert().values(id=1, price=27.0).\
+ returning(*products.c).cte('pd')
+
+ stmt = select([cte])
+
+ self.assert_compile(
+ stmt,
+ "WITH pd AS "
+ "(INSERT INTO products (id, price) VALUES (:id, :price) "
+ "RETURNING products.id, products.price) "
+ "SELECT pd.id, pd.price "
+ "FROM pd"
+ )
+
+ def test_update_pulls_from_cte(self):
+ products = table('products', column('id'), column('price'))
+
+ cte = products.select().cte('pd')
+
+ stmt = products.update().where(products.c.price == cte.c.price)
+
+ self.assert_compile(
+ stmt,
+ "WITH pd AS "
+ "(SELECT products.id AS id, products.price AS price "
+ "FROM products) "
+ "UPDATE products SET id=:id, price=:price FROM pd "
+ "WHERE products.price = pd.price"
+ )
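
The CTE tests above exercise DML inside the WITH clause. A minimal sketch of the "move rows" recipe they mirror, compiled against the PostgreSQL dialect since RETURNING is involved; table names follow the tests:

    from sqlalchemy import column, select, table
    from sqlalchemy.dialects import postgresql

    products = table('products', column('id'), column('date'))
    products_log = table('products_log', column('id'), column('date'))

    moved_rows = (
        products.delete()
        .where(products.c.date < 'datetwo')
        .returning(*products.c)
        .cte('moved_rows')
    )

    stmt = products_log.insert().from_select(
        products_log.c, moved_rows.select())
    print(stmt.compile(dialect=postgresql.dialect()))
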
diff --git a/test/sql/test_insert.py b/test/sql/test_insert.py
index ea4de032c..f2515c4eb 100644
--- a/test/sql/test_insert.py
+++ b/test/sql/test_insert.py
@@ -1,14 +1,13 @@
#! coding:utf-8
from sqlalchemy import Column, Integer, MetaData, String, Table,\
- bindparam, exc, func, insert, select, column, text
+ bindparam, exc, func, insert, select, column, text, table
from sqlalchemy.dialects import mysql, postgresql
from sqlalchemy.engine import default
from sqlalchemy.testing import AssertsCompiledSQL,\
assert_raises_message, fixtures, eq_
from sqlalchemy.sql import crud
-
class _InsertTestBase(object):
@classmethod
@@ -55,6 +54,69 @@ class InsertTest(_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL):
'INSERT INTO mytable (myid, name) VALUES (:myid, :name)',
checkparams=checkparams)
+ def test_unconsumed_names_kwargs(self):
+ t = table("t", column("x"), column("y"))
+ assert_raises_message(
+ exc.CompileError,
+ "Unconsumed column names: z",
+ t.insert().values(x=5, z=5).compile,
+ )
+
+ def test_bindparam_name_no_consume_error(self):
+ t = table("t", column("x"), column("y"))
+ # bindparam names don't get counted
+ i = t.insert().values(x=3 + bindparam('x2'))
+ self.assert_compile(
+ i,
+ "INSERT INTO t (x) VALUES ((:param_1 + :x2))"
+ )
+
+ # even if in the params list
+ i = t.insert().values(x=3 + bindparam('x2'))
+ self.assert_compile(
+ i,
+ "INSERT INTO t (x) VALUES ((:param_1 + :x2))",
+ params={"x2": 1}
+ )
+
+ def test_unconsumed_names_values_dict(self):
+ table1 = self.tables.mytable
+
+ checkparams = {
+ 'myid': 3,
+ 'name': 'jack',
+ 'unknowncol': 'oops'
+ }
+
+ stmt = insert(table1, values=checkparams)
+ assert_raises_message(
+ exc.CompileError,
+ 'Unconsumed column names: unknowncol',
+ stmt.compile,
+ dialect=postgresql.dialect()
+ )
+
+ def test_unconsumed_names_multi_values_dict(self):
+ table1 = self.tables.mytable
+
+ checkparams = [{
+ 'myid': 3,
+ 'name': 'jack',
+ 'unknowncol': 'oops'
+ }, {
+ 'myid': 4,
+ 'name': 'someone',
+ 'unknowncol': 'oops'
+ }]
+
+ stmt = insert(table1, values=checkparams)
+ assert_raises_message(
+ exc.CompileError,
+ 'Unconsumed column names: unknowncol',
+ stmt.compile,
+ dialect=postgresql.dialect()
+ )
+
def test_insert_with_values_tuple(self):
table1 = self.tables.mytable
@@ -188,9 +250,10 @@ class InsertTest(_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL):
from_select(("otherid", "othername"), sel)
self.assert_compile(
ins,
- "INSERT INTO myothertable (otherid, othername) WITH anon_1 AS "
+ "WITH anon_1 AS "
"(SELECT mytable.name AS name FROM mytable "
"WHERE mytable.name = :name_1) "
+ "INSERT INTO myothertable (otherid, othername) "
"SELECT mytable.myid, mytable.name FROM mytable, anon_1 "
"WHERE mytable.name = anon_1.name",
checkparams={"name_1": "bar"}
@@ -205,9 +268,9 @@ class InsertTest(_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL):
self.assert_compile(
ins,
- "INSERT INTO mytable (myid, name, description) "
"WITH c AS (SELECT mytable.myid AS myid, mytable.name AS name, "
"mytable.description AS description FROM mytable) "
+ "INSERT INTO mytable (myid, name, description) "
"SELECT c.myid, c.name, c.description FROM c"
)
diff --git a/test/sql/test_lateral.py b/test/sql/test_lateral.py
new file mode 100644
index 000000000..301d78aae
--- /dev/null
+++ b/test/sql/test_lateral.py
@@ -0,0 +1,134 @@
+from sqlalchemy.testing import fixtures
+from sqlalchemy.testing import AssertsCompiledSQL, assert_raises_message
+from sqlalchemy.sql import select, func
+from sqlalchemy.engine import default
+from sqlalchemy import exc
+from sqlalchemy import Table, Integer, String, ForeignKey, Column, true
+from sqlalchemy import lateral, outerjoin, join
+
+
+class LateralTest(fixtures.TablesTest, AssertsCompiledSQL):
+ __dialect__ = default.DefaultDialect(supports_native_boolean=True)
+
+ run_setup_bind = None
+
+ run_create_tables = None
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table('people', metadata,
+ Column('people_id', Integer, primary_key=True),
+ Column('age', Integer),
+ Column('name', String(30)))
+ Table('bookcases', metadata,
+ Column('bookcase_id', Integer, primary_key=True),
+ Column(
+ 'bookcase_owner_id',
+ Integer, ForeignKey('people.people_id')),
+ Column('bookcase_shelves', Integer),
+ Column('bookcase_width', Integer))
+ Table('books', metadata,
+ Column('book_id', Integer, primary_key=True),
+ Column(
+ 'bookcase_id', Integer, ForeignKey('bookcases.bookcase_id')),
+ Column('book_owner_id', Integer, ForeignKey('people.people_id')),
+ Column('book_weight', Integer))
+
+ def test_standalone(self):
+ table1 = self.tables.people
+ subq = select([table1.c.people_id])
+
+ # alias name is not rendered because subquery is not
+ # in the context of a FROM clause
+ self.assert_compile(
+ lateral(subq, name="alias"),
+ 'LATERAL (SELECT people.people_id FROM people)'
+ )
+
+ self.assert_compile(
+ subq.lateral(name="alias"),
+ 'LATERAL (SELECT people.people_id FROM people)'
+ )
+
+ def test_select_from(self):
+ table1 = self.tables.people
+ subq = select([table1.c.people_id])
+
+ # in a FROM context, now you get "AS alias" and column labeling
+ self.assert_compile(
+ select([subq.lateral(name='alias')]),
+ 'SELECT alias.people_id FROM LATERAL '
+ '(SELECT people.people_id AS people_id FROM people) AS alias'
+ )
+
+ def test_plain_join(self):
+ table1 = self.tables.people
+ table2 = self.tables.books
+ subq = select([table2.c.book_id]).\
+ where(table2.c.book_owner_id == table1.c.people_id)
+
+ # FROM books, people? isn't this wrong? No! Because this is
+ # only a fragment, there is no enclosing SELECT for "people" to
+ # correlate against, so it stays in the subquery's FROM list
+ self.assert_compile(
+ join(table1, lateral(subq, name="alias"), true()),
+ "people JOIN LATERAL (SELECT books.book_id AS book_id "
+ "FROM books, people WHERE books.book_owner_id = people.people_id) "
+ "AS alias ON true"
+ )
+
+ # put it in the correct context; implicit correlation works fine
+ self.assert_compile(
+ select([table1]).select_from(
+ join(table1, lateral(subq, name="alias"), true())
+ ),
+ "SELECT people.people_id, people.age, people.name "
+ "FROM people JOIN LATERAL (SELECT books.book_id AS book_id "
+ "FROM books WHERE books.book_owner_id = people.people_id) "
+ "AS alias ON true"
+ )
+
+ # explicit correlation
+ subq = subq.correlate(table1)
+ self.assert_compile(
+ select([table1]).select_from(
+ join(table1, lateral(subq, name="alias"), true())
+ ),
+ "SELECT people.people_id, people.age, people.name "
+ "FROM people JOIN LATERAL (SELECT books.book_id AS book_id "
+ "FROM books WHERE books.book_owner_id = people.people_id) "
+ "AS alias ON true"
+ )
+
+ def test_join_lateral_w_select_subquery(self):
+ table1 = self.tables.people
+ table2 = self.tables.books
+
+ subq = select([table2.c.book_id]).\
+ correlate(table1).\
+ where(table1.c.people_id == table2.c.book_owner_id).lateral()
+ stmt = select([table1, subq.c.book_id]).\
+ select_from(table1.join(subq, true()))
+
+ self.assert_compile(
+ stmt,
+ "SELECT people.people_id, people.age, people.name, anon_1.book_id "
+ "FROM people JOIN LATERAL (SELECT books.book_id AS book_id "
+ "FROM books "
+ "WHERE people.people_id = books.book_owner_id) AS anon_1 ON true"
+ )
+
+ def test_from_function(self):
+ bookcases = self.tables.bookcases
+ srf = lateral(func.generate_series(1, bookcases.c.bookcase_shelves))
+
+ self.assert_compile(
+ select([bookcases]).select_from(bookcases.join(srf, true())),
+ "SELECT bookcases.bookcase_id, bookcases.bookcase_owner_id, "
+ "bookcases.bookcase_shelves, bookcases.bookcase_width "
+ "FROM bookcases JOIN "
+ "LATERAL generate_series(:generate_series_1, "
+ "bookcases.bookcase_shelves) AS anon_1 ON true"
+ )
+
+
+
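
A compile-level sketch of the lateral() construct the new test_lateral.py module covers; the table definitions here are illustrative and the statement is only rendered, not executed:

    from sqlalchemy import (
        Column, Integer, MetaData, String, Table, select, true)
    from sqlalchemy.dialects import postgresql

    metadata = MetaData()
    people = Table(
        'people', metadata,
        Column('people_id', Integer, primary_key=True),
        Column('name', String(30)))
    books = Table(
        'books', metadata,
        Column('book_id', Integer, primary_key=True),
        Column('book_owner_id', Integer))

    subq = (
        select([books.c.book_id])
        .correlate(people)
        .where(people.c.people_id == books.c.book_owner_id)
        .lateral(name='books_lateral')
    )

    stmt = select([people, subq.c.book_id]).select_from(
        people.join(subq, true()))
    print(stmt.compile(dialect=postgresql.dialect()))
    # ... FROM people JOIN LATERAL (SELECT ...) AS books_lateral ON true
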
diff --git a/test/sql/test_operators.py b/test/sql/test_operators.py
index 6a6c749a4..86286a9a3 100644
--- a/test/sql/test_operators.py
+++ b/test/sql/test_operators.py
@@ -15,7 +15,8 @@ from sqlalchemy.sql.elements import _literal_as_text
from sqlalchemy.schema import Column, Table, MetaData
from sqlalchemy.sql import compiler
from sqlalchemy.types import TypeEngine, TypeDecorator, UserDefinedType, \
- Boolean, NullType, MatchType, Indexable, Concatenable, ARRAY, JSON
+ Boolean, NullType, MatchType, Indexable, Concatenable, ARRAY, JSON, \
+ DateTime
from sqlalchemy.dialects import mysql, firebird, postgresql, oracle, \
sqlite, mssql
from sqlalchemy import util
@@ -265,6 +266,18 @@ class DefaultColumnComparatorTest(fixtures.TestBase):
expr.operator, operator.add
)
+ def test_contains_override_raises(self):
+ for col in [
+ Column('x', String),
+ Column('x', Integer),
+ Column('x', DateTime)
+ ]:
+ assert_raises_message(
+ NotImplementedError,
+ "Operator 'contains' is not supported on this expression",
+ lambda: 'foo' in col
+ )
+
class CustomUnaryOperatorTest(fixtures.TestBase, testing.AssertsCompiledSQL):
__dialect__ = 'default'
@@ -820,6 +833,15 @@ class ArrayIndexOpTest(fixtures.TestBase, testing.AssertsCompiledSQL):
checkparams={'x_1': 5}
)
+ def test_contains_override_raises(self):
+ col = Column('x', self.MyType())
+
+ assert_raises_message(
+ NotImplementedError,
+ "Operator 'contains' is not supported on this expression",
+ lambda: 'foo' in col
+ )
+
def test_getindex_sqlexpr(self):
col = Column('x', self.MyType())
diff --git a/test/sql/test_resultset.py b/test/sql/test_resultset.py
index bd2b8c0ae..aaeb82fa4 100644
--- a/test/sql/test_resultset.py
+++ b/test/sql/test_resultset.py
@@ -204,7 +204,8 @@ class ResultProxyTest(fixtures.TablesTest):
lambda: result[0][addresses.c.address_id])
def test_column_error_printing(self):
- row = testing.db.execute(select([1])).first()
+ result = testing.db.execute(select([1]))
+ row = result.first()
class unprintable(object):
@@ -222,6 +223,14 @@ class ResultProxyTest(fixtures.TablesTest):
assert_raises_message(
exc.NoSuchColumnError,
msg % repl,
+ result._getter, accessor
+ )
+
+ is_(result._getter(accessor, False), None)
+
+ assert_raises_message(
+ exc.NoSuchColumnError,
+ msg % repl,
lambda: row[accessor]
)
diff --git a/test/sql/test_selectable.py b/test/sql/test_selectable.py
index 7203cc5a3..94e4ac024 100644
--- a/test/sql/test_selectable.py
+++ b/test/sql/test_selectable.py
@@ -2217,6 +2217,33 @@ class ResultMapTest(fixtures.TestBase):
[Boolean]
)
+ def test_plain_exists(self):
+ expr = exists([1])
+ eq_(type(expr.type), Boolean)
+ eq_(
+ [type(entry[-1]) for
+ entry in select([expr]).compile()._result_columns],
+ [Boolean]
+ )
+
+ def test_plain_exists_negate(self):
+ expr = ~exists([1])
+ eq_(type(expr.type), Boolean)
+ eq_(
+ [type(entry[-1]) for
+ entry in select([expr]).compile()._result_columns],
+ [Boolean]
+ )
+
+ def test_plain_exists_double_negate(self):
+ expr = ~(~exists([1]))
+ eq_(type(expr.type), Boolean)
+ eq_(
+ [type(entry[-1]) for
+ entry in select([expr]).compile()._result_columns],
+ [Boolean]
+ )
+
def test_column_subquery_plain(self):
t = self._fixture()
s1 = select([t.c.x]).where(t.c.x > 5).as_scalar()
diff --git a/test/sql/test_text.py b/test/sql/test_text.py
index 78c3282ac..20cb2a6fb 100644
--- a/test/sql/test_text.py
+++ b/test/sql/test_text.py
@@ -281,6 +281,17 @@ class BindParamTest(fixtures.TestBase, AssertsCompiledSQL):
dialect="postgresql"
)
+ def test_escaping_double_colons(self):
+ self.assert_compile(
+ text(
+ "SELECT * FROM pg_attribute WHERE "
+ "attrelid = :tab\:\:regclass"),
+ "SELECT * FROM pg_attribute WHERE "
+ "attrelid = %(tab)s::regclass",
+ params={'tab': None},
+ dialect="postgresql"
+ )
+
def test_text_in_select_nonfrom(self):
generate_series = text("generate_series(:x, :y, :z) as s(a)").\
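
test_escaping_double_colons shows the backslash escape that keeps a PostgreSQL cast from being parsed as bind parameters inside text(); a small sketch, rendered only:

    from sqlalchemy import text
    from sqlalchemy.dialects import postgresql

    stmt = text(
        "SELECT * FROM pg_attribute WHERE attrelid = :tab\\:\\:regclass")
    print(stmt.compile(dialect=postgresql.dialect()))
    # SELECT * FROM pg_attribute WHERE attrelid = %(tab)s::regclass
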
diff --git a/test/sql/test_types.py b/test/sql/test_types.py
index b08556926..3d527b261 100644
--- a/test/sql/test_types.py
+++ b/test/sql/test_types.py
@@ -15,6 +15,7 @@ from sqlalchemy.sql import ddl
from sqlalchemy.sql import visitors
from sqlalchemy import inspection
from sqlalchemy import exc, types, util, dialects
+from sqlalchemy.util import OrderedDict
for name in dialects.__all__:
__import__("sqlalchemy.dialects.%s" % name)
from sqlalchemy.sql import operators, column, table, null
@@ -30,6 +31,8 @@ from sqlalchemy.testing import fixtures
from sqlalchemy.testing import mock
+
+
class AdaptTest(fixtures.TestBase):
def _all_dialect_modules(self):
@@ -181,6 +184,7 @@ class AdaptTest(fixtures.TestBase):
eq_(types.String().python_type, str)
eq_(types.Unicode().python_type, util.text_type)
eq_(types.String(convert_unicode=True).python_type, util.text_type)
+ eq_(types.Enum('one', 'two', 'three').python_type, str)
assert_raises(
NotImplementedError,
@@ -277,7 +281,6 @@ class PickleTypesTest(fixtures.TestBase):
Column('Lar', LargeBinary()),
Column('Pic', PickleType()),
Column('Int', Interval()),
- Column('Enu', Enum('x', 'y', 'z', name="somename")),
]
for column_type in column_types:
meta = MetaData()
@@ -1087,41 +1090,128 @@ class UnicodeTest(fixtures.TestBase):
unicodedata.encode('ascii', 'ignore').decode()
)
-enum_table = non_native_enum_table = metadata = None
+class EnumTest(AssertsCompiledSQL, fixtures.TablesTest):
+ __backend__ = True
+
+ class SomeEnum(object):
+ # Implements PEP 435 in the minimal fashion needed by SQLAlchemy
+ __members__ = OrderedDict()
+
+ def __init__(self, name, value):
+ self.name = name
+ self.value = value
+ self.__members__[name] = self
+ setattr(self.__class__, name, self)
-class EnumTest(AssertsCompiledSQL, fixtures.TestBase):
+ one = SomeEnum('one', 1)
+ two = SomeEnum('two', 2)
+ three = SomeEnum('three', 3)
@classmethod
- def setup_class(cls):
- global enum_table, non_native_enum_table, metadata
- metadata = MetaData(testing.db)
- enum_table = Table(
+ def define_tables(cls, metadata):
+ Table(
'enum_table', metadata, Column("id", Integer, primary_key=True),
Column('someenum', Enum('one', 'two', 'three', name='myenum'))
)
- non_native_enum_table = Table(
+ Table(
'non_native_enum_table', metadata,
Column("id", Integer, primary_key=True),
Column('someenum', Enum('one', 'two', 'three', native_enum=False)),
+ Column('someotherenum',
+ Enum('one', 'two', 'three',
+ create_constraint=False, native_enum=False)),
)
- metadata.create_all()
+ Table(
+ 'stdlib_enum_table', metadata,
+ Column("id", Integer, primary_key=True),
+ Column('someenum', Enum(cls.SomeEnum))
+ )
- def teardown(self):
- enum_table.delete().execute()
- non_native_enum_table.delete().execute()
+ def test_python_type(self):
+ eq_(types.Enum(self.SomeEnum).python_type, self.SomeEnum)
+
+ def test_pickle_types(self):
+ global SomeEnum
+ SomeEnum = self.SomeEnum
+ for loads, dumps in picklers():
+ column_types = [
+ Column('Enu', Enum('x', 'y', 'z', name="somename")),
+ Column('En2', Enum(self.SomeEnum)),
+ ]
+ for column_type in column_types:
+ meta = MetaData()
+ Table('foo', meta, column_type)
+ loads(dumps(column_type))
+ loads(dumps(meta))
+
+ def test_validators_pep435(self):
+ type_ = Enum(self.SomeEnum)
+
+ bind_processor = type_.bind_processor(testing.db.dialect)
+ eq_(bind_processor('one'), "one")
+ eq_(bind_processor(self.one), "one")
+ assert_raises_message(
+ LookupError,
+ '"foo" is not among the defined enum values',
+ bind_processor, "foo"
+ )
+
+ result_processor = type_.result_processor(testing.db.dialect, None)
+
+ eq_(result_processor('one'), self.one)
+ assert_raises_message(
+ LookupError,
+ '"foo" is not among the defined enum values',
+ result_processor, "foo"
+ )
+
+ literal_processor = type_.literal_processor(testing.db.dialect)
+ eq_(literal_processor("one"), "'one'")
+ assert_raises_message(
+ LookupError,
+ '"foo" is not among the defined enum values',
+ literal_processor, "foo"
+ )
+
+ def test_validators_plain(self):
+ type_ = Enum("one", "two")
+
+ bind_processor = type_.bind_processor(testing.db.dialect)
+ eq_(bind_processor('one'), "one")
+ assert_raises_message(
+ LookupError,
+ '"foo" is not among the defined enum values',
+ bind_processor, "foo"
+ )
+
+ result_processor = type_.result_processor(testing.db.dialect, None)
+
+ eq_(result_processor('one'), "one")
+ assert_raises_message(
+ LookupError,
+ '"foo" is not among the defined enum values',
+ result_processor, "foo"
+ )
+
+ literal_processor = type_.literal_processor(testing.db.dialect)
+ eq_(literal_processor("one"), "'one'")
+ assert_raises_message(
+ LookupError,
+ '"foo" is not among the defined enum values',
+ literal_processor, "foo"
+ )
- @classmethod
- def teardown_class(cls):
- metadata.drop_all()
@testing.fails_on(
'postgresql+zxjdbc',
'zxjdbc fails on ENUM: column "XXX" is of type XXX '
'but expression is of type character varying')
def test_round_trip(self):
+ enum_table = self.tables['enum_table']
+
enum_table.insert().execute([
{'id': 1, 'someenum': 'two'},
{'id': 2, 'someenum': 'two'},
@@ -1137,7 +1227,51 @@ class EnumTest(AssertsCompiledSQL, fixtures.TestBase):
]
)
+ def test_null_round_trip(self):
+ enum_table = self.tables.enum_table
+ non_native_enum_table = self.tables.non_native_enum_table
+
+ with testing.db.connect() as conn:
+ conn.execute(enum_table.insert(), {"id": 1, "someenum": None})
+ eq_(conn.scalar(select([enum_table.c.someenum])), None)
+
+ with testing.db.connect() as conn:
+ conn.execute(
+ non_native_enum_table.insert(), {"id": 1, "someenum": None})
+ eq_(conn.scalar(select([non_native_enum_table.c.someenum])), None)
+
+
+ @testing.fails_on(
+ 'mysql',
+ "The CHECK clause is parsed but ignored by all storage engines.")
+ @testing.fails_on(
+ 'mssql', "FIXME: MS-SQL 2005 doesn't honor CHECK ?!?")
+ def test_check_constraint(self):
+ assert_raises(
+ (exc.IntegrityError, exc.ProgrammingError),
+ testing.db.execute,
+ "insert into non_native_enum_table "
+ "(id, someenum) values(1, 'four')")
+
+ def test_skip_check_constraint(self):
+ with testing.db.connect() as conn:
+ conn.execute(
+ "insert into non_native_enum_table "
+ "(id, someotherenum) values(1, 'four')"
+ )
+ eq_(
+ conn.scalar("select someotherenum from non_native_enum_table"),
+ "four")
+ assert_raises_message(
+ LookupError,
+ '"four" is not among the defined enum values',
+ conn.scalar,
+ select([self.tables.non_native_enum_table.c.someotherenum])
+ )
+
def test_non_native_round_trip(self):
+ non_native_enum_table = self.tables['non_native_enum_table']
+
non_native_enum_table.insert().execute([
{'id': 1, 'someenum': 'two'},
{'id': 2, 'someenum': 'two'},
@@ -1145,7 +1279,9 @@ class EnumTest(AssertsCompiledSQL, fixtures.TestBase):
])
eq_(
- non_native_enum_table.select().
+ select([
+ non_native_enum_table.c.id,
+ non_native_enum_table.c.someenum]).
order_by(non_native_enum_table.c.id).execute().fetchall(),
[
(1, 'two'),
@@ -1154,6 +1290,25 @@ class EnumTest(AssertsCompiledSQL, fixtures.TestBase):
]
)
+ def test_pep435_enum_round_trip(self):
+ stdlib_enum_table = self.tables['stdlib_enum_table']
+
+ stdlib_enum_table.insert().execute([
+ {'id': 1, 'someenum': self.SomeEnum.two},
+ {'id': 2, 'someenum': self.SomeEnum.two},
+ {'id': 3, 'someenum': self.SomeEnum.one},
+ ])
+
+ eq_(
+ stdlib_enum_table.select().
+ order_by(stdlib_enum_table.c.id).execute().fetchall(),
+ [
+ (1, self.SomeEnum.two),
+ (2, self.SomeEnum.two),
+ (3, self.SomeEnum.one),
+ ]
+ )
+
def test_adapt(self):
from sqlalchemy.dialects.postgresql import ENUM
e1 = Enum('one', 'two', 'three', native_enum=False)
@@ -1163,6 +1318,9 @@ class EnumTest(AssertsCompiledSQL, fixtures.TestBase):
e1 = Enum('one', 'two', 'three', name='foo', schema='bar')
eq_(e1.adapt(ENUM).name, 'foo')
eq_(e1.adapt(ENUM).schema, 'bar')
+ e1 = Enum(self.SomeEnum)
+ eq_(e1.adapt(ENUM).name, 'someenum')
+ eq_(e1.adapt(ENUM).enums, ['one', 'two', 'three'])
@testing.provide_metadata
def test_create_metadata_bound_no_crash(self):
@@ -1171,13 +1329,6 @@ class EnumTest(AssertsCompiledSQL, fixtures.TestBase):
m1.create_all(testing.db)
- @testing.crashes(
- 'mysql', 'Inconsistent behavior across various OS/drivers')
- def test_constraint(self):
- assert_raises(
- exc.DBAPIError, enum_table.insert().execute,
- {'id': 4, 'someenum': 'four'})
-
def test_non_native_constraint_custom_type(self):
class Foob(object):
@@ -1209,12 +1360,10 @@ class EnumTest(AssertsCompiledSQL, fixtures.TestBase):
dialect="default"
)
- @testing.fails_on(
- 'mysql',
- "the CHECK constraint doesn't raise an exception for unknown reason")
- def test_non_native_constraint(self):
+ def test_lookup_failure(self):
assert_raises(
- exc.DBAPIError, non_native_enum_table.insert().execute,
+ exc.StatementError,
+ self.tables['non_native_enum_table'].insert().execute,
{'id': 4, 'someenum': 'four'}
)
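
The rewritten EnumTest covers PEP 435 enumerations passed directly to Enum(); a self-contained sketch with an illustrative enum and table, showing that values round-trip as enum members:

    import enum

    from sqlalchemy import (
        Column, Enum, Integer, MetaData, Table, create_engine, select)

    class Colors(enum.Enum):
        red = 1
        green = 2
        blue = 3

    metadata = MetaData()
    t = Table(
        'color_table', metadata,
        Column('id', Integer, primary_key=True),
        Column('color', Enum(Colors)))

    engine = create_engine('sqlite://')
    metadata.create_all(engine)

    with engine.connect() as conn:
        conn.execute(t.insert(), {'id': 1, 'color': Colors.green})
        print(conn.scalar(select([t.c.color])))   # Colors.green
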
diff --git a/test/sql/test_update.py b/test/sql/test_update.py
index 3ab580b11..872671008 100644
--- a/test/sql/test_update.py
+++ b/test/sql/test_update.py
@@ -1,8 +1,10 @@
-from sqlalchemy import *
+from sqlalchemy import Integer, String, ForeignKey, and_, or_, func, \
+ literal, update, table, bindparam, column, select, exc
from sqlalchemy import testing
from sqlalchemy.dialects import mysql
from sqlalchemy.engine import default
-from sqlalchemy.testing import AssertsCompiledSQL, eq_, fixtures
+from sqlalchemy.testing import AssertsCompiledSQL, eq_, fixtures, \
+ assert_raises_message
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy import util
@@ -188,6 +190,36 @@ class UpdateTest(_UpdateFromTestBase, fixtures.TablesTest, AssertsCompiledSQL):
'mytable.myid = hoho(:hoho_1) AND '
'mytable.name = :param_2 || mytable.name || :param_3')
+ def test_unconsumed_names_kwargs(self):
+ t = table("t", column("x"), column("y"))
+
+ assert_raises_message(
+ exc.CompileError,
+ "Unconsumed column names: z",
+ t.update().values(x=5, z=5).compile,
+ )
+
+ def test_unconsumed_names_values_dict(self):
+ t = table("t", column("x"), column("y"))
+ t2 = table("t2", column("q"), column("z"))
+
+ assert_raises_message(
+ exc.CompileError,
+ "Unconsumed column names: j",
+ t.update().values(x=5, j=7).values({t2.c.z: 5}).
+ where(t.c.x == t2.c.q).compile,
+ )
+
+ def test_unconsumed_names_kwargs_w_keys(self):
+ t = table("t", column("x"), column("y"))
+
+ assert_raises_message(
+ exc.CompileError,
+ "Unconsumed column names: j",
+ t.update().values(x=5, j=7).compile,
+ column_keys=['j']
+ )
+
def test_update_ordered_parameters_1(self):
table1 = self.tables.mytable
diff --git a/tox.ini b/tox.ini
index 7db7d63c4..7d5826a4b 100644
--- a/tox.ini
+++ b/tox.ini
@@ -16,12 +16,12 @@ usedevelop=
deps=pytest
pytest-xdist
mock
- postgresql: .[postgresql]
- mysql: .[mysql]
- mysql: .[pymysql]
- oracle: .[oracle]
- mssql: .[pyodbc]
- mssql: .[pymssql]
+ postgresql: psycopg2
+ mysql: mysqlclient
+ mysql: pymysql
+ oracle: cx_oracle
+ mssql: pyodbc
+ mssql: pymssql
cov: pytest-cov
whitelist_externals=sh
@@ -40,14 +40,16 @@ whitelist_externals=sh
setenv=
PYTHONPATH=
PYTHONNOUSERSITE=1
- BASECOMMAND=python -m pytest -n4 --dropfirst
+ BASECOMMAND=python -m pytest --dropfirst
+ WORKERS=-n4
+ oracle: WORKERS=-n2
nocext: DISABLE_SQLALCHEMY_CEXT=1
cov: COVERAGE={[testenv]cov_args}
sqlite: SQLITE=--db sqlite
postgresql: POSTGRESQL=--db postgresql
mysql: MYSQL=--db mysql --db pymysql
- oracle: ORACLE=--db oracle
+ oracle: ORACLE=--db oracle --low-connections
mssql: MSSQL=--db pyodbc --db pymssql
backendonly: BACKENDONLY=--backend-only
@@ -59,7 +61,9 @@ passenv=ORACLE_HOME NLS_LANG
# for nocext, we rm *.so in lib in case we are doing usedevelop=True
commands=
{nocext}: sh -c "rm -f lib/sqlalchemy/*.so"
- {env:BASECOMMAND} {env:SQLITE:} {env:POSTGRESQL:} {env:MYSQL:} {env:ORACLE:} {env:MSSQL:} {env:BACKENDONLY:} {env:COVERAGE:} {posargs}
+ {env:BASECOMMAND} {env:WORKERS} {env:SQLITE:} {env:POSTGRESQL:} {env:MYSQL:} {env:ORACLE:} {env:MSSQL:} {env:BACKENDONLY:} {env:COVERAGE:} {posargs}
+ {oracle}: python reap_oracle_dbs.py
+
[testenv:pep8]
deps=flake8
@@ -67,6 +71,6 @@ commands = python -m flake8 {posargs}
[flake8]
show-source = True
-ignore = E711,E712,E721,N806
+ignore = E711,E712,E721,N806,D
exclude=.venv,.git,.tox,dist,doc,*egg,build