-rw-r--r-- doc/build/changelog/changelog_09.rst | 137
-rw-r--r-- doc/build/changelog/changelog_10.rst | 271
-rw-r--r-- doc/build/changelog/migration_10.rst | 766
-rw-r--r-- doc/build/core/defaults.rst | 4
-rw-r--r-- doc/build/core/sqlelement.rst | 5
-rw-r--r-- doc/build/dialects/sqlite.rst | 7
-rw-r--r-- doc/build/faq.rst | 106
-rw-r--r-- doc/build/orm/inheritance.rst | 21
-rw-r--r-- doc/build/orm/loading.rst | 26
-rw-r--r-- doc/build/orm/relationships.rst | 135
-rw-r--r-- doc/build/orm/tutorial.rst | 7
-rw-r--r-- examples/generic_associations/discriminator_on_association.py | 1
-rw-r--r-- lib/sqlalchemy/__init__.py | 1
-rw-r--r-- lib/sqlalchemy/dialects/mssql/base.py | 2
-rw-r--r-- lib/sqlalchemy/dialects/mysql/base.py | 32
-rw-r--r-- lib/sqlalchemy/dialects/mysql/mysqlconnector.py | 48
-rw-r--r-- lib/sqlalchemy/dialects/oracle/base.py | 3
-rw-r--r-- lib/sqlalchemy/dialects/postgresql/base.py | 62
-rw-r--r-- lib/sqlalchemy/dialects/postgresql/psycopg2.py | 115
-rw-r--r-- lib/sqlalchemy/dialects/sqlite/__init__.py | 2
-rw-r--r-- lib/sqlalchemy/dialects/sqlite/base.py | 4
-rw-r--r-- lib/sqlalchemy/dialects/sqlite/pysqlcipher.py | 116
-rw-r--r-- lib/sqlalchemy/engine/__init__.py | 11
-rw-r--r-- lib/sqlalchemy/engine/base.py | 2
-rw-r--r-- lib/sqlalchemy/engine/reflection.py | 47
-rw-r--r-- lib/sqlalchemy/engine/strategies.py | 1
-rw-r--r-- lib/sqlalchemy/events.py | 2
-rw-r--r-- lib/sqlalchemy/exc.py | 27
-rw-r--r-- lib/sqlalchemy/ext/declarative/api.py | 2
-rw-r--r-- lib/sqlalchemy/orm/mapper.py | 4
-rw-r--r-- lib/sqlalchemy/orm/path_registry.py | 10
-rw-r--r-- lib/sqlalchemy/orm/persistence.py | 55
-rw-r--r-- lib/sqlalchemy/orm/query.py | 39
-rw-r--r-- lib/sqlalchemy/orm/relationships.py | 56
-rw-r--r-- lib/sqlalchemy/orm/session.py | 108
-rw-r--r-- lib/sqlalchemy/orm/state.py | 15
-rw-r--r-- lib/sqlalchemy/orm/strategies.py | 2
-rw-r--r-- lib/sqlalchemy/orm/strategy_options.py | 8
-rw-r--r-- lib/sqlalchemy/orm/util.py | 51
-rw-r--r-- lib/sqlalchemy/pool.py | 20
-rw-r--r-- lib/sqlalchemy/sql/__init__.py | 1
-rw-r--r-- lib/sqlalchemy/sql/compiler.py | 24
-rw-r--r-- lib/sqlalchemy/sql/crud.py | 97
-rw-r--r-- lib/sqlalchemy/sql/dml.py | 26
-rw-r--r-- lib/sqlalchemy/sql/elements.py | 154
-rw-r--r-- lib/sqlalchemy/sql/expression.py | 10
-rw-r--r-- lib/sqlalchemy/sql/functions.py | 31
-rw-r--r-- lib/sqlalchemy/sql/schema.py | 143
-rw-r--r-- lib/sqlalchemy/sql/selectable.py | 5
-rw-r--r-- lib/sqlalchemy/testing/engines.py | 4
-rw-r--r-- lib/sqlalchemy/testing/exclusions.py | 3
-rw-r--r-- lib/sqlalchemy/testing/plugin/plugin_base.py | 5
-rw-r--r-- lib/sqlalchemy/testing/profiling.py | 14
-rw-r--r-- lib/sqlalchemy/testing/provision.py | 10
-rw-r--r-- lib/sqlalchemy/testing/suite/test_insert.py | 37
-rw-r--r-- lib/sqlalchemy/testing/suite/test_reflection.py | 13
-rw-r--r-- lib/sqlalchemy/util/_collections.py | 9
-rw-r--r-- lib/sqlalchemy/util/langhelpers.py | 10
-rw-r--r-- test/aaa_profiling/test_memusage.py | 26
-rw-r--r-- test/base/test_except.py | 123
-rw-r--r-- test/base/test_utils.py | 67
-rw-r--r-- test/dialect/mssql/test_reflection.py | 4
-rw-r--r-- test/dialect/mysql/test_reflection.py | 32
-rw-r--r-- test/dialect/mysql/test_types.py | 6
-rw-r--r-- test/dialect/postgresql/test_reflection.py | 63
-rw-r--r-- test/dialect/test_oracle.py | 22
-rw-r--r-- test/engine/test_execute.py | 41
-rw-r--r-- test/engine/test_logging.py | 8
-rw-r--r-- test/ext/declarative/test_inheritance.py | 29
-rw-r--r-- test/orm/inheritance/test_single.py | 206
-rw-r--r-- test/orm/test_assorted_eager.py | 4
-rw-r--r-- test/orm/test_bind.py | 413
-rw-r--r-- test/orm/test_cascade.py | 8
-rw-r--r-- test/orm/test_joins.py | 39
-rw-r--r-- test/orm/test_mapper.py | 3
-rw-r--r-- test/orm/test_of_type.py | 16
-rw-r--r-- test/orm/test_relationships.py | 2094
-rw-r--r-- test/orm/test_session.py | 253
-rw-r--r-- test/orm/test_unitofwork.py | 9
-rw-r--r-- test/orm/test_update_delete.py | 183
-rw-r--r-- test/profiles.txt | 104
-rw-r--r-- test/requirements.py | 10
-rw-r--r-- test/sql/test_compiler.py | 58
-rw-r--r-- test/sql/test_defaults.py | 65
-rw-r--r-- test/sql/test_functions.py | 112
-rw-r--r-- test/sql/test_generative.py | 18
-rw-r--r-- test/sql/test_insert.py | 82
-rw-r--r-- test/sql/test_join_rewriting.py | 25
-rw-r--r-- test/sql/test_metadata.py | 124
-rw-r--r-- test/sql/test_operators.py | 21
-rw-r--r-- test/sql/test_query.py | 28
-rw-r--r-- test/sql/test_returning.py | 33
-rw-r--r-- test/sql/test_selectable.py | 24
93 files changed, 5433 insertions, 1847 deletions
diff --git a/doc/build/changelog/changelog_09.rst b/doc/build/changelog/changelog_09.rst
index e3d9175cb..ef0277935 100644
--- a/doc/build/changelog/changelog_09.rst
+++ b/doc/build/changelog/changelog_09.rst
@@ -11,7 +11,144 @@
:start-line: 5
.. changelog::
+ :version: 0.9.9
+
+ .. change::
+ :tags: bug, examples
+ :versions: 1.0.0
+
+ Fixed a bug in the examples/generic_associations/discriminator_on_association.py
+ example, where the subclasses of AddressAssociation were not being
+ mapped as "single table inheritance", leading to problems when trying
+ to use the mappings further.
+
+ .. change::
+ :tags: bug, orm
+ :versions: 1.0.0
+ :tickets: 3251
+
+ Fixed a leak which would occur in the unsupported and highly
+ non-recommended use case of replacing a relationship on a fixed
+ mapped class many times, referring to an arbitrarily growing number of
+ target mappers. A warning is emitted when the old relationship is
+ replaced, however if the mapping were already used for querying, the
+ old relationship would still be referenced within some registries.
+
+ .. change::
+ :tags: bug, sql
+ :versions: 1.0.0
+ :tickets: 3248
+
+ Fixed issue where the columns from a SELECT embedded in an
+ INSERT, either through the values clause or as a "from select",
+ would pollute the column types used in the result set produced by
+ the RETURNING clause when columns from both statements shared the
+ same name, leading to potential errors or mis-adaptation when
+ retrieving the returning rows.
+
+ .. change::
+ :tags: bug, orm, sqlite
+ :versions: 1.0.0
+ :tickets: 3241
+
+ Fixed bug regarding expression mutations which could manifest
+ as a "Could not locate column" error when using
+ :class:`.Query` to select from multiple, anonymous column
+ entities when querying against SQLite, as a side effect of the
+ "join rewriting" feature used by the SQLite dialect.
+
+ .. change::
+ :tags: feature, sqlite
+ :versions: 1.0.0
+
+ Added a new SQLite backend for the SQLCipher backend. This backend
+ provides for encrypted SQLite databases using the pysqlcipher Python
+ driver, which is very similar to the pysqlite driver.
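+
+ A minimal connection sketch (passphrase-in-URL format, per the
+ pysqlcipher dialect documentation)::
+
+     from sqlalchemy import create_engine
+
+     engine = create_engine('sqlite+pysqlcipher://:passphrase@/encrypted.db')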
+
+ .. seealso::
+
+ :mod:`~sqlalchemy.dialects.sqlite.pysqlcipher`
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 3232
+ :versions: 1.0.0
+
+ Fixed bug where the ON clause for :meth:`.Query.join`,
+ and :meth:`.Query.outerjoin` to a single-inheritance subclass
+ using ``of_type()`` would not render the "single table criteria" in
+ the ON clause if the ``from_joinpoint=True`` flag were set.
+
+.. changelog::
:version: 0.9.8
+ :released: October 13, 2014
+
+ .. change::
+ :tags: bug, mysql, mysqlconnector
+ :versions: 1.0.0
+
+ Mysqlconnector as of version 2.0, probably as a side effect of
+ the python 3 merge, now does not expect percent signs (e.g.
+ as used as the modulus operator and others) to be doubled,
+ even when using the "pyformat" bound parameter format (this
+ change is not documented by Mysqlconnector). The dialect now
+ checks for py2k and for mysqlconnector less than version 2.0
+ when detecting if the modulus operator should be rendered as
+ ``%%`` or ``%``.
+
+ .. change::
+ :tags: bug, mysql, mysqlconnector
+ :versions: 1.0.0
+
+ Unicode SQL is now passed for MySQLconnector version 2.0 and above;
+ for Py2k and MySQLconnector versions prior to 2.0, strings are encoded.
+
+
+ .. change::
+ :tags: bug, oracle
+ :versions: 1.0.0
+ :tickets: 2138
+
+ Fixed long-standing bug in Oracle dialect where bound parameter
+ names that started with numbers would not be quoted, as Oracle
+ doesn't like numerics in bound parameter names.
+
+ .. change::
+ :tags: bug, sql
+ :versions: 1.0.0
+ :tickets: 3195
+
+ Fixed bug where a fair number of SQL elements within
+ the sql package would fail to ``__repr__()`` successfully,
+ due to a missing ``description`` attribute; the internal
+ AttributeError raised would then re-invoke ``__repr__()``,
+ leading to a recursion overflow.
+
+ .. change::
+ :tags: bug, declarative, orm
+ :versions: 1.0.0
+ :tickets: 3185
+
+ Fixed "'NoneType' object has no attribute 'concrete'" error
+ when using :class:`.AbstractConcreteBase` in conjunction with
+ a subclass that declares ``__abstract__``.
+
+ .. change::
+ :tags: bug, engine
+ :versions: 1.0.0
+ :tickets: 3200
+
+ The execution options passed to an :class:`.Engine` either via
+ :paramref:`.create_engine.execution_options` or
+ :meth:`.Engine.update_execution_options` are no longer passed to the
+ special :class:`.Connection` used to initialize the dialect
+ within the "first connect" event; dialects will usually
+ perform their own queries in this phase, and none of the
+ current available options should be applied here. In
+ particular, the "autocommit" option was causing an attempt to
+ autocommit within this initial connect which would fail with
+ an AttributeError due to the non-standard state of the
+ :class:`.Connection`.
.. change::
:tags: bug, sqlite
diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst
index 4d5ab1f06..d0d025011 100644
--- a/doc/build/changelog/changelog_10.rst
+++ b/doc/build/changelog/changelog_10.rst
@@ -22,6 +22,277 @@
on compatibility concerns, see :doc:`/changelog/migration_10`.
.. change::
+ :tags: bug, sql
+ :tickets: 3243
+
+ The behavioral contract of the :attr:`.ForeignKeyConstraint.columns`
+ collection has been made consistent; this attribute is now a
+ :class:`.ColumnCollection` like that of all other constraints and
+ is initialized at the point when the constraint is associated with
+ a :class:`.Table`.
+
+ .. seealso::
+
+ :ref:`change_3243`
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 3256
+
+ The :meth:`.PropComparator.of_type` modifier has been
+ improved in conjunction with loader directives such as
+ :func:`.joinedload` and :func:`.contains_eager` such that if
+ two :meth:`.PropComparator.of_type` modifiers of the same
+ base type/path are encountered, they will be joined together
+ into a single "polymorphic" entity, rather than replacing
+ the entity of type A with the one of type B. E.g.
+ a joinedload of ``A.b.of_type(BSub1)->BSub1.c`` combined with
+ joinedload of ``A.b.of_type(BSub2)->BSub2.c`` will create a
+ single joinedload of ``A.b.of_type((BSub1, BSub2)) -> BSub1.c, BSub2.c``,
+ without the need for the ``with_polymorphic`` to be explicit
+ in the query.
+
+ .. seealso::
+
+ :ref:`eagerloading_polymorphic_subtypes` - contains an updated
+ example illustrating the new format.
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 3245
+
+ The :attr:`.Column.key` attribute is now used as the source of
+ anonymous bound parameter names within expressions, to match the
+ existing use of this value as the key when rendered in an INSERT
+ or UPDATE statement. This allows :attr:`.Column.key` to be used
+ as a "substitute" string to work around a difficult column name
+ that doesn't translate well into a bound parameter name. Note that
+ the paramstyle is configurable on :func:`.create_engine` in any case,
+ and most DBAPIs today support a named and positional style.
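+
+ For example (a sketch; the awkward column name is illustrative)::
+
+     from sqlalchemy import Column, Integer, MetaData, Table
+
+     t = Table('t', MetaData(), Column('user defined', Integer, key='udef'))
+
+     # the anonymous bound parameter name is now derived from .key:
+     print(t.c.udef == 5)  # renders: "user defined" = :udef_1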
+
+ .. change::
+ :tags: bug, sql
+ :pullreq: github:146
+
+ Fixed the name of the :paramref:`.PoolEvents.reset.dbapi_connection`
+ parameter as passed to this event; in particular this affects
+ usage of the "named" argument style for this event. Pull request
+ courtesy Jason Goldberger.
+
+ .. change::
+ :tags: feature, sql
+ :pullreq: github:139
+
+ Added a new parameter :paramref:`.Table.tometadata.name` to
+ the :meth:`.Table.tometadata` method. Similar to
+ :paramref:`.Table.tometadata.schema`, this argument causes the newly
+ copied :class:`.Table` to take on the new name instead of
+ the existing one. An interesting capability this adds is that of
+ copying a :class:`.Table` object to the *same* :class:`.MetaData`
+ target with a new name. Pull request courtesy n.d. parker.
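+
+ E.g., copying a table to the *same* :class:`.MetaData` under a new
+ name (a sketch)::
+
+     t2 = t.tometadata(t.metadata, name='t2')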
+
+ .. change::
+ :tags: bug, orm
+ :pullreq: github:137
+
+ Repaired support of the ``copy.deepcopy()`` call when used by the
+ :class:`.orm.util.CascadeOptions` argument, which occurs
+ if ``copy.deepcopy()`` is being used with :func:`.relationship`
+ (not an officially supported use case). Pull request courtesy
+ duesenfranz.
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 3170
+
+ Reversing a change that was made in 0.9, the "singleton" nature
+ of the "constants" :func:`.null`, :func:`.true`, and :func:`.false`
+ has been reverted. These functions returning a "singleton" object
+ had the effect that different instances would be treated as the
+ same regardless of lexical use, which in particular would impact
+ the rendering of the columns clause of a SELECT statement.
+
+ .. seealso::
+
+ :ref:`bug_3170`
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 3139
+
+ Fixed bug where :meth:`.Session.expunge` would not fully detach
+ the given object if the object had been subject to a delete
+ operation that was flushed, but not committed. This would also
+ affect related operations like :func:`.make_transient`.
+
+ .. seealso::
+
+ :ref:`bug_3139`
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 3230
+
+ A warning is emitted in the case of multiple relationships that
+ ultimately will populate a foreign key column in conflict with
+ another, where the relationships are attempting to copy values
+ from different source columns. This occurs in the case where
+ composite foreign keys with overlapping columns are mapped to
+ relationships that each refer to a different referenced column.
+ A new documentation section illustrates the example as well as how
+ to overcome the issue by specifying "foreign" columns specifically
+ on a per-relationship basis.
+
+ .. seealso::
+
+ :ref:`relationship_overlapping_foreignkeys`
+
+ .. change::
+ :tags: feature, sql
+ :tickets: 3172
+
+ Exception messages have been spiffed up a bit. The SQL statement
+ and parameters are not displayed if None, reducing confusion for
+ error messages that weren't related to a statement. The full
+ module and classname for the DBAPI-level exception is displayed,
+ making it clear that this is a wrapped DBAPI exception. The
+ statement and parameters themselves are bounded within bracketed
+ sections to better isolate them from the error message and from
+ each other.
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 3228
+
+ The :meth:`.Query.update` method will now convert string key
+ names in the given dictionary of values into mapped attribute names
+ against the mapped class being updated. Previously, string names
+ were taken in directly and passed to the core update statement without
+ any means to resolve against the mapped entity. Synonyms
+ and hybrid attributes as the subject attributes of
+ :meth:`.Query.update` are also supported.
+
+ .. seealso::
+
+ :ref:`bug_3228`
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 3035
+
+ Improvements to the mechanism used by :class:`.Session` to locate
+ "binds" (e.g. engines to use), such engines can be associated with
+ mixin classes, concrete subclasses, as well as a wider variety
+ of table metadata such as joined inheritance tables.
+
+ .. seealso::
+
+ :ref:`bug_3035`
+
+ .. change::
+ :tags: bug, general
+ :tickets: 3218
+
+ The ``__module__`` attribute is now set for all those SQL and
+ ORM functions that are derived as "public factory" symbols, which
+ should assist with documentation tools being able to report on the
+ target module.
+
+ .. change::
+ :tags: feature, sql
+
+ :meth:`.Insert.from_select` now includes Python and SQL-expression
+ defaults if otherwise unspecified; the limitation where non-
+ server column defaults aren't included in an INSERT FROM
+ SELECT is now lifted and these expressions are rendered as
+ constants into the SELECT statement.
+
+ .. seealso::
+
+ :ref:`feature_insert_from_select_defaults`
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 3233
+
+ Fixed bug in single table inheritance where a chain of joins
+ that included the same single inh entity more than once
+ (normally this should raise an error) could, in some cases
+ depending on what was being joined "from", implicitly alias the
+ second case of the single inh entity, producing
+ a query that "worked". But as this implicit aliasing is not
+ intended in the case of single table inheritance, it didn't
+ really "work" fully and was very misleading, since it wouldn't
+ always appear.
+
+ .. seealso::
+
+ :ref:`bug_3233`
+
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 3222
+
+ The ON clause rendered when using :meth:`.Query.join`,
+ :meth:`.Query.outerjoin`, or the standalone :func:`.orm.join` /
+ :func:`.orm.outerjoin` functions to a single-inheritance subclass will
+ now include the "single table criteria" in the ON clause even
+ if the ON clause is otherwise hand-rolled; it is now added to the
+ criteria using AND, the same way as if joining to a single-table
+ target using relationship or similar.
+
+ This change is somewhere in between a feature and a bug fix.
+
+ .. seealso::
+
+ :ref:`migration_3222`
+
+ .. change::
+ :tags: feature, sql
+ :tickets: 3184
+ :pullreq: bitbucket:30
+
+ The :class:`.UniqueConstraint` construct is now included when
+ reflecting a :class:`.Table` object, for databases where this
+ is applicable. In order to achieve this
+ with sufficient accuracy, the MySQL and Postgresql dialects now contain features
+ that correct for the duplication of indexes and unique constraints
+ when reflecting tables, indexes, and constraints.
+ In the case of MySQL, there is not actually a "unique constraint"
+ concept independent of a "unique index", so for this backend
+ :class:`.UniqueConstraint` remains absent from a
+ reflected :class:`.Table`. For Postgresql, the query used to
+ detect indexes against ``pg_index`` has been improved to check for
+ the same construct in ``pg_constraint``, and the implicitly
+ constructed unique index is not included with a
+ reflected :class:`.Table`.
+
+ In both cases, the :meth:`.Inspector.get_indexes` and the
+ :meth:`.Inspector.get_unique_constraints` methods return both
+ constructs individually, but include a new token
+ ``duplicates_constraint`` in the case of Postgresql or
+ ``duplicates_index`` in the case
+ of MySQL to indicate when this condition is detected.
+ Pull request courtesy Johannes Erdfelt.
+
+ .. seealso::
+
+ :ref:`feature_3184`
+
+ .. change::
+ :tags: feature, postgresql
+ :pullreq: github:134
+
+ Added support for the FILTER keyword as applied to aggregate
+ functions, supported by Postgresql 9.4. Pull request
+ courtesy Ilja Everilä.
+
+ .. seealso::
+
+ :ref:`feature_gh134`
+
+ .. change::
:tags: bug, sql, engine
:tickets: 3215
diff --git a/doc/build/changelog/migration_10.rst b/doc/build/changelog/migration_10.rst
index 0e9dd8d7b..c4157266b 100644
--- a/doc/build/changelog/migration_10.rst
+++ b/doc/build/changelog/migration_10.rst
@@ -8,7 +8,7 @@ What's New in SQLAlchemy 1.0?
undergoing maintenance releases as of May, 2014,
and SQLAlchemy version 1.0, as of yet unreleased.
- Document last updated: September 25, 2014
+ Document last updated: October 23, 2014
Introduction
============
@@ -25,6 +25,141 @@ potentially backwards-incompatible changes.
New Features
============
+.. _feature_3150:
+
+Improvements to declarative mixins, ``@declared_attr`` and related features
+----------------------------------------------------------------------------
+
+The declarative system in conjunction with :class:`.declared_attr` has been
+overhauled to support new capabilities.
+
+A function decorated with :class:`.declared_attr` is now called only **after**
+any mixin-based column copies are generated. This means the function can
+call upon mixin-established columns and will receive a reference to the correct
+:class:`.Column` object::
+
+ class HasFooBar(object):
+ foobar = Column(Integer)
+
+ @declared_attr
+ def foobar_prop(cls):
+ return column_property('foobar: ' + cls.foobar)
+
+ class SomeClass(HasFooBar, Base):
+ __tablename__ = 'some_table'
+ id = Column(Integer, primary_key=True)
+
+Above, ``SomeClass.foobar_prop`` will be invoked against ``SomeClass``,
+and ``SomeClass.foobar`` will be the final :class:`.Column` object that is
+to be mapped to ``SomeClass``, as opposed to the non-copied object present
+directly on ``HasFooBar``, even though the columns aren't mapped yet.
+
+The :class:`.declared_attr` function now **memoizes** the value
+that's returned on a per-class basis, so that repeated calls to the same
+attribute will return the same value. We can alter the example to illustrate
+this::
+
+ class HasFooBar(object):
+ @declared_attr
+ def foobar(cls):
+ return Column(Integer)
+
+ @declared_attr
+ def foobar_prop(cls):
+ return column_property('foobar: ' + cls.foobar)
+
+ class SomeClass(HasFooBar, Base):
+ __tablename__ = 'some_table'
+ id = Column(Integer, primary_key=True)
+
+Previously, ``SomeClass`` would be mapped with one particular copy of
+the ``foobar`` column, but the ``foobar_prop`` by calling upon ``foobar``
+a second time would produce a different column. The value of
+``SomeClass.foobar`` is now memoized during declarative setup time, so that
+even before the attribute is mapped by the mapper, the interim column
+value will remain consistent no matter how many times the
+:class:`.declared_attr` is called upon.
+
+The two behaviors above should help considerably with declarative definition
+of many types of mapper properties that derive from other attributes, where
+the :class:`.declared_attr` function is called upon from other
+:class:`.declared_attr` functions locally present before the class is
+actually mapped.
+
+For a pretty slim edge case where one wishes to build a declarative mixin
+that establishes distinct columns per subclass, a new modifier
+:attr:`.declared_attr.cascading` is added. With this modifier, the
+decorated function will be invoked individually for each class in the
+mapped inheritance hierarchy. While this is already the behavior for
+special attributes such as ``__table_args__`` and ``__mapper_args__``,
+for columns and other properties the behavior by default assumes the attribute
+is affixed to the base class only, and just inherited by subclasses.
+With :attr:`.declared_attr.cascading`, individual behaviors can be
+applied::
+
+ class HasSomeAttribute(object):
+ @declared_attr.cascading
+ def some_id(cls):
+ if has_inherited_table(cls):
+ return Column(ForeignKey('myclass.id'), primary_key=True)
+ else:
+ return Column(Integer, primary_key=True)
+
+
+ class MyClass(HasSomeAttribute, Base):
+ ""
+ # ...
+
+ class MySubClass(MyClass):
+ ""
+ # ...
+
+.. seealso::
+
+ :ref:`mixin_inheritance_columns`
+
+Finally, the :class:`.AbstractConcreteBase` class has been reworked
+so that a relationship or other mapper property can be set up inline
+on the abstract base::
+
+ from sqlalchemy import Column, Integer, ForeignKey
+ from sqlalchemy.orm import relationship
+ from sqlalchemy.ext.declarative import (declarative_base, declared_attr,
+ AbstractConcreteBase)
+
+ Base = declarative_base()
+
+ class Something(Base):
+ __tablename__ = u'something'
+ id = Column(Integer, primary_key=True)
+
+
+ class Abstract(AbstractConcreteBase, Base):
+ id = Column(Integer, primary_key=True)
+
+ @declared_attr
+ def something_id(cls):
+ return Column(ForeignKey(Something.id))
+
+ @declared_attr
+ def something(cls):
+ return relationship(Something)
+
+
+ class Concrete(Abstract):
+ __tablename__ = u'cca'
+ __mapper_args__ = {'polymorphic_identity': 'cca', 'concrete': True}
+
+
+The above mapping will set up a table ``cca`` with both an ``id`` and
+a ``something_id`` column, and ``Concrete`` will also have a relationship
+``something``. The new feature is that ``Abstract`` will also have an
+independently configured relationship ``something`` that builds against
+the polymorphic union of the base.
+
+:ticket:`3150` :ticket:`2670` :ticket:`3149` :ticket:`2952` :ticket:`3050`
+
.. _feature_3034:
Select/Query LIMIT / OFFSET may be specified as an arbitrary SQL expression
@@ -50,6 +185,170 @@ wishes to support the new feature should now call upon the ``._limit_clause``
and ``._offset_clause`` attributes to receive the full SQL expression, rather
than the integer value.
+.. _change_2051:
+
+.. _feature_insert_from_select_defaults:
+
+INSERT FROM SELECT now includes Python and SQL-expression defaults
+-------------------------------------------------------------------
+
+:meth:`.Insert.from_select` now includes Python and SQL-expression defaults if
+otherwise unspecified; the limitation where non-server column defaults
+aren't included in an INSERT FROM SELECT is now lifted and these
+expressions are rendered as constants into the SELECT statement::
+
+ from sqlalchemy import Table, Column, MetaData, Integer, select, func
+
+ m = MetaData()
+
+ t = Table(
+ 't', m,
+ Column('x', Integer),
+ Column('y', Integer, default=func.somefunction()))
+
+ stmt = select([t.c.x])
+ print t.insert().from_select(['x'], stmt)
+
+Will render::
+
+ INSERT INTO t (x, y) SELECT t.x, somefunction() AS somefunction_1
+ FROM t
+
+The feature can be disabled using
+:paramref:`.Insert.from_select.include_defaults`.
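+
+For example, passing ``include_defaults=False`` restores the prior
+behavior (a minimal sketch, continuing the example above)::
+
+    t.insert().from_select(['x'], stmt, include_defaults=False)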
+
+New Postgresql Table options
+-----------------------------
+
+Added support for PG table options TABLESPACE, ON COMMIT,
+WITH(OUT) OIDS, and INHERITS, when rendering DDL via
+the :class:`.Table` construct.
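+
+A brief sketch (option names per the Postgresql dialect documentation;
+the tablespace and parent table names here are hypothetical)::
+
+    Table(
+        'some_table', metadata,
+        Column('id', Integer, primary_key=True),
+        postgresql_tablespace='my_tablespace',
+        postgresql_with_oids=False,
+        postgresql_inherits='some_supertable'
+    )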
+
+.. seealso::
+
+ :ref:`postgresql_table_options`
+
+:ticket:`2051`
+
+.. _feature_get_enums:
+
+New get_enums() method with Postgresql Dialect
+----------------------------------------------
+
+The :func:`.inspect` method returns a :class:`.PGInspector` object in the
+case of Postgresql, which includes a new :meth:`.PGInspector.get_enums`
+method that returns information on all available ``ENUM`` types::
+
+ from sqlalchemy import inspect, create_engine
+
+ engine = create_engine("postgresql+psycopg2://host/dbname")
+ insp = inspect(engine)
+ print(insp.get_enums())
+
+.. seealso::
+
+ :meth:`.PGInspector.get_enums`
+
+.. _feature_2891:
+
+Postgresql Dialect reflects Materialized Views, Foreign Tables
+--------------------------------------------------------------
+
+Changes are as follows:
+
+* the :class:`Table` construct with ``autoload=True`` will now match a name
+ that exists in the database as a materialized view or foreign table.
+
+* :meth:`.Inspector.get_view_names` will return plain and materialized view
+ names.
+
+* :meth:`.Inspector.get_table_names` does **not** change for Postgresql, it
+ continues to return only the names of plain tables.
+
+* A new method :meth:`.PGInspector.get_foreign_table_names` is added which
+ will return the names of tables that are specifically marked as "foreign"
+ in the Postgresql schema tables.
+
+The change to reflection involves adding ``'m'`` and ``'f'`` to the list
+of qualifiers we use when querying ``pg_class.relkind``, but this change
+is new in 1.0.0 to avoid any backwards-incompatible surprises for those
+running 0.9 in production.
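+
+Continuing the inspector example from the previous section, a sketch::
+
+    insp = inspect(engine)
+
+    print(insp.get_view_names())           # plain and materialized views
+    print(insp.get_foreign_table_names())  # foreign tables only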
+
+:ticket:`2891`
+
+.. _feature_gh134:
+
+Postgresql FILTER keyword
+-------------------------
+
+The SQL standard FILTER keyword for aggregate functions is now supported
+by Postgresql as of 9.4. SQLAlchemy allows this using
+:meth:`.FunctionElement.filter`::
+
+ func.count(1).filter(True)
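+
+A fuller sketch, filtering an aggregate with a WHERE-style criterion
+(the table here is hypothetical)::
+
+    from sqlalchemy import select, func, table, column
+
+    t = table('t', column('x'))
+
+    stmt = select([func.count(t.c.x).filter(t.c.x > 5)])
+    # SELECT count(t.x) FILTER (WHERE t.x > 5) AS anon_1 FROM t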
+
+.. seealso::
+
+ :meth:`.FunctionElement.filter`
+
+ :class:`.FunctionFilter`
+
+.. _feature_3184:
+
+UniqueConstraint is now part of the Table reflection process
+------------------------------------------------------------
+
+A :class:`.Table` object populated using ``autoload=True`` will now
+include :class:`.UniqueConstraint` constructs as well as
+:class:`.Index` constructs. This logic has a few caveats for
+Postgresql and Mysql:
+
+Postgresql
+^^^^^^^^^^
+
+Postgresql has the behavior such that when a UNIQUE constraint is
+created, it implicitly creates a UNIQUE INDEX corresponding to that
+constraint as well. The :meth:`.Inspector.get_indexes` and the
+:meth:`.Inspector.get_unique_constraints` methods will continue to
+**both** return these entries distinctly, where
+:meth:`.Inspector.get_indexes` now features a token
+``duplicates_constraint`` within the index entry indicating the
+corresponding constraint when detected. However, when performing
+full table reflection using ``Table(..., autoload=True)``, the
+:class:`.Index` construct is detected as being linked to the
+:class:`.UniqueConstraint`, and is **not** present within the
+:attr:`.Table.indexes` collection; only the :class:`.UniqueConstraint`
+will be present in the :attr:`.Table.constraints` collection. This
+deduplication logic works by joining to the ``pg_constraint`` table
+when querying ``pg_index`` to see if the two constructs are linked.
+
+MySQL
+^^^^^
+
+MySQL does not have separate concepts for a UNIQUE INDEX and a UNIQUE
+constraint. While it supports both syntaxes when creating tables and indexes,
+it does not store them any differently. The
+:meth:`.Inspector.get_indexes`
+and the :meth:`.Inspector.get_unique_constraints` methods will continue to
+**both** return an entry for a UNIQUE index in MySQL,
+where :meth:`.Inspector.get_unique_constraints` features a new token
+``duplicates_index`` within the constraint entry indicating that this is a
+dupe entry corresponding to that index. However, when performing
+full table reflection using ``Table(..., autoload=True)``,
+the :class:`.UniqueConstraint` construct is
+**not** part of the fully reflected :class:`.Table` construct under any
+circumstances; this construct is always represented by an :class:`.Index`
+with the ``unique=True`` setting present in the :attr:`.Table.indexes`
+collection.
+
+.. seealso::
+
+ :ref:`postgresql_index_reflection`
+
+ :ref:`mysql_unique_constraints`
+
+:ticket:`3184`
+
Behavioral Improvements
=======================
@@ -121,7 +420,6 @@ MacBookPro is 31 seconds on 0.9 and 26 seconds on 1.0, the extra time spent
setting up very large memory buffers.
-
.. _feature_3176:
New KeyedTuple implementation dramatically faster
@@ -170,6 +468,49 @@ object totally smokes both namedtuple and KeyedTuple::
:ticket:`3176`
+.. _bug_3035:
+
+Session.get_bind() handles a wider variety of inheritance scenarios
+-------------------------------------------------------------------
+
+The :meth:`.Session.get_bind` method is invoked whenever a query or unit
+of work flush process seeks to locate the database engine that corresponds
+to a particular class. The method has been improved to handle a variety
+of inheritance-oriented scenarios, including:
+
+* Binding to a Mixin or Abstract Class::
+
+ class MyClass(SomeMixin, Base):
+ __tablename__ = 'my_table'
+ # ...
+
+ session = Session(binds={SomeMixin: some_engine})
+
+
+* Binding to inherited concrete subclasses individually based on table::
+
+ class BaseClass(Base):
+ __tablename__ = 'base'
+
+ # ...
+
+ class ConcreteSubClass(BaseClass):
+ __tablename__ = 'concrete'
+
+ # ...
+
+ __mapper_args__ = {'concrete': True}
+
+
+    session = Session(binds={
+        BaseClass.__table__: some_engine,
+        ConcreteSubClass.__table__: some_other_engine
+    })
+
+
+:ticket:`3035`
+
+
.. _feature_3178:
New systems to safely emit parameterized warnings
@@ -307,140 +648,71 @@ Renders::
:ticket:`3177`
-.. _feature_3150:
-Improvements to declarative mixins, ``@declared_attr`` and related features
-----------------------------------------------------------------------------
+.. _migration_3222:
-The declarative system in conjunction with :class:`.declared_attr` has been
-overhauled to support new capabilities.
-A function decorated with :class:`.declared_attr` is now called only **after**
-any mixin-based column copies are generated. This means the function can
-call upon mixin-established columns and will receive a reference to the correct
-:class:`.Column` object::
-
- class HasFooBar(object):
- foobar = Column(Integer)
+single-table-inheritance criteria added to all ON clauses unconditionally
+-------------------------------------------------------------------------
- @declared_attr
- def foobar_prop(cls):
- return column_property('foobar: ' + cls.foobar)
+When joining to a single-table inheritance subclass target, the ORM always adds
+the "single table criteria" when joining on a relationship. Given a
+mapping as::
- class SomeClass(HasFooBar, Base):
- __tablename__ = 'some_table'
+ class Widget(Base):
+ __tablename__ = 'widget'
id = Column(Integer, primary_key=True)
+ type = Column(String)
+ related_id = Column(ForeignKey('related.id'))
+ related = relationship("Related", backref="widget")
+ __mapper_args__ = {'polymorphic_on': type}
-Above, ``SomeClass.foobar_prop`` will be invoked against ``SomeClass``,
-and ``SomeClass.foobar`` will be the final :class:`.Column` object that is
-to be mapped to ``SomeClass``, as opposed to the non-copied object present
-directly on ``HasFooBar``, even though the columns aren't mapped yet.
-
-The :class:`.declared_attr` function now **memoizes** the value
-that's returned on a per-class basis, so that repeated calls to the same
-attribute will return the same value. We can alter the example to illustrate
-this::
- class HasFooBar(object):
- @declared_attr
- def foobar(cls):
- return Column(Integer)
+ class FooWidget(Widget):
+ __mapper_args__ = {'polymorphic_identity': 'foo'}
- @declared_attr
- def foobar_prop(cls):
- return column_property('foobar: ' + cls.foobar)
- class SomeClass(HasFooBar, Base):
- __tablename__ = 'some_table'
+ class Related(Base):
+ __tablename__ = 'related'
id = Column(Integer, primary_key=True)
-Previously, ``SomeClass`` would be mapped with one particular copy of
-the ``foobar`` column, but the ``foobar_prop`` by calling upon ``foobar``
-a second time would produce a different column. The value of
-``SomeClass.foobar`` is now memoized during declarative setup time, so that
-even before the attribute is mapped by the mapper, the interim column
-value will remain consistent no matter how many times the
-:class:`.declared_attr` is called upon.
+It's been the behavior for quite some time that a JOIN on the relationship
+will render a "single inheritance" clause for the type::
-The two behaviors above should help considerably with declarative definition
-of many types of mapper properties that derive from other attributes, where
-the :class:`.declared_attr` function is called upon from other
-:class:`.declared_attr` functions locally present before the class is
-actually mapped.
+ s.query(Related).join(FooWidget, Related.widget).all()
-For a pretty slim edge case where one wishes to build a declarative mixin
-that establishes distinct columns per subclass, a new modifier
-:attr:`.declared_attr.cascading` is added. With this modifier, the
-decorated function will be invoked individually for each class in the
-mapped inheritance hierarchy. While this is already the behavior for
-special attributes such as ``__table_args__`` and ``__mapper_args__``,
-for columns and other properties the behavior by default assumes that attribute
-is affixed to the base class only, and just inherited from subclasses.
-With :attr:`.declared_attr.cascading`, individual behaviors can be
-applied::
+SQL output::
- class HasSomeAttribute(object):
- @declared_attr.cascading
- def some_id(cls):
- if has_inherited_table(cls):
- return Column(ForeignKey('myclass.id'), primary_key=True)
- else:
- return Column(Integer, primary_key=True)
+ SELECT related.id AS related_id
+ FROM related JOIN widget ON related.id = widget.related_id AND widget.type IN (:type_1)
- return Column('id', Integer, primary_key=True)
+Above, because we joined to a subclass ``FooWidget``, :meth:`.Query.join`
+knew to add the ``AND widget.type IN ('foo')`` criteria to the ON clause.
- class MyClass(HasSomeAttribute, Base):
- ""
- # ...
-
- class MySubClass(MyClass):
- ""
- # ...
+The change here is that the ``AND widget.type IN()`` criteria is now appended
+to *any* ON clause, not just those generated from a relationship,
+including one that is explicitly stated::
-.. seealso::
+ # ON clause will now render as
+ # related.id = widget.related_id AND widget.type IN (:type_1)
+ s.query(Related).join(FooWidget, FooWidget.related_id == Related.id).all()
- :ref:`mixin_inheritance_columns`
+As well as the "implicit" join when no ON clause of any kind is stated::
-Finally, the :class:`.AbstractConcreteBase` class has been reworked
-so that a relationship or other mapper property can be set up inline
-on the abstract base::
+ # ON clause will now render as
+ # related.id = widget.related_id AND widget.type IN (:type_1)
+ s.query(Related).join(FooWidget).all()
- from sqlalchemy import Column, Integer, ForeignKey
- from sqlalchemy.orm import relationship
- from sqlalchemy.ext.declarative import (declarative_base, declared_attr,
- AbstractConcreteBase)
-
- Base = declarative_base()
-
- class Something(Base):
- __tablename__ = u'something'
- id = Column(Integer, primary_key=True)
-
-
- class Abstract(AbstractConcreteBase, Base):
- id = Column(Integer, primary_key=True)
-
- @declared_attr
- def something_id(cls):
- return Column(ForeignKey(Something.id))
-
- @declared_attr
- def something(cls):
- return relationship(Something)
+Previously, the ON clause for these would not include the single-inheritance
+criteria. Applications that are already adding this criteria to work around
+this will want to remove its explicit use, though it should continue to work
+fine if the criteria happens to be rendered twice in the meantime.
+.. seealso::
- class Concrete(Abstract):
- __tablename__ = u'cca'
- __mapper_args__ = {'polymorphic_identity': 'cca', 'concrete': True}
+ :ref:`bug_3233`
-
-The above mapping will set up a table ``cca`` with both an ``id`` and
-a ``something_id`` column, and ``Concrete`` will also have a relationship
-``something``. The new feature is that ``Abstract`` will also have an
-independently configured relationship ``something`` that builds against
-the polymorphic union of the base.
-
-:ticket:`3150` :ticket:`2670` :ticket:`3149` :ticket:`2952` :ticket:`3050`
+:ticket:`3222`
.. _bug_3188:
@@ -521,11 +793,88 @@ would again fail; these have also been fixed.
:ticket:`3148` :ticket:`3188`
+.. _bug_3170:
+
+null(), false() and true() constants are no longer singletons
+-------------------------------------------------------------
+
+These three constants were changed to return a "singleton" value
+in 0.9; unfortunately, that had the effect that a query like the following
+would not render as expected::
+
+ select([null(), null()])
+
+rendering only ``SELECT NULL AS anon_1``, because the two :func:`.null`
+constructs would come out as the same ``NULL`` object, and
+SQLAlchemy's Core model is based on object identity in order to
+determine lexical significance. The change in 0.9 had no
+importance other than the desire to save on object overhead; in general,
+an unnamed construct needs to stay lexically unique so that it gets
+labeled uniquely.
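+
+Under 1.0, each :func:`.null` construct is again a distinct object, so the
+statement above should once more render both columns, along the lines of::
+
+    SELECT NULL AS anon_1, NULL AS anon_2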
+
+:ticket:`3170`
+
.. _behavioral_changes_orm_10:
Behavioral Changes - ORM
========================
+.. _bug_3228:
+
+query.update() now resolves string names into mapped attribute names
+--------------------------------------------------------------------
+
+The documentation for :meth:`.Query.update` states that the given
+``values`` dictionary is "a dictionary with attributes names as keys",
+implying that these are mapped attribute names. Unfortunately, the function
+was designed more with attributes and SQL expressions in mind than
+plain strings; when strings
+were passed, they would be passed straight through to the core
+update statement without any resolution as far as how these names are
+represented on the mapped class, meaning the name would have to match that
+of a table column exactly, not how an attribute of that name was mapped
+onto the class.
+
+The string names are now resolved as attribute names in earnest::
+
+ class User(Base):
+ __tablename__ = 'user'
+
+ id = Column(Integer, primary_key=True)
+ name = Column('user_name', String(50))
+
+Above, the column ``user_name`` is mapped as ``name``. Previously,
+a call to :meth:`.Query.update` that was passed strings would have had
+to be made as follows::
+
+ session.query(User).update({'user_name': 'moonbeam'})
+
+The given string is now resolved against the entity::
+
+ session.query(User).update({'name': 'moonbeam'})
+
+It is typically preferable to use the attribute directly, to avoid any
+ambiguity::
+
+ session.query(User).update({User.name: 'moonbeam'})
+
+The change also means that synonyms and hybrid attributes can be referred
+to by string name as well::
+
+ class User(Base):
+ __tablename__ = 'user'
+
+ id = Column(Integer, primary_key=True)
+ name = Column('user_name', String(50))
+
+ @hybrid_property
+ def fullname(self):
+ return self.name
+
+ session.query(User).update({'fullname': 'moonbeam'})
+
+:ticket:`3228`
+
.. _migration_3061:
Changes to attribute events and other operations regarding attributes that have no pre-existing value
@@ -603,6 +952,39 @@ symbol, and no change to the object's state occurs.
:ticket:`3061`
+.. _bug_3139:
+
+session.expunge() will fully detach an object that's been deleted
+-----------------------------------------------------------------
+
+The behavior of :meth:`.Session.expunge` had a bug that caused an
+inconsistency in behavior regarding deleted objects. The
+:func:`.object_session` function as well as the :attr:`.InstanceState.session`
+attribute would still report the object as belonging to the :class:`.Session`
+subsequent to the expunge::
+
+ u1 = sess.query(User).first()
+ sess.delete(u1)
+
+ sess.flush()
+
+ assert u1 not in sess
+ assert inspect(u1).session is sess # this is normal before commit
+
+ sess.expunge(u1)
+
+ assert u1 not in sess
+ assert inspect(u1).session is None # would fail
+
+Note that it is normal for ``u1 not in sess`` to be True while
+``inspect(u1).session`` still refers to the session, as long as the transaction
+is ongoing subsequent to the delete operation and :meth:`.Session.expunge`
+has not been called; the full detachment normally completes once the
+transaction is committed. This issue would also impact functions
+that rely on :meth:`.Session.expunge` such as :func:`.make_transient`.
+
+:ticket:`3139`
+
.. _migration_yield_per_eager_loading:
Joined/Subquery eager loading explicitly disallowed with yield_per
@@ -628,6 +1010,91 @@ joined loader options can still be used::
q = sess.query(Object).options(
lazyload('*'), joinedload("some_manytoone")).yield_per(100)
+.. _bug_3233:
+
+Single inheritance join targets will no longer sometimes implicitly alias themselves
+------------------------------------------------------------------------------------
+
+This is a bug where an unexpected and inconsistent behavior would occur
+in some scenarios when joining to a single-table-inheritance entity. The
+difficulty this might cause is that the query is supposed to raise an error,
+as it is invalid SQL; however, the bug would cause an alias to be added which
+made the query "work". The issue is confusing because this aliasing
+is not applied consistently and could change based on the nature of the query
+preceding the join.
+
+A simple example is::
+
+ from sqlalchemy import Integer, Column, String, ForeignKey
+ from sqlalchemy.orm import Session, relationship
+ from sqlalchemy.ext.declarative import declarative_base
+
+ Base = declarative_base()
+
+ class A(Base):
+ __tablename__ = "a"
+
+ id = Column(Integer, primary_key=True)
+ type = Column(String)
+
+ __mapper_args__ = {'polymorphic_on': type, 'polymorphic_identity': 'a'}
+
+
+ class ASub1(A):
+ __mapper_args__ = {'polymorphic_identity': 'asub1'}
+
+
+ class ASub2(A):
+ __mapper_args__ = {'polymorphic_identity': 'asub2'}
+
+
+ class B(Base):
+ __tablename__ = 'b'
+
+ id = Column(Integer, primary_key=True)
+
+ a_id = Column(Integer, ForeignKey("a.id"))
+
+ a = relationship("A", primaryjoin="B.a_id == A.id", backref='b')
+
+ s = Session()
+
+ print s.query(ASub1).join(B, ASub1.b).join(ASub2, B.a)
+
+ print s.query(ASub1).join(B, ASub1.b).join(ASub2, ASub2.id == B.a_id)
+
+The two queries at the bottom are equivalent, and should both render
+the identical SQL::
+
+ SELECT a.id AS a_id, a.type AS a_type
+ FROM a JOIN b ON b.a_id = a.id JOIN a ON b.a_id = a.id AND a.type IN (:type_1)
+ WHERE a.type IN (:type_2)
+
+The above SQL is invalid, as it renders "a" within the FROM list twice.
+The bug, however, would occur with the second query only, rendering this instead::
+
+ SELECT a.id AS a_id, a.type AS a_type
+ FROM a JOIN b ON b.a_id = a.id JOIN a AS a_1
+ ON a_1.id = b.a_id AND a_1.type IN (:type_1)
+ WHERE a_1.type IN (:type_2)
+
+Where above, the second join to "a" is aliased. While this seems convenient,
+it's not how single-inheritance queries work in general and is misleading
+and inconsistent.
+
+The net effect is that applications which were relying on this bug will now
+have an error raised by the database. The solution is to use the expected
+form. When referring to multiple subclasses of a single-inheritance
+entity in a query, you must manually use aliases to disambiguate the table,
+as all the subclasses normally refer to the same table::
+
+ asub2_alias = aliased(ASub2)
+
+ print s.query(ASub1).join(B, ASub1.b).join(asub2_alias, B.a.of_type(asub2_alias))
+
+:ticket:`3233`
+
+
.. _migration_migration_deprecated_orm_events:
@@ -960,72 +1427,25 @@ A :class:`.Table` can be set up for reflection by passing
:ticket:`3027`
+.. _change_3243:
+
+ForeignKeyConstraint.columns is now a ColumnCollection
+------------------------------------------------------
+:attr:`.ForeignKeyConstraint.columns` was previously a plain list
+containing either strings or :class:`.Column` objects, depending on
+how the :class:`.ForeignKeyConstraint` was constructed and whether it was
+associated with a table. The collection is now a :class:`.ColumnCollection`,
+and is only initialized after the :class:`.ForeignKeyConstraint` is
+associated with a :class:`.Table`. A new accessor
+:attr:`.ForeignKeyConstraint.column_keys`
+is added to unconditionally return string keys for the local set of
+columns regardless of how the object was constructed or its current
+state.
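+
+A minimal sketch of the new behavior::
+
+    from sqlalchemy import (
+        Column, ForeignKeyConstraint, Integer, MetaData, Table)
+
+    m = MetaData()
+    parent = Table('parent', m, Column('id', Integer, primary_key=True))
+    child = Table(
+        'child', m,
+        Column('id', Integer, primary_key=True),
+        Column('parent_id', Integer),
+        ForeignKeyConstraint(['parent_id'], ['parent.id']))
+
+    fkc = next(
+        c for c in child.constraints
+        if isinstance(c, ForeignKeyConstraint))
+
+    print(fkc.column_keys)  # ['parent_id']
+    print(fkc.columns)      # ColumnCollection containing child.c.parent_id
+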
Dialect Changes
===============
-.. _change_2051:
-
-New Postgresql Table options
------------------------------
-
-Added support for PG table options TABLESPACE, ON COMMIT,
-WITH(OUT) OIDS, and INHERITS, when rendering DDL via
-the :class:`.Table` construct.
-
-.. seealso::
-
- :ref:`postgresql_table_options`
-
-:ticket:`2051`
-
-.. _feature_get_enums:
-
-New get_enums() method with Postgresql Dialect
-----------------------------------------------
-
-The :func:`.inspect` method returns a :class:`.PGInspector` object in the
-case of Postgresql, which includes a new :meth:`.PGInspector.get_enums`
-method that returns information on all available ``ENUM`` types::
-
- from sqlalchemy import inspect, create_engine
-
- engine = create_engine("postgresql+psycopg2://host/dbname")
- insp = inspect(engine)
- print(insp.get_enums())
-
-.. seealso::
-
- :meth:`.PGInspector.get_enums`
-
-.. _feature_2891:
-
-Postgresql Dialect reflects Materialized Views, Foreign Tables
---------------------------------------------------------------
-
-Changes are as follows:
-
-* the :class:`Table` construct with ``autoload=True`` will now match a name
- that exists in the database as a materialized view or foriegn table.
-
-* :meth:`.Inspector.get_view_names` will return plain and materialized view
- names.
-
-* :meth:`.Inspector.get_table_names` does **not** change for Postgresql, it
- continues to return only the names of plain tables.
-
-* A new method :meth:`.PGInspector.get_foreign_table_names` is added which
- will return the names of tables that are specifically marked as "foreign"
- in the Postgresql schema tables.
-
-The change to reflection involves adding ``'m'`` and ``'f'`` to the list
-of qualifiers we use when querying ``pg_class.relkind``, but this change
-is new in 1.0.0 to avoid any backwards-incompatible surprises for those
-running 0.9 in production.
-
-:ticket:`2891`
-
MySQL internal "no such table" exceptions not passed to event handlers
----------------------------------------------------------------------
diff --git a/doc/build/core/defaults.rst b/doc/build/core/defaults.rst
index 166273c18..1d55cd6c6 100644
--- a/doc/build/core/defaults.rst
+++ b/doc/build/core/defaults.rst
@@ -1,6 +1,8 @@
+.. module:: sqlalchemy.schema
+
.. _metadata_defaults_toplevel:
+
.. _metadata_defaults:
-.. module:: sqlalchemy.schema
Column Insert/Update Defaults
==============================
diff --git a/doc/build/core/sqlelement.rst b/doc/build/core/sqlelement.rst
index 61600e927..44a969dbb 100644
--- a/doc/build/core/sqlelement.rst
+++ b/doc/build/core/sqlelement.rst
@@ -35,6 +35,8 @@ used to construct any kind of typed SQL expression.
.. autodata:: func
+.. autofunction:: funcfilter
+
.. autofunction:: label
.. autofunction:: literal
@@ -109,6 +111,9 @@ used to construct any kind of typed SQL expression.
.. autoclass:: sqlalchemy.sql.elements.False_
:members:
+.. autoclass:: FunctionFilter
+ :members:
+
.. autoclass:: Label
:members:
diff --git a/doc/build/dialects/sqlite.rst b/doc/build/dialects/sqlite.rst
index 21fd4e3aa..a18b0ba7b 100644
--- a/doc/build/dialects/sqlite.rst
+++ b/doc/build/dialects/sqlite.rst
@@ -28,4 +28,9 @@ they originate from :mod:`sqlalchemy.types` or from the local dialect::
Pysqlite
--------
-.. automodule:: sqlalchemy.dialects.sqlite.pysqlite \ No newline at end of file
+.. automodule:: sqlalchemy.dialects.sqlite.pysqlite
+
+Pysqlcipher
+-----------
+
+.. automodule:: sqlalchemy.dialects.sqlite.pysqlcipher \ No newline at end of file
diff --git a/doc/build/faq.rst b/doc/build/faq.rst
index 3dc81026b..586f66754 100644
--- a/doc/build/faq.rst
+++ b/doc/build/faq.rst
@@ -603,6 +603,108 @@ The same idea applies to all the other arguments, such as ``foreign_keys``::
foo = relationship(Dest, foreign_keys=[foo_id, bar_id])
+.. _faq_subqueryload_limit_sort:
+
+Why is ``ORDER BY`` required with ``LIMIT`` (especially with ``subqueryload()``)?
+---------------------------------------------------------------------------------
+
+A relational database can return rows in any
+arbitrary order when an explicit ordering is not set.
+While this ordering very often corresponds to the natural
+order of rows within a table, this is not the case for all databases and
+all queries. The consequence of this is that any query that limits rows
+using ``LIMIT`` or ``OFFSET`` should **always** specify an ``ORDER BY``.
+Otherwise, it is not deterministic which rows will actually be returned.
+
+When we use a SQLAlchemy method like :meth:`.Query.first`, we are in fact
+applying a ``LIMIT`` of one to the query, so without an explicit ordering
+it is not deterministic what row we actually get back.
+While we may not notice this for simple queries on databases that usually
+return rows in their natural
+order, it becomes much more of an issue if we also use :func:`.orm.subqueryload`
+to load related collections, and we may not be loading the collections
+as intended.
+
+SQLAlchemy implements :func:`.orm.subqueryload` by issuing a separate query,
+the results of which are matched up to the results from the first query.
+We see two queries emitted like this:
+
+.. sourcecode:: python+sql
+
+ >>> session.query(User).options(subqueryload(User.addresses)).all()
+ {opensql}-- the "main" query
+ SELECT users.id AS users_id
+ FROM users
+ {stop}
+ {opensql}-- the "load" query issued by subqueryload
+ SELECT addresses.id AS addresses_id,
+ addresses.user_id AS addresses_user_id,
+ anon_1.users_id AS anon_1_users_id
+ FROM (SELECT users.id AS users_id FROM users) AS anon_1
+ JOIN addresses ON anon_1.users_id = addresses.user_id
+ ORDER BY anon_1.users_id
+
+The second query embeds the first query as a source of rows.
+When the inner query uses ``OFFSET`` and/or ``LIMIT`` without ordering,
+the two queries may not see the same results:
+
+.. sourcecode:: python+sql
+
+ >>> user = session.query(User).options(subqueryload(User.addresses)).first()
+ {opensql}-- the "main" query
+ SELECT users.id AS users_id
+ FROM users
+ LIMIT 1
+ {stop}
+ {opensql}-- the "load" query issued by subqueryload
+ SELECT addresses.id AS addresses_id,
+ addresses.user_id AS addresses_user_id,
+ anon_1.users_id AS anon_1_users_id
+ FROM (SELECT users.id AS users_id FROM users LIMIT 1) AS anon_1
+ JOIN addresses ON anon_1.users_id = addresses.user_id
+ ORDER BY anon_1.users_id
+
+Depending on database specifics, there is
+a chance we may get a result like the following for the two queries::
+
+ -- query #1
+ +--------+
+ |users_id|
+ +--------+
+ | 1|
+ +--------+
+
+ -- query #2
+ +------------+-----------------+---------------+
+ |addresses_id|addresses_user_id|anon_1_users_id|
+ +------------+-----------------+---------------+
+ | 3| 2| 2|
+ +------------+-----------------+---------------+
+ | 4| 2| 2|
+ +------------+-----------------+---------------+
+
+Above, we receive two ``addresses`` rows for ``user.id`` of 2, and none for
+1. We've wasted two rows and failed to actually load the collection. This
+is an insidious error because without looking at the SQL and the results, the
+ORM will not show that there's any issue; if we access the ``addresses``
+for the ``User`` we have, it will emit a lazy load for the collection and we
+won't see that anything actually went wrong.
+
+The solution to this problem is to always specify a deterministic sort order,
+so that the main query always returns the same set of rows. This generally
+means that you should apply :meth:`.Query.order_by` using a unique column on the table.
+The primary key is a good choice for this::
+
+ session.query(User).options(subqueryload(User.addresses)).order_by(User.id).first()
+
+Note that :func:`.joinedload` does not suffer from the same problem because
+only one query is ever issued, so the load query cannot be different from the
+main query.
+
+.. seealso::
+
+ :ref:`subqueryload_ordering`
+
Performance
===========
@@ -655,14 +757,14 @@ using a recipe like the following::
def before_cursor_execute(conn, cursor, statement,
parameters, context, executemany):
conn.info.setdefault('query_start_time', []).append(time.time())
- logger.debug("Start Query: %s" % statement)
+ logger.debug("Start Query: %s", statement)
@event.listens_for(Engine, "after_cursor_execute")
def after_cursor_execute(conn, cursor, statement,
parameters, context, executemany):
total = time.time() - conn.info['query_start_time'].pop(-1)
logger.debug("Query Complete!")
- logger.debug("Total Time: %f" % total)
+ logger.debug("Total Time: %f", total)
Above, we use the :meth:`.ConnectionEvents.before_cursor_execute` and
:meth:`.ConnectionEvents.after_cursor_execute` events to establish an interception
diff --git a/doc/build/orm/inheritance.rst b/doc/build/orm/inheritance.rst
index 9f01a3e24..0713634bc 100644
--- a/doc/build/orm/inheritance.rst
+++ b/doc/build/orm/inheritance.rst
@@ -475,6 +475,8 @@ subselect back to the parent ``companies`` table.
:func:`.orm.aliased` and :func:`.orm.with_polymorphic` constructs in conjunction
with :meth:`.Query.join`, ``any()`` and ``has()``.
+.. _eagerloading_polymorphic_subtypes:
+
Eager Loading of Specific or Polymorphic Subtypes
++++++++++++++++++++++++++++++++++++++++++++++++++
@@ -491,7 +493,7 @@ objects, querying the ``employee`` and ``engineer`` tables simultaneously::
)
)
-As is the case with :meth:`.Query.join`, :func:`~sqlalchemy.orm.interfaces.PropComparator.of_type`
+As is the case with :meth:`.Query.join`, :meth:`~PropComparator.of_type`
also can be used with eager loading and :func:`.orm.with_polymorphic`
at the same time, so that all sub-attributes of all referenced subtypes
can be loaded::
@@ -513,6 +515,23 @@ can be loaded::
:func:`~sqlalchemy.orm.interfaces.PropComparator.of_type`, supporting
single target types as well as :func:`.orm.with_polymorphic` targets.
+Another option for the above query is to state the two subtypes separately;
+the :func:`.joinedload` directive should detect this and create the
+above ``with_polymorphic`` construct automatically::
+
+    session.query(Company).\
+        options(
+            joinedload(Company.employees.of_type(Manager)),
+            joinedload(Company.employees.of_type(Engineer)),
+        )
+
+.. versionadded:: 1.0
+ Eager loaders such as :func:`.joinedload` will create a polymorphic
+ entity when multiple overlapping :meth:`~PropComparator.of_type`
+ directives are encountered.
+
+
Single Table Inheritance
------------------------
diff --git a/doc/build/orm/loading.rst b/doc/build/orm/loading.rst
index 6c2fac004..b2d8124e2 100644
--- a/doc/build/orm/loading.rst
+++ b/doc/build/orm/loading.rst
@@ -120,6 +120,32 @@ query options:
# set children to load eagerly with a second statement
session.query(Parent).options(subqueryload('children')).all()
+.. _subqueryload_ordering:
+
+The Importance of Ordering
+--------------------------
+
+A query which makes use of :func:`.subqueryload` in conjunction with a
+limiting modifier such as :meth:`.Query.first`, :meth:`.Query.limit`,
+or :meth:`.Query.offset` should **always** include :meth:`.Query.order_by`
+against unique column(s) such as the primary key, so that the additional queries
+emitted by :func:`.subqueryload` include
+the same ordering as used by the parent query. Without it, there is a chance
+that the inner query could return the wrong rows::
+
+ # incorrect, no ORDER BY
+ session.query(User).options(subqueryload(User.addresses)).first()
+
+ # incorrect if User.name is not unique
+ session.query(User).options(subqueryload(User.addresses)).order_by(User.name).first()
+
+ # correct
+ session.query(User).options(subqueryload(User.addresses)).order_by(User.name, User.id).first()
+
+.. seealso::
+
+ :ref:`faq_subqueryload_limit_sort` - detailed example
+
Loading Along Paths
-------------------
diff --git a/doc/build/orm/relationships.rst b/doc/build/orm/relationships.rst
index c65f06cbc..f512251a7 100644
--- a/doc/build/orm/relationships.rst
+++ b/doc/build/orm/relationships.rst
@@ -1079,12 +1079,15 @@ The above relationship will produce a join like::
ON host_entry_1.ip_address = CAST(host_entry.content AS INET)
An alternative syntax to the above is to use the :func:`.foreign` and
-:func:`.remote` :term:`annotations`, inline within the :paramref:`~.relationship.primaryjoin` expression.
+:func:`.remote` :term:`annotations`,
+inline within the :paramref:`~.relationship.primaryjoin` expression.
This syntax represents the annotations that :func:`.relationship` normally
applies by itself to the join condition given the :paramref:`~.relationship.foreign_keys` and
-:paramref:`~.relationship.remote_side` arguments; the functions are provided in the API in the
-rare case that :func:`.relationship` can't determine the exact location
-of these features on its own::
+:paramref:`~.relationship.remote_side` arguments. These functions may
+be more succinct when an explicit join condition is present, and additionally
+serve to mark exactly the column that is "foreign" or "remote" independent
+of whether that column is stated multiple times or within complex
+SQL expressions::
from sqlalchemy.orm import foreign, remote
@@ -1157,6 +1160,130 @@ Will render as::
flag to assist in the creation of :func:`.relationship` constructs using
custom operators.
+.. _relationship_overlapping_foreignkeys:
+
+Overlapping Foreign Keys
+~~~~~~~~~~~~~~~~~~~~~~~~
+
+A rare scenario can arise when composite foreign keys are used, such that
+a single column may be part of more than one foreign key constraint,
+and therefore refer to more than one remote column.
+
+Consider an (admittedly complex) mapping such as the ``Magazine`` object,
+referred to both by the ``Writer`` object and the ``Article`` object
+using a composite primary key scheme that includes ``magazine_id``
+for both; then to make ``Article`` refer to ``Writer`` as well,
+``Article.magazine_id`` is involved in two separate relationships:
+``Article.magazine`` and ``Article.writer``::
+
+ class Magazine(Base):
+ __tablename__ = 'magazine'
+
+ id = Column(Integer, primary_key=True)
+
+
+ class Article(Base):
+ __tablename__ = 'article'
+
+ article_id = Column(Integer)
+ magazine_id = Column(ForeignKey('magazine.id'))
+ writer_id = Column()
+
+ magazine = relationship("Magazine")
+ writer = relationship("Writer")
+
+ __table_args__ = (
+ PrimaryKeyConstraint('article_id', 'magazine_id'),
+ ForeignKeyConstraint(
+ ['writer_id', 'magazine_id'],
+ ['writer.id', 'writer.magazine_id']
+ ),
+ )
+
+
+ class Writer(Base):
+ __tablename__ = 'writer'
+
+ id = Column(Integer, primary_key=True)
+ magazine_id = Column(ForeignKey('magazine.id'), primary_key=True)
+ magazine = relationship("Magazine")
+
+When the above mapping is configured, we will see this warning emitted::
+
+ SAWarning: relationship 'Article.writer' will copy column
+ writer.magazine_id to column article.magazine_id,
+ which conflicts with relationship(s): 'Article.magazine'
+ (copies magazine.id to article.magazine_id). Consider applying
+ viewonly=True to read-only relationships, or provide a primaryjoin
+ condition marking writable columns with the foreign() annotation.
+
+This warning originates from the fact that ``Article.magazine_id`` is
+the subject of two different foreign key constraints; it refers to
+``Magazine.id`` directly as a source column, but also refers to
+``Writer.magazine_id`` as a source column in the context of the
+composite key to ``Writer``. If we associate an ``Article`` with a
+particular ``Magazine``, but then associate the ``Article`` with a
+``Writer`` that's associated with a *different* ``Magazine``, the ORM
+will overwrite ``Article.magazine_id`` non-deterministically, silently
+changing which magazine we refer to; it may
+also attempt to place NULL into this column if we de-associate a
+``Writer`` from an ``Article``. The warning lets us know this is the case.
+
+To solve this, we need to break out the behavior of ``Article`` to include
+all three of the following features:
+
+1. ``Article`` first and foremost writes to
+ ``Article.magazine_id`` based on data persisted in the ``Article.magazine``
+ relationship only; that is, a value copied from ``Magazine.id``.
+
+2. ``Article`` can write to ``Article.writer_id`` on behalf of data
+ persisted in the ``Article.writer`` relationship, but only the
+ ``Writer.id`` column; the ``Writer.magazine_id`` column should not
+ be written into ``Article.magazine_id`` as it ultimately is sourced
+ from ``Magazine.id``.
+
+3. ``Article`` takes ``Article.magazine_id`` into account when loading
+ ``Article.writer``, even though it *doesn't* write to it on behalf
+ of this relationship.
+
+To get just #1 and #2, we could specify only ``Article.writer_id`` as the
+"foreign keys" for ``Article.writer``::
+
+ class Article(Base):
+ # ...
+
+ writer = relationship("Writer", foreign_keys='Article.writer_id')
+
+However, this has the effect of ``Article.writer`` not taking
+``Article.magazine_id`` into account when querying against ``Writer``:
+
+.. sourcecode:: sql
+
+ SELECT article.article_id AS article_article_id,
+ article.magazine_id AS article_magazine_id,
+ article.writer_id AS article_writer_id
+ FROM article
+ JOIN writer ON writer.id = article.writer_id
+
+Therefore, to get at all of #1, #2, and #3, we express the join condition
+as well as which columns are to be written, by stating the
+:paramref:`~.relationship.primaryjoin` condition fully, along with either the
+:paramref:`~.relationship.foreign_keys` argument, or more succinctly by
+annotating with :func:`~.orm.foreign`::
+
+ class Article(Base):
+ # ...
+
+ writer = relationship(
+ "Writer",
+ primaryjoin="and_(Writer.id == foreign(Article.writer_id), "
+ "Writer.magazine_id == Article.magazine_id)")
+
+.. versionchanged:: 1.0.0 the ORM will attempt to warn when a column is used
+ as the synchronization target from more than one relationship
+ simultaneously.
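+
+Alternatively, if ``Article.writer`` were never used to persist changes,
+the warning's other suggestion applies; a minimal sketch, assuming the
+relationship can be treated as read-only::
+
+    class Article(Base):
+        # ...
+
+        # loading only; no flush-time writes to article.magazine_id
+        writer = relationship("Writer", viewonly=True)
+
+With ``viewonly=True``, the relationship is used for loading only, and the
+ORM will not attempt to write to ``Article.magazine_id`` on its behalf.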
+
+
Non-relational Comparisons / Materialized Path
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
diff --git a/doc/build/orm/tutorial.rst b/doc/build/orm/tutorial.rst
index f1b6a4499..8871ce765 100644
--- a/doc/build/orm/tutorial.rst
+++ b/doc/build/orm/tutorial.rst
@@ -1631,6 +1631,13 @@ very easy to use:
>>> jack.addresses
[<Address(email_address='jack@google.com')>, <Address(email_address='j25@yahoo.com')>]
+.. note::
+
+ :func:`.subqueryload`, when used in conjunction with a limiting modifier
+ such as :meth:`.Query.first`, :meth:`.Query.limit` or :meth:`.Query.offset`,
+ should also include :meth:`.Query.order_by` on a unique column in order to
+ ensure correct results. See :ref:`subqueryload_ordering`.
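+
+ For example, a minimal sketch mirroring the FAQ recommendation::
+
+     session.query(User).options(subqueryload(User.addresses)).\
+         order_by(User.id).first()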
+
Joined Load
-------------
diff --git a/examples/generic_associations/discriminator_on_association.py b/examples/generic_associations/discriminator_on_association.py
index e03cfec00..7bb04cf85 100644
--- a/examples/generic_associations/discriminator_on_association.py
+++ b/examples/generic_associations/discriminator_on_association.py
@@ -84,6 +84,7 @@ class HasAddresses(object):
"%sAddressAssociation" % name,
(AddressAssociation, ),
dict(
+ __tablename__=None,
__mapper_args__={
"polymorphic_identity": discriminator
}
diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py
index 853566172..d184e1fbf 100644
--- a/lib/sqlalchemy/__init__.py
+++ b/lib/sqlalchemy/__init__.py
@@ -25,6 +25,7 @@ from .sql import (
extract,
false,
func,
+ funcfilter,
insert,
intersect,
intersect_all,
diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py
index ba3050ae5..dad02ee0f 100644
--- a/lib/sqlalchemy/dialects/mssql/base.py
+++ b/lib/sqlalchemy/dialects/mssql/base.py
@@ -846,7 +846,7 @@ class MSExecutionContext(default.DefaultExecutionContext):
"SET IDENTITY_INSERT %s OFF" %
self.dialect.identifier_preparer. format_table(
self.compiled.statement.table)))
- except:
+ except Exception:
pass
def get_result_proxy(self):
diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py
index 7ccd59abb..58eb3afa0 100644
--- a/lib/sqlalchemy/dialects/mysql/base.py
+++ b/lib/sqlalchemy/dialects/mysql/base.py
@@ -341,6 +341,29 @@ reflection will not include foreign keys. For these tables, you may supply a
:ref:`mysql_storage_engines`
+.. _mysql_unique_constraints:
+
+MySQL Unique Constraints and Reflection
+---------------------------------------
+
+SQLAlchemy supports both the :class:`.Index` construct with the
+flag ``unique=True``, indicating a UNIQUE index, as well as the
+:class:`.UniqueConstraint` construct, representing a UNIQUE constraint.
+Both objects/syntaxes are supported by MySQL when emitting DDL to create
+these constraints. However, MySQL does not have a unique constraint
+construct that is separate from a unique index; that is, the "UNIQUE"
+constraint on MySQL is equivalent to creating a "UNIQUE INDEX".
+
+When reflecting these constructs, the :meth:`.Inspector.get_indexes`
+and the :meth:`.Inspector.get_unique_constraints` methods will **both**
+return an entry for a UNIQUE index in MySQL. However, when performing
+full table reflection using ``Table(..., autoload=True)``,
+the :class:`.UniqueConstraint` construct is
+**not** part of the fully reflected :class:`.Table` construct under any
+circumstances; this construct is always represented as an :class:`.Index`
+with the ``unique=True`` setting present in the :attr:`.Table.indexes`
+collection.
+
.. _mysql_timestamp_null:
@@ -2317,7 +2340,7 @@ class MySQLDialect(default.DefaultDialect):
# basic operations via autocommit fail.
try:
dbapi_connection.commit()
- except:
+ except Exception:
if self.server_version_info < (3, 23, 15):
args = sys.exc_info()[1].args
if args and args[0] == 1064:
@@ -2329,7 +2352,7 @@ class MySQLDialect(default.DefaultDialect):
try:
dbapi_connection.rollback()
- except:
+ except Exception:
if self.server_version_info < (3, 23, 15):
args = sys.exc_info()[1].args
if args and args[0] == 1064:
@@ -2570,7 +2593,7 @@ class MySQLDialect(default.DefaultDialect):
pass
else:
self.logger.info(
- "Converting unknown KEY type %s to a plain KEY" % flavor)
+ "Converting unknown KEY type %s to a plain KEY", flavor)
pass
index_d = {}
index_d['name'] = spec['name']
@@ -2590,7 +2613,8 @@ class MySQLDialect(default.DefaultDialect):
return [
{
'name': key['name'],
- 'column_names': [col[0] for col in key['columns']]
+ 'column_names': [col[0] for col in key['columns']],
+ 'duplicates_index': key['name'],
}
for key in parsed_state.keys
if key['type'] == 'UNIQUE'
diff --git a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py
index e51e80005..417e1ad6f 100644
--- a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py
+++ b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py
@@ -21,6 +21,7 @@ from .base import (MySQLDialect, MySQLExecutionContext,
BIT)
from ... import util
+import re
class MySQLExecutionContext_mysqlconnector(MySQLExecutionContext):
@@ -31,18 +32,34 @@ class MySQLExecutionContext_mysqlconnector(MySQLExecutionContext):
class MySQLCompiler_mysqlconnector(MySQLCompiler):
def visit_mod_binary(self, binary, operator, **kw):
- return self.process(binary.left, **kw) + " %% " + \
- self.process(binary.right, **kw)
+ if self.dialect._mysqlconnector_double_percents:
+ return self.process(binary.left, **kw) + " %% " + \
+ self.process(binary.right, **kw)
+ else:
+ return self.process(binary.left, **kw) + " % " + \
+ self.process(binary.right, **kw)
def post_process_text(self, text):
- return text.replace('%', '%%')
+ if self.dialect._mysqlconnector_double_percents:
+ return text.replace('%', '%%')
+ else:
+ return text
+
+ def escape_literal_column(self, text):
+ if self.dialect._mysqlconnector_double_percents:
+ return text.replace('%', '%%')
+ else:
+ return text
class MySQLIdentifierPreparer_mysqlconnector(MySQLIdentifierPreparer):
def _escape_identifier(self, value):
value = value.replace(self.escape_quote, self.escape_to_quote)
- return value.replace("%", "%%")
+ if self.dialect._mysqlconnector_double_percents:
+ return value.replace("%", "%%")
+ else:
+ return value
class _myconnpyBIT(BIT):
@@ -55,8 +72,6 @@ class _myconnpyBIT(BIT):
class MySQLDialect_mysqlconnector(MySQLDialect):
driver = 'mysqlconnector'
- if util.py2k:
- supports_unicode_statements = False
supports_unicode_binds = True
supports_sane_rowcount = True
@@ -77,6 +92,10 @@ class MySQLDialect_mysqlconnector(MySQLDialect):
}
)
+ @util.memoized_property
+ def supports_unicode_statements(self):
+ return util.py3k or self._mysqlconnector_version_info > (2, 0)
+
@classmethod
def dbapi(cls):
from mysql import connector
@@ -103,10 +122,25 @@ class MySQLDialect_mysqlconnector(MySQLDialect):
'client_flags', ClientFlag.get_default())
client_flags |= ClientFlag.FOUND_ROWS
opts['client_flags'] = client_flags
- except:
+ except Exception:
pass
return [[], opts]
+ @util.memoized_property
+ def _mysqlconnector_version_info(self):
+ if self.dbapi and hasattr(self.dbapi, '__version__'):
+ m = re.match(r'(\d+)\.(\d+)(?:\.(\d+))?',
+ self.dbapi.__version__)
+ if m:
+ return tuple(
+ int(x)
+ for x in m.group(1, 2, 3)
+ if x is not None)
+
+ @util.memoized_property
+ def _mysqlconnector_double_percents(self):
+ return not util.py3k and self._mysqlconnector_version_info < (2, 0)
+
def _get_server_version_info(self, connection):
dbapi_con = connection.connection
version = dbapi_con.get_server_version()
diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py
index 837a498fb..6df38e57e 100644
--- a/lib/sqlalchemy/dialects/oracle/base.py
+++ b/lib/sqlalchemy/dialects/oracle/base.py
@@ -813,7 +813,8 @@ class OracleDDLCompiler(compiler.DDLCompiler):
class OracleIdentifierPreparer(compiler.IdentifierPreparer):
reserved_words = set([x.lower() for x in RESERVED_WORDS])
- illegal_initial_characters = set(range(0, 10)).union(["_", "$"])
+ illegal_initial_characters = set(
+ (str(dig) for dig in range(0, 10))).union(["_", "$"])
def _bindparam_requires_quotes(self, value):
"""Return True if the given identifier requires quoting."""
diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py
index b9a0d461b..baa640eaa 100644
--- a/lib/sqlalchemy/dialects/postgresql/base.py
+++ b/lib/sqlalchemy/dialects/postgresql/base.py
@@ -402,6 +402,28 @@ underlying CREATE INDEX command, so it *must* be a valid index type for your
version of PostgreSQL.
+.. _postgresql_index_reflection:
+
+Postgresql Index Reflection
+---------------------------
+
+The Postgresql database creates a UNIQUE INDEX implicitly whenever the
+UNIQUE CONSTRAINT construct is used. When inspecting a table using
+:class:`.Inspector`, the :meth:`.Inspector.get_indexes`
+and the :meth:`.Inspector.get_unique_constraints` will report on these
+two constructs distinctly; in the case of the index, the key
+``duplicates_constraint`` will be present in the index entry if it is
+detected as mirroring a constraint. When performing reflection using
+``Table(..., autoload=True)``, the UNIQUE INDEX is **not** returned
+in :attr:`.Table.indexes` when it is detected as mirroring a
+:class:`.UniqueConstraint` in the :attr:`.Table.constraints` collection.
+
+.. versionchanged:: 1.0.0 - :class:`.Table` reflection now includes
+ :class:`.UniqueConstraint` objects present in the :attr:`.Table.constraints`
+ collection; the Postgresql backend will no longer include a "mirrored"
+ :class:`.Index` construct in :attr:`.Table.indexes` if it is detected
+ as corresponding to a unique constraint.
+
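+For example, a sketch of inspecting a table that contains a UNIQUE
+CONSTRAINT; the table name ``mytable`` and connection URL are
+hypothetical::
+
+    from sqlalchemy import create_engine, inspect
+
+    engine = create_engine("postgresql://scott:tiger@localhost/test")
+    insp = inspect(engine)
+
+    for idx in insp.get_indexes("mytable"):
+        # present when the index mirrors a UNIQUE constraint
+        print(idx.get("duplicates_constraint"))
+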
Special Reflection Options
--------------------------
@@ -2471,14 +2493,19 @@ class PGDialect(default.DefaultDialect):
SELECT
i.relname as relname,
ix.indisunique, ix.indexprs, ix.indpred,
- a.attname, a.attnum, ix.indkey%s
+ a.attname, a.attnum, c.conrelid, ix.indkey%s
FROM
pg_class t
join pg_index ix on t.oid = ix.indrelid
- join pg_class i on i.oid=ix.indexrelid
+ join pg_class i on i.oid = ix.indexrelid
left outer join
pg_attribute a
- on t.oid=a.attrelid and %s
+ on t.oid = a.attrelid and %s
+ left outer join
+ pg_constraint c
+ on (ix.indrelid = c.conrelid and
+ ix.indexrelid = c.conindid and
+ c.contype in ('p', 'u', 'x'))
WHERE
t.relkind IN ('r', 'v', 'f', 'm')
and t.oid = :table_oid
@@ -2501,7 +2528,7 @@ class PGDialect(default.DefaultDialect):
sv_idx_name = None
for row in c.fetchall():
- idx_name, unique, expr, prd, col, col_num, idx_key = row
+ idx_name, unique, expr, prd, col, col_num, conrelid, idx_key = row
if expr:
if idx_name != sv_idx_name:
@@ -2518,18 +2545,27 @@ class PGDialect(default.DefaultDialect):
% idx_name)
sv_idx_name = idx_name
+ has_idx = idx_name in indexes
index = indexes[idx_name]
if col is not None:
index['cols'][col_num] = col
- index['key'] = [int(k.strip()) for k in idx_key.split()]
- index['unique'] = unique
-
- return [
- {'name': name,
- 'unique': idx['unique'],
- 'column_names': [idx['cols'][i] for i in idx['key']]}
- for name, idx in indexes.items()
- ]
+ if not has_idx:
+ index['key'] = [int(k.strip()) for k in idx_key.split()]
+ index['unique'] = unique
+ if conrelid is not None:
+ index['duplicates_constraint'] = idx_name
+
+ result = []
+ for name, idx in indexes.items():
+ entry = {
+ 'name': name,
+ 'unique': idx['unique'],
+ 'column_names': [idx['cols'][i] for i in idx['key']]
+ }
+ if 'duplicates_constraint' in idx:
+ entry['duplicates_constraint'] = idx['duplicates_constraint']
+ result.append(entry)
+ return result
@reflection.cache
def get_unique_constraints(self, connection, table_name,
diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
index 9dfd53e22..f67b2e3b0 100644
--- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py
+++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
@@ -32,10 +32,25 @@ psycopg2-specific keyword arguments which are accepted by
way of enabling this mode on a per-execution basis.
* ``use_native_unicode``: Enable the usage of Psycopg2 "native unicode" mode
per connection. True by default.
+
+ .. seealso::
+
+ :ref:`psycopg2_disable_native_unicode`
+
* ``isolation_level``: This option, available for all PostgreSQL dialects,
includes the ``AUTOCOMMIT`` isolation level when using the psycopg2
- dialect. See :ref:`psycopg2_isolation_level`.
+ dialect.
+
+ .. seealso::
+
+ :ref:`psycopg2_isolation_level`
+
+* ``client_encoding``: sets the client encoding in a libpq-agnostic way,
+ using psycopg2's ``set_client_encoding()`` method.
+
+ .. seealso::
+
+ :ref:`psycopg2_unicode`
Unix Domain Connections
------------------------
@@ -75,8 +90,10 @@ The following DBAPI-specific options are respected when used with
If ``None`` or not set, the ``server_side_cursors`` option of the
:class:`.Engine` is used.
-Unicode
--------
+.. _psycopg2_unicode:
+
+Unicode with Psycopg2
+----------------------
By default, the psycopg2 driver uses the ``psycopg2.extensions.UNICODE``
extension, such that the DBAPI receives and returns all strings as Python
@@ -84,27 +101,51 @@ Unicode objects directly - SQLAlchemy passes these values through without
change. Psycopg2 here will encode/decode string values based on the
current "client encoding" setting; by default this is the value in
the ``postgresql.conf`` file, which often defaults to ``SQL_ASCII``.
-Typically, this can be changed to ``utf-8``, as a more useful default::
+Typically, this can be changed to ``utf8``, as a more useful default::
+
+ # postgresql.conf file
- #client_encoding = sql_ascii # actually, defaults to database
+ # client_encoding = sql_ascii # actually, defaults to database
# encoding
client_encoding = utf8
A second way to affect the client encoding is to set it within Psycopg2
-locally. SQLAlchemy will call psycopg2's ``set_client_encoding()``
-method (see:
-http://initd.org/psycopg/docs/connection.html#connection.set_client_encoding)
+locally. SQLAlchemy will call psycopg2's
+:meth:`psycopg2:connection.set_client_encoding` method
on all new connections based on the value passed to
:func:`.create_engine` using the ``client_encoding`` parameter::
+ # set_client_encoding() setting;
+ # works for *all* Postgresql versions
engine = create_engine("postgresql://user:pass@host/dbname",
client_encoding='utf8')
This overrides the encoding specified in the Postgresql client configuration.
+When using the parameter in this way, the psycopg2 driver emits
+``SET client_encoding TO 'utf8'`` on the connection explicitly, and works
+in all Postgresql versions.
+
+Note that the ``client_encoding`` setting as passed to :func:`.create_engine`
+is **not the same** as the more recently added ``client_encoding`` parameter
+now supported by libpq directly. This is enabled when ``client_encoding``
+is passed directly to ``psycopg2.connect()``, and from SQLAlchemy is passed
+using the :paramref:`.create_engine.connect_args` parameter::
+
+ # libpq direct parameter setting;
+ # only works for Postgresql **9.1 and above**
+ engine = create_engine("postgresql://user:pass@host/dbname",
+ connect_args={'client_encoding': 'utf8'})
+
+ # using the query string is equivalent
+ engine = create_engine("postgresql://user:pass@host/dbname?client_encoding=utf8")
-.. versionadded:: 0.7.3
- The psycopg2-specific ``client_encoding`` parameter to
- :func:`.create_engine`.
+The above parameter was only added to libpq as of version 9.1 of Postgresql,
+so using the previous method is better for cross-version support.
+
+.. _psycopg2_disable_native_unicode:
+
+Disabling Native Unicode
+^^^^^^^^^^^^^^^^^^^^^^^^
SQLAlchemy can also be instructed to skip the usage of the psycopg2
``UNICODE`` extension and to instead utilize its own unicode encode/decode
@@ -116,8 +157,56 @@ in and coerce from bytes on the way back,
using the value of the :func:`.create_engine` ``encoding`` parameter, which
defaults to ``utf-8``.
SQLAlchemy's own unicode encode/decode functionality is steadily becoming
-obsolete as more DBAPIs support unicode fully along with the approach of
-Python 3; in modern usage psycopg2 should be relied upon to handle unicode.
+obsolete as most DBAPIs now support unicode fully.
+
+Bound Parameter Styles
+----------------------
+
+The default parameter style for the psycopg2 dialect is "pyformat", where
+SQL is rendered using ``%(paramname)s`` style. This format has the limitation
+that it does not accommodate the unusual case of parameter names that
+actually contain percent or parenthesis symbols; as SQLAlchemy in many cases
+generates bound parameter names based on the name of a column, the presence
+of these characters in a column name can lead to problems.
+
+There are two solutions to the issue of a :class:`.schema.Column` that contains
+one of these characters in its name. One is to specify the
+:paramref:`.schema.Column.key` for columns that have such names::
+
+ measurement = Table('measurement', metadata,
+ Column('Size (meters)', Integer, key='size_meters')
+ )
+
+Above, an INSERT statement such as ``measurement.insert()`` will use
+``size_meters`` as the parameter name, and a SQL expression such as
+``measurement.c.size_meters > 10`` will derive the bound parameter name
+from the ``size_meters`` key as well.
+
+.. versionchanged:: 1.0.0 - SQL expressions will use :attr:`.Column.key`
+ as the source of naming when anonymous bound parameters are created
+ in SQL expressions; previously, this behavior only applied to
+ :meth:`.Table.insert` and :meth:`.Table.update` parameter names.
+
+The other solution is to use a positional format; psycopg2 allows use of the
+"format" paramstyle, which can be passed to
+:paramref:`.create_engine.paramstyle`::
+
+ engine = create_engine(
+ 'postgresql://scott:tiger@localhost:5432/test', paramstyle='format')
+
+With the above engine, instead of a statement like::
+
+ INSERT INTO measurement ("Size (meters)") VALUES (%(Size (meters))s)
+ {'Size (meters)': 1}
+
+we instead see::
+
+ INSERT INTO measurement ("Size (meters)") VALUES (%s)
+ (1, )
+
+Above, the named parameter dictionary has been converted into a
+positionally-ordered tuple.
+
Transactions
------------
diff --git a/lib/sqlalchemy/dialects/sqlite/__init__.py b/lib/sqlalchemy/dialects/sqlite/__init__.py
index 0eceaa537..a53d53e9d 100644
--- a/lib/sqlalchemy/dialects/sqlite/__init__.py
+++ b/lib/sqlalchemy/dialects/sqlite/__init__.py
@@ -5,7 +5,7 @@
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
-from sqlalchemy.dialects.sqlite import base, pysqlite
+from sqlalchemy.dialects.sqlite import base, pysqlite, pysqlcipher
# default dialect
base.dialect = pysqlite.dialect
diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py
index 335b35c94..33003297c 100644
--- a/lib/sqlalchemy/dialects/sqlite/base.py
+++ b/lib/sqlalchemy/dialects/sqlite/base.py
@@ -646,8 +646,8 @@ class SQLiteDDLCompiler(compiler.DDLCompiler):
def visit_foreign_key_constraint(self, constraint):
- local_table = list(constraint._elements.values())[0].parent.table
- remote_table = list(constraint._elements.values())[0].column.table
+ local_table = constraint.elements[0].parent.table
+ remote_table = constraint.elements[0].column.table
if local_table.schema != remote_table.schema:
return None
diff --git a/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py b/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py
new file mode 100644
index 000000000..3c55a1de7
--- /dev/null
+++ b/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py
@@ -0,0 +1,116 @@
+# sqlite/pysqlcipher.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""
+.. dialect:: sqlite+pysqlcipher
+ :name: pysqlcipher
+ :dbapi: pysqlcipher
+ :connectstring: sqlite+pysqlcipher://:passphrase/file_path[?kdf_iter=<iter>]
+ :url: https://pypi.python.org/pypi/pysqlcipher
+
+ ``pysqlcipher`` is a fork of the standard ``pysqlite`` driver to make
+ use of the `SQLCipher <https://www.zetetic.net/sqlcipher>`_ backend.
+
+ .. versionadded:: 0.9.9
+
+Driver
+------
+
+The driver here is the `pysqlcipher <https://pypi.python.org/pypi/pysqlcipher>`_
+driver, which makes use of the SQLCipher engine. This system essentially
+introduces new PRAGMA commands to SQLite which allow the setting of a
+passphrase and other encryption parameters, so that the database
+file can be encrypted.
+
+Connect Strings
+---------------
+
+The format of the connect string is in every way the same as that
+of the :mod:`~sqlalchemy.dialects.sqlite.pysqlite` driver, except that the
+"password" field is now accepted, which should contain a passphrase::
+
+ e = create_engine('sqlite+pysqlcipher://:testing@/foo.db')
+
+For an absolute file path, two leading slashes should be used for the
+database name::
+
+ e = create_engine('sqlite+pysqlcipher://:testing@//path/to/foo.db')
+
+A selection of additional encryption-related pragmas supported by SQLCipher
+as documented at https://www.zetetic.net/sqlcipher/sqlcipher-api/ can be passed
+in the query string, and will result in that PRAGMA being called for each
+new connection. Currently, ``cipher``, ``kdf_iter``,
+``cipher_page_size`` and ``cipher_use_hmac`` are supported::
+
+ e = create_engine('sqlite+pysqlcipher://:testing@/foo.db?cipher=aes-256-cfb&kdf_iter=64000')
+
+
+Pooling Behavior
+----------------
+
+The driver makes a change to the default pool behavior of pysqlite
+as described in :ref:`pysqlite_threading_pooling`. The pysqlcipher driver
+has been observed to be significantly slower on connection than the
+pysqlite driver, most likely due to the encryption overhead, so the
+dialect here defaults to using the :class:`.SingletonThreadPool`
+implementation,
+instead of the :class:`.NullPool` pool used by pysqlite. As always, the pool
+implementation is entirely configurable using the
+:paramref:`.create_engine.poolclass` parameter; the :class:`.StaticPool` may
+be more feasible for single-threaded use, or :class:`.NullPool` may be used
+to prevent unencrypted connections from being held open for long periods of
+time, at the expense of slower startup time for new connections.
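+
+For example, a sketch of opting for :class:`.NullPool` instead, reusing
+the hypothetical database name from the examples above::
+
+    from sqlalchemy import create_engine
+    from sqlalchemy.pool import NullPool
+
+    e = create_engine(
+        'sqlite+pysqlcipher://:testing@/foo.db',
+        poolclass=NullPool)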
+
+
+"""
+from __future__ import absolute_import
+from .pysqlite import SQLiteDialect_pysqlite
+from ...engine import url as _url
+from ... import pool
+
+
+class SQLiteDialect_pysqlcipher(SQLiteDialect_pysqlite):
+ driver = 'pysqlcipher'
+
+ pragmas = ('kdf_iter', 'cipher', 'cipher_page_size', 'cipher_use_hmac')
+
+ @classmethod
+ def dbapi(cls):
+ from pysqlcipher import dbapi2 as sqlcipher
+ return sqlcipher
+
+ @classmethod
+ def get_pool_class(cls, url):
+ return pool.SingletonThreadPool
+
+ def connect(self, *cargs, **cparams):
+ passphrase = cparams.pop('passphrase', '')
+
+ pragmas = dict(
+ (key, cparams.pop(key, None)) for key in
+ self.pragmas
+ )
+
+ conn = super(SQLiteDialect_pysqlcipher, self).\
+ connect(*cargs, **cparams)
+ conn.execute('pragma key="%s"' % passphrase)
+ for prag, value in pragmas.items():
+ if value is not None:
+ conn.execute('pragma %s=%s' % (prag, value))
+
+ return conn
+
+ def create_connect_args(self, url):
+ super_url = _url.URL(
+ url.drivername, username=url.username,
+ host=url.host, database=url.database, query=url.query)
+ c_args, opts = super(SQLiteDialect_pysqlcipher, self).\
+ create_connect_args(super_url)
+ opts['passphrase'] = url.password
+ return c_args, opts
+
+dialect = SQLiteDialect_pysqlcipher
diff --git a/lib/sqlalchemy/engine/__init__.py b/lib/sqlalchemy/engine/__init__.py
index 68145f5cd..cf75871bf 100644
--- a/lib/sqlalchemy/engine/__init__.py
+++ b/lib/sqlalchemy/engine/__init__.py
@@ -292,6 +292,17 @@ def create_engine(*args, **kwargs):
be used instead. Can be used for testing of DBAPIs as well as to
inject "mock" DBAPI implementations into the :class:`.Engine`.
+ :param paramstyle=None: The `paramstyle <http://legacy.python.org/dev/peps/pep-0249/#paramstyle>`_
+ to use when rendering bound parameters. This style defaults to the
+ one recommended by the DBAPI itself, which is retrieved from the
+ ``.paramstyle`` attribute of the DBAPI. However, most DBAPIs accept
+ more than one paramstyle, and in particular it may be desirable
+ to change a "named" paramstyle into a "positional" one, or vice versa.
+ When this attribute is passed, it should be one of the values
+ ``"qmark"``, ``"numeric"``, ``"named"``, ``"format"`` or
+ ``"pyformat"``, and should correspond to a parameter style known
+ to be supported by the DBAPI in use.
+
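+ For example, a sketch of selecting the positional "format" style with
+ a DBAPI known to support it, such as psycopg2 (the URL here is
+ hypothetical)::
+
+     engine = create_engine(
+         "postgresql+psycopg2://scott:tiger@localhost/test",
+         paramstyle="format")
+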
:param pool=None: an already-constructed instance of
:class:`~sqlalchemy.pool.Pool`, such as a
:class:`~sqlalchemy.pool.QueuePool` instance. If non-None, this
diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py
index e5feda138..dd82be1d1 100644
--- a/lib/sqlalchemy/engine/base.py
+++ b/lib/sqlalchemy/engine/base.py
@@ -1126,8 +1126,6 @@ class Connection(Connectable):
"""
try:
cursor.close()
- except (SystemExit, KeyboardInterrupt):
- raise
except Exception:
# log the error through the connection pool's logger.
self.engine.pool.logger.error(
diff --git a/lib/sqlalchemy/engine/reflection.py b/lib/sqlalchemy/engine/reflection.py
index 838a5bdd2..2a1def86a 100644
--- a/lib/sqlalchemy/engine/reflection.py
+++ b/lib/sqlalchemy/engine/reflection.py
@@ -508,6 +508,10 @@ class Inspector(object):
table_name, schema, table, cols_by_orig_name,
include_columns, exclude_columns, reflection_options)
+ self._reflect_unique_constraints(
+ table_name, schema, table, cols_by_orig_name,
+ include_columns, exclude_columns, reflection_options)
+
def _reflect_column(
self, table, col_d, include_columns,
exclude_columns, cols_by_orig_name):
@@ -638,12 +642,15 @@ class Inspector(object):
columns = index_d['column_names']
unique = index_d['unique']
flavor = index_d.get('type', 'index')
+ duplicates = index_d.get('duplicates_constraint')
if include_columns and \
not set(columns).issubset(include_columns):
util.warn(
"Omitting %s key for (%s), key covers omitted columns." %
(flavor, ', '.join(columns)))
continue
+ if duplicates:
+ continue
# look for columns by orig name in cols_by_orig_name,
# but support columns that are in-Python only as fallback
idx_cols = []
@@ -661,3 +668,43 @@ class Inspector(object):
idx_cols.append(idx_col)
sa_schema.Index(name, *idx_cols, **dict(unique=unique))
+
+ def _reflect_unique_constraints(
+ self, table_name, schema, table, cols_by_orig_name,
+ include_columns, exclude_columns, reflection_options):
+
+ # Unique Constraints
+ try:
+ constraints = self.get_unique_constraints(table_name, schema)
+ except NotImplementedError:
+ # optional dialect feature
+ return
+
+ for const_d in constraints:
+ conname = const_d['name']
+ columns = const_d['column_names']
+ duplicates = const_d.get('duplicates_index')
+ if include_columns and \
+ not set(columns).issubset(include_columns):
+ util.warn(
+ "Omitting unique constraint key for (%s), "
+ "key covers omitted columns." %
+ ', '.join(columns))
+ continue
+ if duplicates:
+ continue
+ # look for columns by orig name in cols_by_orig_name,
+ # but support columns that are in-Python only as fallback
+ constrained_cols = []
+ for c in columns:
+ try:
+ constrained_col = cols_by_orig_name[c] \
+ if c in cols_by_orig_name else table.c[c]
+ except KeyError:
+ util.warn(
+ "unique constraint key '%s' was not located in "
+ "columns for table '%s'" % (c, table_name))
+ else:
+ constrained_cols.append(constrained_col)
+ table.append_constraint(
+ sa_schema.UniqueConstraint(*constrained_cols, name=conname))
diff --git a/lib/sqlalchemy/engine/strategies.py b/lib/sqlalchemy/engine/strategies.py
index 38206be89..398ef8df6 100644
--- a/lib/sqlalchemy/engine/strategies.py
+++ b/lib/sqlalchemy/engine/strategies.py
@@ -162,6 +162,7 @@ class DefaultEngineStrategy(EngineStrategy):
def first_connect(dbapi_connection, connection_record):
c = base.Connection(engine, connection=dbapi_connection,
_has_events=False)
+ c._execution_options = util.immutabledict()
dialect.initialize(c)
event.listen(pool, 'first_connect', first_connect, once=True)
diff --git a/lib/sqlalchemy/events.py b/lib/sqlalchemy/events.py
index 86bd3653b..c144902cd 100644
--- a/lib/sqlalchemy/events.py
+++ b/lib/sqlalchemy/events.py
@@ -338,7 +338,7 @@ class PoolEvents(event.Events):
"""
- def reset(self, dbapi_connnection, connection_record):
+ def reset(self, dbapi_connection, connection_record):
"""Called before the "reset" action occurs for a pooled connection.
This event represents
diff --git a/lib/sqlalchemy/exc.py b/lib/sqlalchemy/exc.py
index a82bae33f..3271d09d4 100644
--- a/lib/sqlalchemy/exc.py
+++ b/lib/sqlalchemy/exc.py
@@ -238,14 +238,16 @@ class StatementError(SQLAlchemyError):
def __str__(self):
from sqlalchemy.sql import util
- params_repr = util._repr_params(self.params, 10)
+ details = [SQLAlchemyError.__str__(self)]
+ if self.statement:
+ details.append("[SQL: %r]" % self.statement)
+ if self.params:
+ params_repr = util._repr_params(self.params, 10)
+ details.append("[parameters: %r]" % params_repr)
return ' '.join([
"(%s)" % det for det in self.detail
- ] + [
- SQLAlchemyError.__str__(self),
- repr(self.statement), repr(params_repr)
- ])
+ ] + details)
def __unicode__(self):
return self.__str__()
@@ -280,17 +282,19 @@ class DBAPIError(StatementError):
connection_invalidated=False):
# Don't ever wrap these, just return them directly as if
# DBAPIError didn't exist.
- if isinstance(orig, (KeyboardInterrupt, SystemExit, DontWrapMixin)):
+ if (isinstance(orig, BaseException) and
+ not isinstance(orig, Exception)) or \
+ isinstance(orig, DontWrapMixin):
return orig
if orig is not None:
# not a DBAPI error, statement is present.
# raise a StatementError
if not isinstance(orig, dbapi_base_err) and statement:
- msg = traceback.format_exception_only(
- orig.__class__, orig)[-1].strip()
return StatementError(
- "%s (original cause: %s)" % (str(orig), msg),
+ "(%s.%s) %s" %
+ (orig.__class__.__module__, orig.__class__.__name__,
+ orig),
statement, params, orig
)
@@ -310,13 +314,12 @@ class DBAPIError(StatementError):
def __init__(self, statement, params, orig, connection_invalidated=False):
try:
text = str(orig)
- except (KeyboardInterrupt, SystemExit):
- raise
except Exception as e:
text = 'Error in str() of DB-API-generated exception: ' + str(e)
StatementError.__init__(
self,
- '(%s) %s' % (orig.__class__.__name__, text),
+ '(%s.%s) %s' % (
+ orig.__class__.__module__, orig.__class__.__name__, text, ),
statement,
params,
orig
diff --git a/lib/sqlalchemy/ext/declarative/api.py b/lib/sqlalchemy/ext/declarative/api.py
index e84b21ad2..66fe05fd0 100644
--- a/lib/sqlalchemy/ext/declarative/api.py
+++ b/lib/sqlalchemy/ext/declarative/api.py
@@ -547,7 +547,7 @@ class AbstractConcreteBase(ConcreteBase):
for scls in cls.__subclasses__():
sm = _mapper_or_none(scls)
- if sm.concrete and cls in scls.__bases__:
+ if sm and sm.concrete and cls in scls.__bases__:
sm._set_concrete_base(m)
diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py
index 2ab239f86..863dab5cb 100644
--- a/lib/sqlalchemy/orm/mapper.py
+++ b/lib/sqlalchemy/orm/mapper.py
@@ -1581,6 +1581,8 @@ class Mapper(InspectionAttr):
self,
prop,
))
+ oldprop = self._props[key]
+ self._path_registry.pop(oldprop, None)
self._props[key] = prop
@@ -2654,7 +2656,7 @@ def configure_mappers():
mapper._expire_memoizations()
mapper.dispatch.mapper_configured(
mapper, mapper.class_)
- except:
+ except Exception:
exc = sys.exc_info()[1]
if not hasattr(exc, '_configure_failed'):
mapper._configure_failed = exc
diff --git a/lib/sqlalchemy/orm/path_registry.py b/lib/sqlalchemy/orm/path_registry.py
index f10a125a8..d4dbf29a0 100644
--- a/lib/sqlalchemy/orm/path_registry.py
+++ b/lib/sqlalchemy/orm/path_registry.py
@@ -13,6 +13,9 @@ from .. import util
from .. import exc
from itertools import chain
from .base import class_mapper
+import logging
+
+log = logging.getLogger(__name__)
def _unreduce_path(path):
@@ -54,9 +57,11 @@ class PathRegistry(object):
self.path == other.path
def set(self, attributes, key, value):
+ log.debug("set '%s' on path '%s' to '%s'", key, self, value)
attributes[(key, self.path)] = value
def setdefault(self, attributes, key, value):
+ log.debug("setdefault '%s' on path '%s' to '%s'", key, self, value)
attributes.setdefault((key, self.path), value)
def get(self, attributes, key, value=None):
@@ -184,6 +189,11 @@ class PropRegistry(PathRegistry):
self.parent = parent
self.path = parent.path + (prop,)
+ def __str__(self):
+ return " -> ".join(
+ str(elem) for elem in self.path
+ )
+
@util.memoized_property
def has_entity(self):
return hasattr(self.prop, "mapper")
diff --git a/lib/sqlalchemy/orm/persistence.py b/lib/sqlalchemy/orm/persistence.py
index 74e69e44c..6b8d5af14 100644
--- a/lib/sqlalchemy/orm/persistence.py
+++ b/lib/sqlalchemy/orm/persistence.py
@@ -18,7 +18,7 @@ import operator
from itertools import groupby
from .. import sql, util, exc as sa_exc, schema
from . import attributes, sync, exc as orm_exc, evaluator
-from .base import state_str, _attr_as_key
+from .base import state_str, _attr_as_key, _entity_descriptor
from ..sql import expression
from . import loading
@@ -375,12 +375,12 @@ def _collect_update_commands(uowtransaction, table, states_to_update):
params[col.key] = history.added[0]
else:
pk_params[col._label] = history.unchanged[0]
+ if pk_params[col._label] is None:
+ raise orm_exc.FlushError(
+ "Can't update table %s using NULL for primary "
+ "key value on column %s" % (table, col))
if params or value_params:
- if None in pk_params.values():
- raise orm_exc.FlushError(
- "Can't update table using NULL for primary "
- "key value")
params.update(pk_params)
yield (
state, state_dict, params, mapper,
@@ -441,9 +441,9 @@ def _collect_delete_commands(base_mapper, uowtransaction, table,
state, state_dict, col)
if value is None:
raise orm_exc.FlushError(
- "Can't delete from table "
+ "Can't delete from table %s "
"using NULL for primary "
- "key value")
+ "key value on column %s" % (table, col))
if update_version_id is not None and \
table.c.contains_column(mapper.version_id_col):
@@ -987,6 +987,7 @@ class BulkUpdate(BulkUD):
super(BulkUpdate, self).__init__(query)
self.query._no_select_modifiers("update")
self.values = values
+ self.mapper = self.query._mapper_zero_or_none()
@classmethod
def factory(cls, query, synchronize_session, values):
@@ -996,9 +997,40 @@ class BulkUpdate(BulkUD):
False: BulkUpdate
}, synchronize_session, query, values)
+ def _resolve_string_to_expr(self, key):
+ if self.mapper and isinstance(key, util.string_types):
+ attr = _entity_descriptor(self.mapper, key)
+ return attr.__clause_element__()
+ else:
+ return key
+
+ def _resolve_key_to_attrname(self, key):
+ if self.mapper and isinstance(key, util.string_types):
+ attr = _entity_descriptor(self.mapper, key)
+ return attr.property.key
+ elif isinstance(key, attributes.InstrumentedAttribute):
+ return key.key
+ elif hasattr(key, '__clause_element__'):
+ key = key.__clause_element__()
+
+ if self.mapper and isinstance(key, expression.ColumnElement):
+ try:
+ attr = self.mapper._columntoproperty[key]
+ except orm_exc.UnmappedColumnError:
+ return None
+ else:
+ return attr.key
+ else:
+ raise sa_exc.InvalidRequestError(
+ "Invalid expression type: %r" % key)
+
def _do_exec(self):
+ values = dict(
+ (self._resolve_string_to_expr(k), v)
+ for k, v in self.values.items()
+ )
update_stmt = sql.update(self.primary_table,
- self.context.whereclause, self.values)
+ self.context.whereclause, values)
self.result = self.query.session.execute(
update_stmt, params=self.query._params)
@@ -1044,9 +1076,10 @@ class BulkUpdateEvaluate(BulkEvaluate, BulkUpdate):
def _additional_evaluators(self, evaluator_compiler):
self.value_evaluators = {}
for key, value in self.values.items():
- key = _attr_as_key(key)
- self.value_evaluators[key] = evaluator_compiler.process(
- expression._literal_as_binds(value))
+ key = self._resolve_key_to_attrname(key)
+ if key is not None:
+ self.value_evaluators[key] = evaluator_compiler.process(
+ expression._literal_as_binds(value))
def _do_post_synchronize(self):
session = self.query.session
diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py
index 7b2ea7977..f07060825 100644
--- a/lib/sqlalchemy/orm/query.py
+++ b/lib/sqlalchemy/orm/query.py
@@ -1835,6 +1835,11 @@ class Query(object):
left_entity = prop = None
+ if isinstance(onclause, interfaces.PropComparator):
+ of_type = getattr(onclause, '_of_type', None)
+ else:
+ of_type = None
+
if isinstance(onclause, util.string_types):
left_entity = self._joinpoint_zero()
@@ -1861,8 +1866,6 @@ class Query(object):
if isinstance(onclause, interfaces.PropComparator):
if right_entity is None:
- right_entity = onclause.property.mapper
- of_type = getattr(onclause, '_of_type', None)
if of_type:
right_entity = of_type
else:
@@ -1944,11 +1947,9 @@ class Query(object):
from_obj, r_info.selectable):
overlap = True
break
- elif sql_util.selectables_overlap(l_info.selectable,
- r_info.selectable):
- overlap = True
- if overlap and l_info.selectable is r_info.selectable:
+ if (overlap or not create_aliases) and \
+ l_info.selectable is r_info.selectable:
raise sa_exc.InvalidRequestError(
"Can't join table/selectable '%s' to itself" %
l_info.selectable)
@@ -2756,9 +2757,25 @@ class Query(object):
Updates rows matched by this query in the database.
- :param values: a dictionary with attributes names as keys and literal
+ E.g.::
+
+ sess.query(User).filter(User.age == 25).\
+ update({User.age: User.age - 10}, synchronize_session='fetch')
+
+
+ sess.query(User).filter(User.age == 25).\
+ update({"age": User.age - 10}, synchronize_session='evaluate')
+
+
+ :param values: a dictionary with attribute names, or alternatively
+ mapped attributes or SQL expressions, as keys, and literal
+ values or SQL expressions as values.
+ .. versionchanged:: 1.0.0 - string names in the values dictionary
+ are now resolved against the mapped entity; previously, these
+ strings were passed as literal column names with no mapper-level
+ translation.
+
:param synchronize_session: chooses the strategy to update the
attributes on objects in the session. Valid values are:
@@ -2796,7 +2813,7 @@ class Query(object):
which normally occurs upon :meth:`.Session.commit` or can be forced
by using :meth:`.Session.expire_all`.
- * As of 0.8, this method will support multiple table updates, as
+ * The method supports multiple table updates, as
detailed in :ref:`multi_table_updates`, and this behavior does
extend to support updates of joined-inheritance and other multiple
table mappings. However, the **join condition of an inheritance
@@ -2827,12 +2844,6 @@ class Query(object):
"""
- # TODO: value keys need to be mapped to corresponding sql cols and
- # instr.attr.s to string keys
- # TODO: updates of manytoone relationships need to be converted to
- # fk assignments
- # TODO: cascades need handling.
-
update_op = persistence.BulkUpdate.factory(
self, synchronize_session, values)
update_op.exec_()
diff --git a/lib/sqlalchemy/orm/relationships.py b/lib/sqlalchemy/orm/relationships.py
index 56a33742d..86f1b3f82 100644
--- a/lib/sqlalchemy/orm/relationships.py
+++ b/lib/sqlalchemy/orm/relationships.py
@@ -16,6 +16,7 @@ and `secondaryjoin` aspects of :func:`.relationship`.
from __future__ import absolute_import
from .. import sql, util, exc as sa_exc, schema, log
+import weakref
from .util import CascadeOptions, _orm_annotate, _orm_deannotate
from . import dependency
from . import attributes
@@ -1532,6 +1533,7 @@ class RelationshipProperty(StrategizedProperty):
self._check_cascade_settings(self._cascade)
self._post_init()
self._generate_backref()
+ self._join_condition._warn_for_conflicting_sync_targets()
super(RelationshipProperty, self).do_init()
self._lazy_strategy = self._get_strategy((("lazy", "select"),))
@@ -2519,6 +2521,60 @@ class JoinCondition(object):
self.secondary_synchronize_pairs = \
self._deannotate_pairs(secondary_sync_pairs)
+ _track_overlapping_sync_targets = weakref.WeakKeyDictionary()
+
+ def _warn_for_conflicting_sync_targets(self):
+ if not self.support_sync:
+ return
+
+ # we would like to detect if we are synchronizing any column
+ # pairs in conflict with another relationship that wishes to sync
+ # an entirely different column to the same target. This is a
+ # very rare edge case so we will try to minimize the memory/overhead
+ # impact of this check
+ for from_, to_ in [
+ (from_, to_) for (from_, to_) in self.synchronize_pairs
+ ] + [
+ (from_, to_) for (from_, to_) in self.secondary_synchronize_pairs
+ ]:
+ # save ourselves a ton of memory and overhead by only
+ # considering columns that are subject to a overlapping
+ # FK constraints at the core level. This condition can arise
+ # if multiple relationships overlap foreign() directly, but
+ # we're going to assume it's typically a ForeignKeyConstraint-
+ # level configuration that benefits from this warning.
+ if len(to_.foreign_keys) < 2:
+ continue
+
+ if to_ not in self._track_overlapping_sync_targets:
+ self._track_overlapping_sync_targets[to_] = \
+ weakref.WeakKeyDictionary({self.prop: from_})
+ else:
+ other_props = []
+ prop_to_from = self._track_overlapping_sync_targets[to_]
+ for pr, fr_ in prop_to_from.items():
+ if pr.mapper in mapperlib._mapper_registry and \
+ fr_ is not from_ and \
+ pr not in self.prop._reverse_property:
+ other_props.append((pr, fr_))
+
+ if other_props:
+ util.warn(
+ "relationship '%s' will copy column %s to column %s, "
+ "which conflicts with relationship(s): %s. "
+ "Consider applying "
+ "viewonly=True to read-only relationships, or provide "
+ "a primaryjoin condition marking writable columns "
+ "with the foreign() annotation." % (
+ self.prop,
+ from_, to_,
+ ", ".join(
+ "'%s' (copies %s to %s)" % (pr, fr_, to_)
+ for (pr, fr_) in other_props)
+ )
+ )
+ self._track_overlapping_sync_targets[to_][self.prop] = from_
+
@util.memoized_property
def remote_columns(self):
return self._gather_join_annotations("remote")
diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py
index 13afcb357..f23983cbc 100644
--- a/lib/sqlalchemy/orm/session.py
+++ b/lib/sqlalchemy/orm/session.py
@@ -292,7 +292,7 @@ class SessionTransaction(object):
for s in self.session.identity_map.all_states():
s._expire(s.dict, self.session.identity_map._modified)
for s in self._deleted:
- s.session_id = None
+ s._detach()
self._deleted.clear()
elif self.nested:
self._parent._new.update(self._new)
@@ -641,14 +641,8 @@ class Session(_SessionClassMethods):
SessionExtension._adapt_listener(self, ext)
if binds is not None:
- for mapperortable, bind in binds.items():
- insp = inspect(mapperortable)
- if insp.is_selectable:
- self.bind_table(mapperortable, bind)
- elif insp.is_mapper:
- self.bind_mapper(mapperortable, bind)
- else:
- assert False
+ for key, bind in binds.items():
+ self._add_bind(key, bind)
if not self.autocommit:
self.begin()
@@ -1026,40 +1020,47 @@ class Session(_SessionClassMethods):
# TODO: + crystallize + document resolution order
# vis. bind_mapper/bind_table
- def bind_mapper(self, mapper, bind):
- """Bind operations for a mapper to a Connectable.
-
- mapper
- A mapper instance or mapped class
+ def _add_bind(self, key, bind):
+ try:
+ insp = inspect(key)
+ except sa_exc.NoInspectionAvailable:
+ if not isinstance(key, type):
+ raise exc.ArgumentError(
+ "Not acceptable bind target: %s" %
+ key)
+ else:
+ self.__binds[key] = bind
+ else:
+ if insp.is_selectable:
+ self.__binds[insp] = bind
+ elif insp.is_mapper:
+ self.__binds[insp.class_] = bind
+ for selectable in insp._all_tables:
+ self.__binds[selectable] = bind
+ else:
+ raise exc.ArgumentError(
+ "Not acceptable bind target: %s" %
+ key)
- bind
- Any Connectable: a :class:`.Engine` or :class:`.Connection`.
+ def bind_mapper(self, mapper, bind):
+ """Associate a :class:`.Mapper` with a "bind", e.g. a :class:`.Engine`
+ or :class:`.Connection`.
- All subsequent operations involving this mapper will use the given
- `bind`.
+ The given mapper is added to a lookup used by the
+ :meth:`.Session.get_bind` method.
"""
- if isinstance(mapper, type):
- mapper = class_mapper(mapper)
-
- self.__binds[mapper.base_mapper] = bind
- for t in mapper._all_tables:
- self.__binds[t] = bind
+ self._add_bind(mapper, bind)
def bind_table(self, table, bind):
- """Bind operations on a Table to a Connectable.
+ """Associate a :class:`.Table` with a "bind", e.g. a :class:`.Engine`
+ or :class:`.Connection`.
- table
- A :class:`.Table` instance
-
- bind
- Any Connectable: a :class:`.Engine` or :class:`.Connection`.
-
- All subsequent operations involving this :class:`.Table` will use the
- given `bind`.
+ The given table is added to a lookup used by the
+ :meth:`.Session.get_bind` method.
"""
- self.__binds[table] = bind
+ self._add_bind(table, bind)
def get_bind(self, mapper=None, clause=None):
"""Return a "bind" to which this :class:`.Session` is bound.
@@ -1113,6 +1114,7 @@ class Session(_SessionClassMethods):
bound :class:`.MetaData`.
"""
+
if mapper is clause is None:
if self.bind:
return self.bind
@@ -1122,15 +1124,23 @@ class Session(_SessionClassMethods):
"Connection, and no context was provided to locate "
"a binding.")
- c_mapper = mapper is not None and _class_to_mapper(mapper) or None
+ if mapper is not None:
+ try:
+ mapper = inspect(mapper)
+ except sa_exc.NoInspectionAvailable:
+ if isinstance(mapper, type):
+ raise exc.UnmappedClassError(mapper)
+ else:
+ raise
- # manually bound?
if self.__binds:
- if c_mapper:
- if c_mapper.base_mapper in self.__binds:
- return self.__binds[c_mapper.base_mapper]
- elif c_mapper.mapped_table in self.__binds:
- return self.__binds[c_mapper.mapped_table]
+ if mapper:
+ for cls in mapper.class_.__mro__:
+ if cls in self.__binds:
+ return self.__binds[cls]
+ if clause is None:
+ clause = mapper.mapped_table
+
if clause is not None:
for t in sql_util.find_tables(clause, include_crud=True):
if t in self.__binds:
@@ -1142,12 +1152,12 @@ class Session(_SessionClassMethods):
if isinstance(clause, sql.expression.ClauseElement) and clause.bind:
return clause.bind
- if c_mapper and c_mapper.mapped_table.bind:
- return c_mapper.mapped_table.bind
+ if mapper and mapper.mapped_table.bind:
+ return mapper.mapped_table.bind
context = []
if mapper is not None:
- context.append('mapper %s' % c_mapper)
+ context.append('mapper %s' % mapper)
if clause is not None:
context.append('SQL expression')
@@ -1399,6 +1409,7 @@ class Session(_SessionClassMethods):
state._detach()
elif self.transaction:
self.transaction._deleted.pop(state, None)
+ state._detach()
def _register_newly_persistent(self, states):
for state in states:
@@ -2439,16 +2450,19 @@ def make_transient_to_detached(instance):
def object_session(instance):
- """Return the ``Session`` to which instance belongs.
+ """Return the :class:`.Session` to which the given instance belongs.
- If the instance is not a mapped instance, an error is raised.
+ This is essentially the same as the :attr:`.InstanceState.session`
+ accessor. See that attribute for details.
"""
try:
- return _state_session(attributes.instance_state(instance))
+ state = attributes.instance_state(instance)
except exc.NO_STATE:
raise exc.UnmappedInstanceError(instance)
+ else:
+ return _state_session(state)
_new_sessionid = util.counter()
diff --git a/lib/sqlalchemy/orm/state.py b/lib/sqlalchemy/orm/state.py
index 3c12fda1a..560149de5 100644
--- a/lib/sqlalchemy/orm/state.py
+++ b/lib/sqlalchemy/orm/state.py
@@ -145,7 +145,16 @@ class InstanceState(interfaces.InspectionAttr):
@util.dependencies("sqlalchemy.orm.session")
def session(self, sessionlib):
"""Return the owning :class:`.Session` for this instance,
- or ``None`` if none available."""
+ or ``None`` if none available.
+
+ Note that the result here can in some cases be *different*
+ from that of ``obj in session``; an object that's been deleted
+ will report as not ``in session``; however, if the transaction is
+ still in progress, this attribute will still refer to that session.
+ Only when the transaction is completed does the object become
+ fully detached under normal circumstances.
+
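+ For example, a sketch of the distinction described above, using a
+ hypothetical session ``sess`` and mapped object ``obj``::
+
+     from sqlalchemy import inspect
+
+     sess.delete(obj)
+     sess.flush()
+
+     # the object no longer counts as "in" the session...
+     assert obj not in sess
+
+     # ...but remains associated until the transaction completes
+     assert inspect(obj).session is sess
+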
+ """
return sessionlib._state_session(self)
@property
@@ -258,8 +267,8 @@ class InstanceState(interfaces.InspectionAttr):
try:
return manager.original_init(*mixed[1:], **kwargs)
except:
- manager.dispatch.init_failure(self, args, kwargs)
- raise
+ with util.safe_reraise():
+ manager.dispatch.init_failure(self, args, kwargs)
def get_history(self, key, passive):
return self.manager[key].impl.get_history(self, self.dict, passive)
diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py
index cdb501c14..d95f17f64 100644
--- a/lib/sqlalchemy/orm/strategies.py
+++ b/lib/sqlalchemy/orm/strategies.py
@@ -373,7 +373,7 @@ class LazyLoader(AbstractRelationshipLoader):
self._equated_columns[c] = self._equated_columns[col]
self.logger.info("%s will use query.get() to "
- "optimize instance loads" % self)
+ "optimize instance loads", self)
def init_class_attribute(self, mapper):
self.is_class_level = True
diff --git a/lib/sqlalchemy/orm/strategy_options.py b/lib/sqlalchemy/orm/strategy_options.py
index 4f986193e..276da2ae0 100644
--- a/lib/sqlalchemy/orm/strategy_options.py
+++ b/lib/sqlalchemy/orm/strategy_options.py
@@ -161,11 +161,14 @@ class Load(Generative, MapperOption):
ext_info = inspect(ac)
path_element = ext_info.mapper
+ existing = path.entity_path[prop].get(
+ self.context, "path_with_polymorphic")
if not ext_info.is_aliased_class:
ac = orm_util.with_polymorphic(
ext_info.mapper.base_mapper,
ext_info.mapper, aliased=True,
- _use_mapper_path=True)
+ _use_mapper_path=True,
+ _existing_alias=existing)
path.entity_path[prop].set(
self.context, "path_with_polymorphic", inspect(ac))
path = path[prop][path_element]
@@ -176,6 +179,9 @@ class Load(Generative, MapperOption):
path = path.entity_path
return path
+ def __str__(self):
+ return "Load(strategy=%r)" % self.strategy
+
def _coerce_strat(self, strategy):
if strategy is not None:
strategy = tuple(sorted(strategy.items()))
diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py
index 734f9d5e6..4be8d19ff 100644
--- a/lib/sqlalchemy/orm/util.py
+++ b/lib/sqlalchemy/orm/util.py
@@ -30,13 +30,10 @@ class CascadeOptions(frozenset):
'all', 'none', 'delete-orphan'])
_allowed_cascades = all_cascades
- def __new__(cls, arg):
- values = set([
- c for c
- in re.split('\s*,\s*', arg or "")
- if c
- ])
-
+ def __new__(cls, value_list):
+ if isinstance(value_list, str) or value_list is None:
+ return cls.from_string(value_list)
+ values = set(value_list)
if values.difference(cls._allowed_cascades):
raise sa_exc.ArgumentError(
"Invalid cascade option(s): %s" %
@@ -70,6 +67,14 @@ class CascadeOptions(frozenset):
",".join([x for x in sorted(self)])
)
+ @classmethod
+ def from_string(cls, arg):
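+        # e.g. from_string("save-update, merge") produces the same set
+        # as CascadeOptions(["save-update", "merge"])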
+ values = [
+ c for c
+ in re.split('\s*,\s*', arg or "")
+ if c
+ ]
+ return cls(values)
def _validator_events(
desc, key, validator, include_removes, include_backrefs):
@@ -538,8 +543,13 @@ class AliasedInsp(InspectionAttr):
mapper, self)
def __repr__(self):
- return '<AliasedInsp at 0x%x; %s>' % (
- id(self), self.class_.__name__)
+ if self.with_polymorphic_mappers:
+ with_poly = "(%s)" % ", ".join(
+ mp.class_.__name__ for mp in self.with_polymorphic_mappers)
+ else:
+ with_poly = ""
+ return '<AliasedInsp at 0x%x; %s%s>' % (
+ id(self), self.class_.__name__, with_poly)
inspection._inspects(AliasedClass)(lambda target: target._aliased_insp)
@@ -643,7 +653,8 @@ def aliased(element, alias=None, name=None, flat=False, adapt_on_names=False):
def with_polymorphic(base, classes, selectable=False,
flat=False,
polymorphic_on=None, aliased=False,
- innerjoin=False, _use_mapper_path=False):
+ innerjoin=False, _use_mapper_path=False,
+ _existing_alias=None):
"""Produce an :class:`.AliasedClass` construct which specifies
columns for descendant mappers of the given base.
@@ -708,6 +719,16 @@ def with_polymorphic(base, classes, selectable=False,
only be specified if querying for one specific subtype only
"""
primary_mapper = _class_to_mapper(base)
+ if _existing_alias:
+ assert _existing_alias.mapper is primary_mapper
+ classes = util.to_set(classes)
+ new_classes = set([
+ mp.class_ for mp in
+ _existing_alias.with_polymorphic_mappers])
+ if classes == new_classes:
+ return _existing_alias
+ else:
+ classes = classes.union(new_classes)
mappers, selectable = primary_mapper.\
_with_polymorphic_args(classes, selectable,
innerjoin=innerjoin)
@@ -804,6 +825,16 @@ class _ORMJoin(expression.Join):
expression.Join.__init__(self, left, right, onclause, isouter)
+ if not prop and getattr(right_info, 'mapper', None) \
+ and right_info.mapper.single:
+ # if single inheritance target and we are using a manual
+ # or implicit ON clause, augment it the same way we'd augment the
+ # WHERE.
+ single_crit = right_info.mapper._single_table_criterion
+ if right_info.is_aliased_class:
+ single_crit = right_info._adapter.traverse(single_crit)
+ self.onclause = self.onclause & single_crit
+
def join(self, right, onclause=None, isouter=False, join_to_left=None):
return _ORMJoin(self, right, onclause, isouter)
diff --git a/lib/sqlalchemy/pool.py b/lib/sqlalchemy/pool.py
index bc9affe4a..a174df784 100644
--- a/lib/sqlalchemy/pool.py
+++ b/lib/sqlalchemy/pool.py
@@ -248,9 +248,7 @@ class Pool(log.Identified):
self.logger.debug("Closing connection %r", connection)
try:
self._dialect.do_close(connection)
- except (SystemExit, KeyboardInterrupt):
- raise
- except:
+ except Exception:
self.logger.error("Exception closing connection %r",
connection, exc_info=True)
@@ -441,8 +439,8 @@ class _ConnectionRecord(object):
try:
dbapi_connection = rec.get_connection()
except:
- rec.checkin()
- raise
+ with util.safe_reraise():
+ rec.checkin()
echo = pool._should_log_debug()
fairy = _ConnectionFairy(dbapi_connection, rec, echo)
rec.fairy_ref = weakref.ref(
@@ -569,12 +567,12 @@ def _finalize_fairy(connection, connection_record,
# Immediately close detached instances
if not connection_record:
pool._close_connection(connection)
- except Exception as e:
+ except BaseException as e:
pool.logger.error(
"Exception during reset or similar", exc_info=True)
if connection_record:
connection_record.invalidate(e=e)
- if isinstance(e, (SystemExit, KeyboardInterrupt)):
+ if not isinstance(e, Exception):
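+            # re-raise BaseExceptions that aren't Exception subclasses,
+            # e.g. KeyboardInterrupt and SystemExit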
raise
if connection_record:
@@ -842,9 +840,7 @@ class SingletonThreadPool(Pool):
for conn in self._all_conns:
try:
conn.close()
- except (SystemExit, KeyboardInterrupt):
- raise
- except:
+ except Exception:
# pysqlite won't even let you close a conn from a thread
# that didn't create it
pass
@@ -962,8 +958,8 @@ class QueuePool(Pool):
try:
return self._create_connection()
except:
- self._dec_overflow()
- raise
+ with util.safe_reraise():
+ self._dec_overflow()
else:
return self._do_get()
diff --git a/lib/sqlalchemy/sql/__init__.py b/lib/sqlalchemy/sql/__init__.py
index 4d013859c..351e08d0b 100644
--- a/lib/sqlalchemy/sql/__init__.py
+++ b/lib/sqlalchemy/sql/__init__.py
@@ -38,6 +38,7 @@ from .expression import (
false,
False_,
func,
+ funcfilter,
insert,
intersect,
intersect_all,
diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py
index 18b4d4cfc..b102f0240 100644
--- a/lib/sqlalchemy/sql/compiler.py
+++ b/lib/sqlalchemy/sql/compiler.py
@@ -746,6 +746,12 @@ class SQLCompiler(Compiled):
)
)
+ def visit_funcfilter(self, funcfilter, **kwargs):
+ return "%s FILTER (WHERE %s)" % (
+ funcfilter.func._compiler_dispatch(self, **kwargs),
+ funcfilter.criterion._compiler_dispatch(self, **kwargs)
+ )
+
def visit_extract(self, extract, **kwargs):
field = self.extract_map.get(extract.field, extract.field)
return "EXTRACT(%s FROM %s)" % (
@@ -807,7 +813,7 @@ class SQLCompiler(Compiled):
text += self.order_by_clause(cs, **kwargs)
text += (cs._limit_clause is not None
or cs._offset_clause is not None) and \
- self.limit_clause(cs) or ""
+ self.limit_clause(cs, **kwargs) or ""
if self.ctes and \
compound_index == 0 and toplevel:
@@ -1723,6 +1729,12 @@ class SQLCompiler(Compiled):
)
def visit_insert(self, insert_stmt, **kw):
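+        # a fresh entry on the compiler stack gives embedded constructs,
+        # such as the SELECT of an INSERT..FROM SELECT, a correlation
+        # scope local to this statement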
+ self.stack.append(
+ {'correlate_froms': set(),
+ "iswrapper": False,
+ "asfrom_froms": set(),
+ "selectable": insert_stmt})
+
self.isinsert = True
crud_params = crud._get_crud_params(self, insert_stmt, **kw)
@@ -1787,7 +1799,7 @@ class SQLCompiler(Compiled):
text += " " + returning_clause
if insert_stmt.select is not None:
- text += " %s" % self.process(insert_stmt.select, **kw)
+ text += " %s" % self.process(self._insert_from_select, **kw)
elif not crud_params and supports_default_values:
text += " DEFAULT VALUES"
elif insert_stmt._has_multi_parameters:
@@ -1806,6 +1818,8 @@ class SQLCompiler(Compiled):
if self.returning and not self.returning_precedes_values:
text += " " + returning_clause
+ self.stack.pop(-1)
+
return text
def update_limit_clause(self, update_stmt):
@@ -2272,14 +2286,14 @@ class DDLCompiler(Compiled):
formatted_name = self.preparer.format_constraint(constraint)
if formatted_name is not None:
text += "CONSTRAINT %s " % formatted_name
- remote_table = list(constraint._elements.values())[0].column.table
+ remote_table = list(constraint.elements)[0].column.table
text += "FOREIGN KEY(%s) REFERENCES %s (%s)" % (
', '.join(preparer.quote(f.parent.name)
- for f in constraint._elements.values()),
+ for f in constraint.elements),
self.define_constraint_remote_table(
constraint, remote_table, preparer),
', '.join(preparer.quote(f.column.name)
- for f in constraint._elements.values())
+ for f in constraint.elements)
)
text += self.define_constraint_match(constraint)
text += self.define_constraint_cascades(constraint)
diff --git a/lib/sqlalchemy/sql/crud.py b/lib/sqlalchemy/sql/crud.py
index 1c1f661d2..831d05be1 100644
--- a/lib/sqlalchemy/sql/crud.py
+++ b/lib/sqlalchemy/sql/crud.py
@@ -89,18 +89,15 @@ def _get_crud_params(compiler, stmt, **kw):
_col_bind_name, _getattr_col_key, values, kw)
if compiler.isinsert and stmt.select_names:
- # for an insert from select, we can only use names that
- # are given, so only select for those names.
- cols = (stmt.table.c[_column_as_key(name)]
- for name in stmt.select_names)
+ _scan_insert_from_select_cols(
+ compiler, stmt, parameters,
+ _getattr_col_key, _column_as_key,
+ _col_bind_name, check_columns, values, kw)
else:
- # iterate through all table columns to maintain
- # ordering, even for those cols that aren't included
- cols = stmt.table.columns
-
- _scan_cols(
- compiler, stmt, cols, parameters,
- _getattr_col_key, _col_bind_name, check_columns, values, kw)
+ _scan_cols(
+ compiler, stmt, parameters,
+ _getattr_col_key, _column_as_key,
+ _col_bind_name, check_columns, values, kw)
if parameters and stmt_parameters:
check = set(parameters).intersection(
@@ -118,13 +115,17 @@ def _get_crud_params(compiler, stmt, **kw):
return values
-def _create_bind_param(compiler, col, value, required=False, name=None):
+def _create_bind_param(
+ compiler, col, value, process=True, required=False, name=None):
if name is None:
name = col.key
bindparam = elements.BindParameter(name, value,
type_=col.type, required=required)
bindparam._is_crud = True
- return bindparam._compiler_dispatch(compiler)
+ if process:
+ bindparam = bindparam._compiler_dispatch(compiler)
+ return bindparam
+
def _key_getters_for_crud_column(compiler):
if compiler.isupdate and compiler.statement._extra_froms:
@@ -162,14 +163,52 @@ def _key_getters_for_crud_column(compiler):
return _column_as_key, _getattr_col_key, _col_bind_name
+def _scan_insert_from_select_cols(
+ compiler, stmt, parameters, _getattr_col_key,
+ _column_as_key, _col_bind_name, check_columns, values, kw):
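+    # for INSERT..FROM SELECT, resolve the given names into Column
+    # objects; table columns with Python-side or SQL expression defaults
+    # that aren't named are appended so their defaults render into the
+    # SELECT as well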
+
+ need_pks, implicit_returning, \
+ implicit_return_defaults, postfetch_lastrowid = \
+ _get_returning_modifiers(compiler, stmt)
+
+ cols = [stmt.table.c[_column_as_key(name)]
+ for name in stmt.select_names]
+
+ compiler._insert_from_select = stmt.select
+
+ add_select_cols = []
+ if stmt.include_insert_from_select_defaults:
+ col_set = set(cols)
+ for col in stmt.table.columns:
+ if col not in col_set and col.default:
+ cols.append(col)
+
+ for c in cols:
+ col_key = _getattr_col_key(c)
+ if col_key in parameters and col_key not in check_columns:
+ parameters.pop(col_key)
+ values.append((c, None))
+ else:
+ _append_param_insert_select_hasdefault(
+ compiler, stmt, c, add_select_cols, kw)
+
+ if add_select_cols:
+ values.extend(add_select_cols)
+ compiler._insert_from_select = compiler._insert_from_select._generate()
+ compiler._insert_from_select._raw_columns += tuple(
+ expr for col, expr in add_select_cols)
+
+
def _scan_cols(
- compiler, stmt, cols, parameters, _getattr_col_key,
- _col_bind_name, check_columns, values, kw):
+ compiler, stmt, parameters, _getattr_col_key,
+ _column_as_key, _col_bind_name, check_columns, values, kw):
need_pks, implicit_returning, \
implicit_return_defaults, postfetch_lastrowid = \
_get_returning_modifiers(compiler, stmt)
+ cols = stmt.table.columns
+
for c in cols:
col_key = _getattr_col_key(c)
if col_key in parameters and col_key not in check_columns:
@@ -196,7 +235,8 @@ def _scan_cols(
elif c.default is not None:
_append_param_insert_hasdefault(
- compiler, stmt, c, implicit_return_defaults, values, kw)
+ compiler, stmt, c, implicit_return_defaults,
+ values, kw)
elif c.server_default is not None:
if implicit_return_defaults and \
@@ -299,10 +339,8 @@ def _append_param_insert_hasdefault(
elif not c.primary_key:
compiler.postfetch.append(c)
elif c.default.is_clause_element:
- values.append(
- (c, compiler.process(
- c.default.arg.self_group(), **kw))
- )
+ proc = compiler.process(c.default.arg.self_group(), **kw)
+ values.append((c, proc))
if implicit_return_defaults and \
c in implicit_return_defaults:
@@ -317,6 +355,25 @@ def _append_param_insert_hasdefault(
compiler.prefetch.append(c)
+def _append_param_insert_select_hasdefault(
+ compiler, stmt, c, values, kw):
+
+ if c.default.is_sequence:
+ if compiler.dialect.supports_sequences and \
+ (not c.default.optional or
+ not compiler.dialect.sequences_optional):
+ proc = c.default
+ values.append((c, proc))
+ elif c.default.is_clause_element:
+ proc = c.default.arg.self_group()
+ values.append((c, proc))
+ else:
+ values.append(
+ (c, _create_bind_param(compiler, c, None, process=False))
+ )
+ compiler.prefetch.append(c)
+
+
def _append_param_update(
compiler, stmt, c, implicit_return_defaults, values, kw):
diff --git a/lib/sqlalchemy/sql/dml.py b/lib/sqlalchemy/sql/dml.py
index 1934d0776..9f2ce7ce3 100644
--- a/lib/sqlalchemy/sql/dml.py
+++ b/lib/sqlalchemy/sql/dml.py
@@ -475,6 +475,7 @@ class Insert(ValuesBase):
ValuesBase.__init__(self, table, values, prefixes)
self._bind = bind
self.select = self.select_names = None
+ self.include_insert_from_select_defaults = False
self.inline = inline
self._returning = returning
self._validate_dialect_kwargs(dialect_kw)
@@ -487,7 +488,7 @@ class Insert(ValuesBase):
return ()
@_generative
- def from_select(self, names, select):
+ def from_select(self, names, select, include_defaults=True):
"""Return a new :class:`.Insert` construct which represents
an ``INSERT...FROM SELECT`` statement.
@@ -506,6 +507,21 @@ class Insert(ValuesBase):
is not checked before passing along to the database, the database
would normally raise an exception if these column lists don't
correspond.
+ :param include_defaults: if True, non-server default values and
+ SQL expressions as specified on :class:`.Column` objects
+         (as documented in :ref:`metadata_defaults_toplevel`) that are not
+         otherwise specified in the list of names will be rendered
+ into the INSERT and SELECT statements, so that these values are also
+ included in the data to be inserted.
+
+ .. note:: A Python-side default that uses a Python callable function
+ will only be invoked **once** for the whole statement, and **not
+ per row**.
+
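+        E.g., a hypothetical sketch, given a table with a Python-side
+        default on column ``x``::
+
+            t = Table(
+                "t", metadata,
+                Column("id", Integer, primary_key=True),
+                Column("x", Integer, default=5))
+
+            stmt = t.insert().from_select(
+                ["id"], select([other_table.c.id]))
+
+        Here the rendered SELECT also delivers the default value for
+        ``x`` into each inserted row.
+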
+ .. versionadded:: 1.0.0 - :meth:`.Insert.from_select` now renders
+ Python-side and SQL expression column defaults into the
+ SELECT statement for columns otherwise not included in the
+ list of column names.
.. versionchanged:: 1.0.0 an INSERT that uses FROM SELECT
implies that the :paramref:`.insert.inline` flag is set to
@@ -514,13 +530,6 @@ class Insert(ValuesBase):
deals with an arbitrary number of rows, so the
:attr:`.ResultProxy.inserted_primary_key` accessor does not apply.
- .. note::
-
- A SELECT..INSERT construct in SQL has no VALUES clause. Therefore
- :class:`.Column` objects which utilize Python-side defaults
- (e.g. as described at :ref:`metadata_defaults_toplevel`)
- will **not** take effect when using :meth:`.Insert.from_select`.
-
.. versionadded:: 0.8.3
"""
@@ -533,6 +542,7 @@ class Insert(ValuesBase):
self.select_names = names
self.inline = True
+ self.include_insert_from_select_defaults = include_defaults
self.select = _interpret_as_select(select)
def _copy_internals(self, clone=_clone, **kw):
diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py
index 8ec0aa700..734f78632 100644
--- a/lib/sqlalchemy/sql/elements.py
+++ b/lib/sqlalchemy/sql/elements.py
@@ -228,6 +228,7 @@ class ClauseElement(Visitable):
is_selectable = False
is_clause_element = True
+ description = None
_order_by_label_element = None
_is_from_container = False
@@ -540,7 +541,7 @@ class ClauseElement(Visitable):
__nonzero__ = __bool__
def __repr__(self):
- friendly = getattr(self, 'description', None)
+ friendly = self.description
if friendly is None:
return object.__repr__(self)
else:
@@ -860,6 +861,9 @@ class ColumnElement(operators.ColumnOperators, ClauseElement):
expressions and function calls.
"""
+ while self._is_clone_of is not None:
+ self = self._is_clone_of
+
return _anonymous_label(
'%%(%d %s)s' % (id(self), getattr(self, 'name', 'anon'))
)
@@ -1088,7 +1092,7 @@ class BindParameter(ColumnElement):
"""
if isinstance(key, ColumnClause):
type_ = key.type
- key = key.name
+ key = key.key
if required is NO_ARG:
required = (value is NO_ARG and callable_ is None)
if value is NO_ARG:
@@ -1616,10 +1620,10 @@ class Null(ColumnElement):
return type_api.NULLTYPE
@classmethod
- def _singleton(cls):
+ def _instance(cls):
"""Return a constant :class:`.Null` construct."""
- return NULL
+ return Null()
def compare(self, other):
return isinstance(other, Null)
@@ -1640,11 +1644,11 @@ class False_(ColumnElement):
return type_api.BOOLEANTYPE
def _negate(self):
- return TRUE
+ return True_()
@classmethod
- def _singleton(cls):
- """Return a constant :class:`.False_` construct.
+ def _instance(cls):
+ """Return a :class:`.False_` construct.
E.g.::
@@ -1678,7 +1682,7 @@ class False_(ColumnElement):
"""
- return FALSE
+ return False_()
def compare(self, other):
return isinstance(other, False_)
@@ -1699,17 +1703,17 @@ class True_(ColumnElement):
return type_api.BOOLEANTYPE
def _negate(self):
- return FALSE
+ return False_()
@classmethod
def _ifnone(cls, other):
if other is None:
- return cls._singleton()
+ return cls._instance()
else:
return other
@classmethod
- def _singleton(cls):
+ def _instance(cls):
"""Return a constant :class:`.True_` construct.
E.g.::
@@ -1744,15 +1748,11 @@ class True_(ColumnElement):
"""
- return TRUE
+ return True_()
def compare(self, other):
return isinstance(other, True_)
-NULL = Null()
-FALSE = False_()
-TRUE = True_()
-
class ClauseList(ClauseElement):
"""Describe a list of clauses, separated by an operator.
@@ -2782,6 +2782,10 @@ class Grouping(ColumnElement):
return self
@property
+ def _key_label(self):
+ return self._label
+
+ @property
def _label(self):
return getattr(self.element, '_label', None) or self.anon_label
@@ -2888,6 +2892,120 @@ class Over(ColumnElement):
))
+class FunctionFilter(ColumnElement):
+ """Represent a function FILTER clause.
+
+    This is a special operator against aggregate and window functions,
+    which controls which rows are passed to the function.
+    It is supported only by certain database backends.
+
+ Invocation of :class:`.FunctionFilter` is via
+ :meth:`.FunctionElement.filter`::
+
+ func.count(1).filter(True)
+
+ .. versionadded:: 1.0.0
+
+ .. seealso::
+
+ :meth:`.FunctionElement.filter`
+
+ """
+ __visit_name__ = 'funcfilter'
+
+ criterion = None
+
+ def __init__(self, func, *criterion):
+ """Produce a :class:`.FunctionFilter` object against a function.
+
+ Used against aggregate and window functions,
+ for database backends that support the "FILTER" clause.
+
+ E.g.::
+
+ from sqlalchemy import funcfilter
+ funcfilter(func.count(1), MyClass.name == 'some name')
+
+ Would produce "COUNT(1) FILTER (WHERE myclass.name = 'some name')".
+
+ This function is also available from the :data:`~.expression.func`
+ construct itself via the :meth:`.FunctionElement.filter` method.
+
+ .. versionadded:: 1.0.0
+
+ .. seealso::
+
+ :meth:`.FunctionElement.filter`
+
+
+ """
+ self.func = func
+ self.filter(*criterion)
+
+ def filter(self, *criterion):
+ """Produce an additional FILTER against the function.
+
+ This method adds additional criteria to the initial criteria
+ set up by :meth:`.FunctionElement.filter`.
+
+ Multiple criteria are joined together at SQL render time
+ via ``AND``.
+
+
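+        E.g., a sketch assuming column expressions ``x`` and ``y``::
+
+            funcfilter(func.count(1), x > 5).filter(y < 10)
+
+        renders SQL of the form
+        ``COUNT(1) FILTER (WHERE x > 5 AND y < 10)``.
+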
+ """
+
+ for criterion in list(criterion):
+ criterion = _expression_literal_as_text(criterion)
+
+ if self.criterion is not None:
+ self.criterion = self.criterion & criterion
+ else:
+ self.criterion = criterion
+
+ return self
+
+ def over(self, partition_by=None, order_by=None):
+ """Produce an OVER clause against this filtered function.
+
+ Used against aggregate or so-called "window" functions,
+ for database backends that support window functions.
+
+ The expression::
+
+ func.rank().filter(MyClass.y > 5).over(order_by='x')
+
+ is shorthand for::
+
+ from sqlalchemy import over, funcfilter
+ over(funcfilter(func.rank(), MyClass.y > 5), order_by='x')
+
+ See :func:`~.expression.over` for a full description.
+
+ """
+ return Over(self, partition_by=partition_by, order_by=order_by)
+
+ @util.memoized_property
+ def type(self):
+ return self.func.type
+
+ def get_children(self, **kwargs):
+ return [c for c in
+ (self.func, self.criterion)
+ if c is not None]
+
+ def _copy_internals(self, clone=_clone, **kw):
+ self.func = clone(self.func, **kw)
+ if self.criterion is not None:
+ self.criterion = clone(self.criterion, **kw)
+
+ @property
+ def _from_objects(self):
+ return list(itertools.chain(
+ *[c._from_objects for c in (self.func, self.criterion)
+ if c is not None]
+ ))
+
+
class Label(ColumnElement):
"""Represents a column label (AS).
@@ -3217,7 +3335,7 @@ class ColumnClause(Immutable, ColumnElement):
return name
def _bind_param(self, operator, obj):
- return BindParameter(self.name, obj,
+ return BindParameter(self.key, obj,
_compared_to_operator=operator,
_compared_to_type=self.type,
unique=True)
@@ -3491,7 +3609,7 @@ def _string_or_unprintable(element):
else:
try:
return str(element)
- except:
+ except Exception:
return "unprintable element %r" % element
diff --git a/lib/sqlalchemy/sql/expression.py b/lib/sqlalchemy/sql/expression.py
index d96f048b9..2ffc5468c 100644
--- a/lib/sqlalchemy/sql/expression.py
+++ b/lib/sqlalchemy/sql/expression.py
@@ -36,7 +36,7 @@ from .elements import ClauseElement, ColumnElement,\
True_, False_, BinaryExpression, Tuple, TypeClause, Extract, \
Grouping, not_, \
collate, literal_column, between,\
- literal, outparam, type_coerce, ClauseList
+ literal, outparam, type_coerce, ClauseList, FunctionFilter
from .elements import SavepointClause, RollbackToSavepointClause, \
ReleaseSavepointClause
@@ -89,14 +89,16 @@ asc = public_factory(UnaryExpression._create_asc, ".expression.asc")
desc = public_factory(UnaryExpression._create_desc, ".expression.desc")
distinct = public_factory(
UnaryExpression._create_distinct, ".expression.distinct")
-true = public_factory(True_._singleton, ".expression.true")
-false = public_factory(False_._singleton, ".expression.false")
-null = public_factory(Null._singleton, ".expression.null")
+true = public_factory(True_._instance, ".expression.true")
+false = public_factory(False_._instance, ".expression.false")
+null = public_factory(Null._instance, ".expression.null")
join = public_factory(Join._create_join, ".expression.join")
outerjoin = public_factory(Join._create_outerjoin, ".expression.outerjoin")
insert = public_factory(Insert, ".expression.insert")
update = public_factory(Update, ".expression.update")
delete = public_factory(Delete, ".expression.delete")
+funcfilter = public_factory(
+ FunctionFilter, ".expression.funcfilter")
# internal functions still being called from tests and the ORM,
diff --git a/lib/sqlalchemy/sql/functions.py b/lib/sqlalchemy/sql/functions.py
index 7efb1e916..9280c7d60 100644
--- a/lib/sqlalchemy/sql/functions.py
+++ b/lib/sqlalchemy/sql/functions.py
@@ -12,7 +12,7 @@ from . import sqltypes, schema
from .base import Executable, ColumnCollection
from .elements import ClauseList, Cast, Extract, _literal_as_binds, \
literal_column, _type_from_args, ColumnElement, _clone,\
- Over, BindParameter
+ Over, BindParameter, FunctionFilter
from .selectable import FromClause, Select, Alias
from . import operators
@@ -116,6 +116,35 @@ class FunctionElement(Executable, ColumnElement, FromClause):
"""
return Over(self, partition_by=partition_by, order_by=order_by)
+ def filter(self, *criterion):
+ """Produce a FILTER clause against this function.
+
+ Used against aggregate and window functions,
+ for database backends that support the "FILTER" clause.
+
+ The expression::
+
+ func.count(1).filter(True)
+
+ is shorthand for::
+
+ from sqlalchemy import funcfilter
+ funcfilter(func.count(1), True)
+
+ .. versionadded:: 1.0.0
+
+ .. seealso::
+
+ :class:`.FunctionFilter`
+
+ :func:`.funcfilter`
+
+
+ """
+ if not criterion:
+ return self
+ return FunctionFilter(self, *criterion)
+
@property
def _from_objects(self):
return self.clauses._from_objects
diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py
index 26d7c428e..4093d7115 100644
--- a/lib/sqlalchemy/sql/schema.py
+++ b/lib/sqlalchemy/sql/schema.py
@@ -412,8 +412,8 @@ class Table(DialectKWArgs, SchemaItem, TableClause):
table.dispatch.after_parent_attach(table, metadata)
return table
except:
- metadata._remove_table(name, schema)
- raise
+ with util.safe_reraise():
+ metadata._remove_table(name, schema)
@property
@util.deprecated('0.9', 'Use ``table.schema.quote``')
@@ -728,7 +728,7 @@ class Table(DialectKWArgs, SchemaItem, TableClause):
checkfirst=checkfirst)
def tometadata(self, metadata, schema=RETAIN_SCHEMA,
- referred_schema_fn=None):
+ referred_schema_fn=None, name=None):
"""Return a copy of this :class:`.Table` associated with a different
:class:`.MetaData`.
@@ -785,13 +785,21 @@ class Table(DialectKWArgs, SchemaItem, TableClause):
.. versionadded:: 0.9.2
- """
+        :param name: optional string name for the target table.
+ If not specified or None, the table name is retained. This allows
+ a :class:`.Table` to be copied to the same :class:`.MetaData` target
+ with a new name.
+
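+         E.g., a sketch copying a table into its own :class:`.MetaData`
+         under a new name, assuming ``some_table`` is an existing
+         :class:`.Table`::
+
+             some_table_copy = some_table.tometadata(
+                 some_table.metadata, name="some_table_copy")
+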
+ .. versionadded:: 1.0.0
+ """
+ if name is None:
+ name = self.name
if schema is RETAIN_SCHEMA:
schema = self.schema
elif schema is None:
schema = metadata.schema
- key = _get_table_key(self.name, schema)
+ key = _get_table_key(name, schema)
if key in metadata.tables:
util.warn("Table '%s' already exists within the given "
"MetaData - not copying." % self.description)
@@ -801,7 +809,7 @@ class Table(DialectKWArgs, SchemaItem, TableClause):
for c in self.columns:
args.append(c.copy(schema=schema))
table = Table(
- self.name, metadata, schema=schema,
+ name, metadata, schema=schema,
*args, **self.kwargs
)
for c in self.constraints:
@@ -1061,8 +1069,8 @@ class Column(SchemaItem, ColumnClause):
conditionally rendered differently on different backends,
consider custom compilation rules for :class:`.CreateColumn`.
- ..versionadded:: 0.8.3 Added the ``system=True`` parameter to
- :class:`.Column`.
+ .. versionadded:: 0.8.3 Added the ``system=True`` parameter to
+ :class:`.Column`.
"""
@@ -1549,7 +1557,7 @@ class ForeignKey(DialectKWArgs, SchemaItem):
)
return self._schema_item_copy(fk)
- def _get_colspec(self, schema=None):
+ def _get_colspec(self, schema=None, table_name=None):
"""Return a string based 'column specification' for this
:class:`.ForeignKey`.
@@ -1559,7 +1567,15 @@ class ForeignKey(DialectKWArgs, SchemaItem):
"""
if schema:
_schema, tname, colname = self._column_tokens
+ if table_name is not None:
+ tname = table_name
return "%s.%s.%s" % (schema, tname, colname)
+ elif table_name:
+ schema, tname, colname = self._column_tokens
+ if schema:
+ return "%s.%s.%s" % (schema, table_name, colname)
+ else:
+ return "%s.%s" % (table_name, colname)
elif self._table_column is not None:
return "%s.%s" % (
self._table_column.table.fullname, self._table_column.key)
@@ -1788,7 +1804,7 @@ class ForeignKey(DialectKWArgs, SchemaItem):
match=self.match,
**self._unvalidated_dialect_kw
)
- self.constraint._elements[self.parent] = self
+ self.constraint._append_element(column, self)
self.constraint._set_parent_with_dispatch(table)
table.foreign_keys.add(self)
@@ -2473,7 +2489,7 @@ class CheckConstraint(Constraint):
return self._schema_item_copy(c)
-class ForeignKeyConstraint(Constraint):
+class ForeignKeyConstraint(ColumnCollectionConstraint):
"""A table-level FOREIGN KEY constraint.
Defines a single column or composite FOREIGN KEY ... REFERENCES
@@ -2548,9 +2564,10 @@ class ForeignKeyConstraint(Constraint):
.. versionadded:: 0.9.2
"""
- super(ForeignKeyConstraint, self).\
- __init__(name, deferrable, initially, info=info, **dialect_kw)
+ Constraint.__init__(
+ self, name=name, deferrable=deferrable, initially=initially,
+ info=info, **dialect_kw)
self.onupdate = onupdate
self.ondelete = ondelete
self.link_to_name = link_to_name
@@ -2559,14 +2576,12 @@ class ForeignKeyConstraint(Constraint):
self.use_alter = use_alter
self.match = match
- self._elements = util.OrderedDict()
-
# standalone ForeignKeyConstraint - create
# associated ForeignKey objects which will be applied to hosted
# Column objects (in col.foreign_keys), either now or when attached
# to the Table for string-specified names
- for col, refcol in zip(columns, refcolumns):
- self._elements[col] = ForeignKey(
+ self.elements = [
+ ForeignKey(
refcol,
_constraint=self,
name=self.name,
@@ -2578,25 +2593,36 @@ class ForeignKeyConstraint(Constraint):
deferrable=self.deferrable,
initially=self.initially,
**self.dialect_kwargs
- )
+ ) for refcol in refcolumns
+ ]
+ ColumnCollectionMixin.__init__(self, *columns)
if table is not None:
+ if hasattr(self, "parent"):
+ assert table is self.parent
self._set_parent_with_dispatch(table)
- elif columns and \
- isinstance(columns[0], Column) and \
- columns[0].table is not None:
- self._set_parent_with_dispatch(columns[0].table)
+
+ def _append_element(self, column, fk):
+ self.columns.add(column)
+ self.elements.append(fk)
+
+ @property
+ def _elements(self):
+ # legacy - provide a dictionary view of (column_key, fk)
+ return util.OrderedDict(
+ zip(self.column_keys, self.elements)
+ )
@property
def _referred_schema(self):
- for elem in self._elements.values():
+ for elem in self.elements:
return elem._referred_schema
else:
return None
def _validate_dest_table(self, table):
table_keys = set([elem._table_key()
- for elem in self._elements.values()])
+ for elem in self.elements])
if None not in table_keys and len(table_keys) > 1:
elem0, elem1 = sorted(table_keys)[0:2]
raise exc.ArgumentError(
@@ -2609,38 +2635,48 @@ class ForeignKeyConstraint(Constraint):
))
@property
- def _col_description(self):
- return ", ".join(self._elements)
+ def column_keys(self):
+ """Return a list of string keys representing the local
+ columns in this :class:`.ForeignKeyConstraint`.
- @property
- def columns(self):
- return list(self._elements)
+        This list is either the original string arguments sent
+        to the constructor of the :class:`.ForeignKeyConstraint`,
+        or, if the constraint has been initialized with :class:`.Column`
+        objects, the string ``.key`` of each element.
+
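+        E.g., a sketch of a standalone, unattached constraint::
+
+            fkc = ForeignKeyConstraint(
+                ["a_id", "b_id"], ["other.a_id", "other.b_id"])
+
+            fkc.column_keys   # ["a_id", "b_id"]
+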
+ .. versionadded:: 1.0.0
+
+ """
+ if hasattr(self, "parent"):
+ return self.columns.keys()
+ else:
+ return [
+ col.key if isinstance(col, ColumnElement)
+ else str(col) for col in self._pending_colargs
+ ]
@property
- def elements(self):
- return list(self._elements.values())
+ def _col_description(self):
+ return ", ".join(self.column_keys)
def _set_parent(self, table):
- super(ForeignKeyConstraint, self)._set_parent(table)
-
- self._validate_dest_table(table)
+ Constraint._set_parent(self, table)
- for col, fk in self._elements.items():
- # string-specified column names now get
- # resolved to Column objects
- if isinstance(col, util.string_types):
- try:
- col = table.c[col]
- except KeyError:
- raise exc.ArgumentError(
- "Can't create ForeignKeyConstraint "
- "on table '%s': no column "
- "named '%s' is present." % (table.description, col))
+ try:
+ ColumnCollectionConstraint._set_parent(self, table)
+ except KeyError as ke:
+ raise exc.ArgumentError(
+ "Can't create ForeignKeyConstraint "
+ "on table '%s': no column "
+ "named '%s' is present." % (table.description, ke.args[0]))
+ for col, fk in zip(self.columns, self.elements):
if not hasattr(fk, 'parent') or \
fk.parent is not col:
fk._set_parent_with_dispatch(col)
+ self._validate_dest_table(table)
+
if self.use_alter:
def supports_alter(ddl, event, schema_item, bind, **kw):
return table in set(kw['tables']) and \
@@ -2651,11 +2687,16 @@ class ForeignKeyConstraint(Constraint):
event.listen(table.metadata, "before_drop",
ddl.DropConstraint(self, on=supports_alter))
- def copy(self, schema=None, **kw):
+ def copy(self, schema=None, target_table=None, **kw):
fkc = ForeignKeyConstraint(
- [x.parent.key for x in self._elements.values()],
- [x._get_colspec(schema=schema)
- for x in self._elements.values()],
+ [x.parent.key for x in self.elements],
+ [x._get_colspec(
+ schema=schema,
+ table_name=target_table.name
+ if target_table is not None
+ and x._table_key() == x.parent.table.key
+ else None)
+ for x in self.elements],
name=self.name,
onupdate=self.onupdate,
ondelete=self.ondelete,
@@ -2666,8 +2707,8 @@ class ForeignKeyConstraint(Constraint):
match=self.match
)
for self_fk, other_fk in zip(
- self._elements.values(),
- fkc._elements.values()):
+ self.elements,
+ fkc.elements):
self_fk._schema_item_copy(other_fk)
return self._schema_item_copy(fkc)
diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py
index b4df87e54..8198a6733 100644
--- a/lib/sqlalchemy/sql/selectable.py
+++ b/lib/sqlalchemy/sql/selectable.py
@@ -2572,7 +2572,7 @@ class Select(HasPrefixes, GenerativeSelect):
following::
select([mytable]).\\
- with_hint(mytable, "+ index(%(name)s ix_mytable)")
+ with_hint(mytable, "index(%(name)s ix_mytable)")
Would render SQL as::
@@ -2583,8 +2583,7 @@ class Select(HasPrefixes, GenerativeSelect):
and Sybase simultaneously::
select([mytable]).\\
- with_hint(
- mytable, "+ index(%(name)s ix_mytable)", 'oracle').\\
+ with_hint(mytable, "index(%(name)s ix_mytable)", 'oracle').\\
with_hint(mytable, "WITH INDEX ix_mytable", 'sybase')
.. seealso::
diff --git a/lib/sqlalchemy/testing/engines.py b/lib/sqlalchemy/testing/engines.py
index 67c13231e..1284f9c2a 100644
--- a/lib/sqlalchemy/testing/engines.py
+++ b/lib/sqlalchemy/testing/engines.py
@@ -37,8 +37,6 @@ class ConnectionKiller(object):
def _safe(self, fn):
try:
fn()
- except (SystemExit, KeyboardInterrupt):
- raise
except Exception as e:
warnings.warn(
"testing_reaper couldn't "
@@ -168,8 +166,6 @@ class ReconnectFixture(object):
def _safe(self, fn):
try:
fn()
- except (SystemExit, KeyboardInterrupt):
- raise
except Exception as e:
warnings.warn(
"ReconnectFixture couldn't "
diff --git a/lib/sqlalchemy/testing/exclusions.py b/lib/sqlalchemy/testing/exclusions.py
index 49211f805..f94724608 100644
--- a/lib/sqlalchemy/testing/exclusions.py
+++ b/lib/sqlalchemy/testing/exclusions.py
@@ -178,8 +178,7 @@ class Predicate(object):
@classmethod
def as_predicate(cls, predicate, description=None):
if isinstance(predicate, compound):
- return cls.as_predicate(predicate.fails.union(predicate.skips))
-
+ return cls.as_predicate(predicate.enabled_for_config, description)
elif isinstance(predicate, Predicate):
if description and predicate.description is None:
predicate.description = description
diff --git a/lib/sqlalchemy/testing/plugin/plugin_base.py b/lib/sqlalchemy/testing/plugin/plugin_base.py
index 6696427dc..614a12133 100644
--- a/lib/sqlalchemy/testing/plugin/plugin_base.py
+++ b/lib/sqlalchemy/testing/plugin/plugin_base.py
@@ -93,7 +93,10 @@ def setup_options(make_option):
help="Exclude tests with tag <tag>")
make_option("--write-profiles", action="store_true",
dest="write_profiles", default=False,
- help="Write/update profiling data.")
+ help="Write/update failing profiling data.")
+ make_option("--force-write-profiles", action="store_true",
+ dest="force_write_profiles", default=False,
+ help="Unconditionally write/update profiling data.")
def configure_follower(follower_ident):
diff --git a/lib/sqlalchemy/testing/profiling.py b/lib/sqlalchemy/testing/profiling.py
index fcb888f86..671bbe32d 100644
--- a/lib/sqlalchemy/testing/profiling.py
+++ b/lib/sqlalchemy/testing/profiling.py
@@ -42,7 +42,11 @@ class ProfileStatsFile(object):
"""
def __init__(self, filename):
- self.write = (
+ self.force_write = (
+ config.options is not None and
+ config.options.force_write_profiles
+ )
+ self.write = self.force_write or (
config.options is not None and
config.options.write_profiles
)
@@ -115,7 +119,11 @@ class ProfileStatsFile(object):
per_fn = self.data[test_key]
per_platform = per_fn[self.platform_key]
counts = per_platform['counts']
- counts[-1] = callcount
+ current_count = per_platform['current_count']
+ if current_count < len(counts):
+ counts[current_count - 1] = callcount
+ else:
+ counts[-1] = callcount
if self.write:
self._write()
@@ -235,7 +243,7 @@ def count_functions(variance=0.05):
deviance = int(callcount * variance)
failed = abs(callcount - expected_count) > deviance
- if failed:
+ if failed or _profile_stats.force_write:
if _profile_stats.write:
_profile_stats.replace(callcount)
else:
diff --git a/lib/sqlalchemy/testing/provision.py b/lib/sqlalchemy/testing/provision.py
index 0bcdad959..c8f7fdf30 100644
--- a/lib/sqlalchemy/testing/provision.py
+++ b/lib/sqlalchemy/testing/provision.py
@@ -120,7 +120,7 @@ def _pg_create_db(cfg, eng, ident):
isolation_level="AUTOCOMMIT") as conn:
try:
_pg_drop_db(cfg, conn, ident)
- except:
+ except Exception:
pass
currentdb = conn.scalar("select current_database()")
conn.execute("CREATE DATABASE %s TEMPLATE %s" % (ident, currentdb))
@@ -131,7 +131,7 @@ def _mysql_create_db(cfg, eng, ident):
with eng.connect() as conn:
try:
_mysql_drop_db(cfg, conn, ident)
- except:
+ except Exception:
pass
conn.execute("CREATE DATABASE %s" % ident)
conn.execute("CREATE DATABASE %s_test_schema" % ident)
@@ -173,15 +173,15 @@ def _mysql_drop_db(cfg, eng, ident):
with eng.connect() as conn:
try:
conn.execute("DROP DATABASE %s_test_schema" % ident)
- except:
+ except Exception:
pass
try:
conn.execute("DROP DATABASE %s_test_schema_2" % ident)
- except:
+ except Exception:
pass
try:
conn.execute("DROP DATABASE %s" % ident)
- except:
+ except Exception:
pass
diff --git a/lib/sqlalchemy/testing/suite/test_insert.py b/lib/sqlalchemy/testing/suite/test_insert.py
index 92d3d93e5..38519dfb9 100644
--- a/lib/sqlalchemy/testing/suite/test_insert.py
+++ b/lib/sqlalchemy/testing/suite/test_insert.py
@@ -4,7 +4,7 @@ from .. import exclusions
from ..assertions import eq_
from .. import engines
-from sqlalchemy import Integer, String, select, util
+from sqlalchemy import Integer, String, select, literal_column, literal
from ..schema import Table, Column
@@ -90,6 +90,13 @@ class InsertBehaviorTest(fixtures.TablesTest):
Column('id', Integer, primary_key=True, autoincrement=False),
Column('data', String(50))
)
+ Table('includes_defaults', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(50)),
+ Column('x', Integer, default=5),
+ Column('y', Integer,
+ default=literal_column("2", type_=Integer) + literal(2)))
def test_autoclose_on_insert(self):
if requirements.returning.enabled:
@@ -158,6 +165,34 @@ class InsertBehaviorTest(fixtures.TablesTest):
("data3", ), ("data3", )]
)
+ @requirements.insert_from_select
+ def test_insert_from_select_with_defaults(self):
+ table = self.tables.includes_defaults
+ config.db.execute(
+ table.insert(),
+ [
+ dict(id=1, data="data1"),
+ dict(id=2, data="data2"),
+ dict(id=3, data="data3"),
+ ]
+ )
+
+ config.db.execute(
+ table.insert(inline=True).
+ from_select(("id", "data",),
+ select([table.c.id + 5, table.c.data]).
+ where(table.c.data.in_(["data2", "data3"]))
+ ),
+ )
+
+ eq_(
+ config.db.execute(
+ select([table]).order_by(table.c.data, table.c.id)
+ ).fetchall(),
+ [(1, 'data1', 5, 4), (2, 'data2', 5, 4),
+ (7, 'data2', 5, 4), (3, 'data3', 5, 4), (8, 'data3', 5, 4)]
+ )
+
class ReturningTest(fixtures.TablesTest):
run_create_tables = 'each'
diff --git a/lib/sqlalchemy/testing/suite/test_reflection.py b/lib/sqlalchemy/testing/suite/test_reflection.py
index 60db9eb47..08b858b47 100644
--- a/lib/sqlalchemy/testing/suite/test_reflection.py
+++ b/lib/sqlalchemy/testing/suite/test_reflection.py
@@ -500,10 +500,12 @@ class ComponentReflectionTest(fixtures.TablesTest):
@testing.requires.unique_constraint_reflection
def test_get_temp_table_unique_constraints(self):
insp = inspect(self.metadata.bind)
- eq_(
- insp.get_unique_constraints('user_tmp'),
- [{'column_names': ['name'], 'name': 'user_tmp_uq'}]
- )
+ reflected = insp.get_unique_constraints('user_tmp')
+ for refl in reflected:
+            # Different dialects handle duplicate indexes and constraints
+ # differently, so ignore this flag
+ refl.pop('duplicates_index', None)
+ eq_(reflected, [{'column_names': ['name'], 'name': 'user_tmp_uq'}])
@testing.requires.temp_table_reflection
def test_get_temp_table_indexes(self):
@@ -556,6 +558,9 @@ class ComponentReflectionTest(fixtures.TablesTest):
)
for orig, refl in zip(uniques, reflected):
+            # Different dialects handle duplicate indexes and constraints
+ # differently, so ignore this flag
+ refl.pop('duplicates_index', None)
eq_(orig, refl)
@testing.provide_metadata
diff --git a/lib/sqlalchemy/util/_collections.py b/lib/sqlalchemy/util/_collections.py
index a1fbc0fa0..d36852698 100644
--- a/lib/sqlalchemy/util/_collections.py
+++ b/lib/sqlalchemy/util/_collections.py
@@ -10,9 +10,10 @@
from __future__ import absolute_import
import weakref
import operator
-from .compat import threading, itertools_filterfalse
+from .compat import threading, itertools_filterfalse, string_types
from . import py2k
import types
+import collections
EMPTY_SET = frozenset()
@@ -779,10 +780,12 @@ def coerce_generator_arg(arg):
def to_list(x, default=None):
if x is None:
return default
- if not isinstance(x, (list, tuple)):
+ if not isinstance(x, collections.Iterable) or isinstance(x, string_types):
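+        # a string is iterable, but is wrapped as a single value here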
return [x]
- else:
+ elif isinstance(x, list):
return x
+ else:
+ return list(x)
def to_set(x):
diff --git a/lib/sqlalchemy/util/langhelpers.py b/lib/sqlalchemy/util/langhelpers.py
index 95369783d..5c17bea88 100644
--- a/lib/sqlalchemy/util/langhelpers.py
+++ b/lib/sqlalchemy/util/langhelpers.py
@@ -134,7 +134,8 @@ def public_factory(target, location):
fn = target.__init__
callable_ = target
doc = "Construct a new :class:`.%s` object. \n\n"\
- "This constructor is mirrored as a public API function; see :func:`~%s` "\
+ "This constructor is mirrored as a public API function; "\
+ "see :func:`~%s` "\
"for a full usage and argument description." % (
target.__name__, location, )
else:
@@ -155,6 +156,7 @@ def %(name)s(%(args)s):
exec(code, env)
decorated = env[location_name]
decorated.__doc__ = fn.__doc__
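+    # report the factory under its public module path, e.g. a location
+    # of ".sql.elements.foob" yields __module__ "sqlalchemy.sql.elements"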
+ decorated.__module__ = "sqlalchemy" + location.rsplit(".", 1)[0]
if compat.py2k or hasattr(fn, '__func__'):
fn.__func__.__doc__ = doc
else:
@@ -490,7 +492,7 @@ def generic_repr(obj, additional_kw=(), to_inspect=None, omit_kwarg=()):
val = getattr(obj, arg, missing)
if val is not missing and val != defval:
output.append('%s=%r' % (arg, val))
- except:
+ except Exception:
pass
if additional_kw:
@@ -499,7 +501,7 @@ def generic_repr(obj, additional_kw=(), to_inspect=None, omit_kwarg=()):
val = getattr(obj, arg, missing)
if val is not missing and val != defval:
output.append('%s=%r' % (arg, val))
- except:
+ except Exception:
pass
return "%s(%s)" % (obj.__class__.__name__, ", ".join(output))
@@ -1198,7 +1200,7 @@ def warn_exception(func, *args, **kwargs):
"""
try:
return func(*args, **kwargs)
- except:
+ except Exception:
warn("%s('%s') ignored" % sys.exc_info()[0:2])
diff --git a/test/aaa_profiling/test_memusage.py b/test/aaa_profiling/test_memusage.py
index f4bce6b01..63883daac 100644
--- a/test/aaa_profiling/test_memusage.py
+++ b/test/aaa_profiling/test_memusage.py
@@ -658,6 +658,32 @@ class MemUsageTest(EnsureZeroed):
row[t.c.x]
go()
+ def test_many_discarded_relationships(self):
+ """a use case that really isn't supported, nonetheless we can
+ guard against memleaks here so why not"""
+
+ m1 = MetaData()
+ t1 = Table('t1', m1, Column('id', Integer, primary_key=True))
+ t2 = Table(
+ 't2', m1, Column('id', Integer, primary_key=True),
+ Column('t1id', ForeignKey('t1.id')))
+
+ class T1(object):
+ pass
+ t1_mapper = mapper(T1, t1)
+
+ @testing.emits_warning()
+ @profile_memory()
+ def go():
+ class T2(object):
+ pass
+ t2_mapper = mapper(T2, t2)
+ t1_mapper.add_property("bar", relationship(t2_mapper))
+ s1 = Session()
+ # this causes the path_registry to be invoked
+ s1.query(t1_mapper)._compile_context()
+ go()
+
# fails on newer versions of pysqlite due to unusual memory behavior
# in pysqlite itself. background at:
# http://thread.gmane.org/gmane.comp.python.db.pysqlite.user/2290
diff --git a/test/base/test_except.py b/test/base/test_except.py
index a62382725..918e7a042 100644
--- a/test/base/test_except.py
+++ b/test/base/test_except.py
@@ -2,19 +2,12 @@
from sqlalchemy import exc as sa_exceptions
-from sqlalchemy import util
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import eq_
-if util.py2k:
- from exceptions import StandardError, KeyboardInterrupt, SystemExit
-else:
- Exception = BaseException
-
class Error(Exception):
- """This class will be old-style on <= 2.4 and new-style on >=
- 2.5."""
+ pass
class DatabaseError(Error):
@@ -26,6 +19,7 @@ class OperationalError(DatabaseError):
class ProgrammingError(DatabaseError):
+
def __str__(self):
return '<%s>' % self.bogus
@@ -38,89 +32,110 @@ class WrapTest(fixtures.TestBase):
def test_db_error_normal(self):
try:
- raise sa_exceptions.DBAPIError.instance('', [],
- OperationalError(), DatabaseError)
+ raise sa_exceptions.DBAPIError.instance(
+ '', [],
+ OperationalError(), DatabaseError)
except sa_exceptions.DBAPIError:
self.assert_(True)
def test_tostring(self):
try:
- raise sa_exceptions.DBAPIError.instance('this is a message'
- , None, OperationalError(), DatabaseError)
+ raise sa_exceptions.DBAPIError.instance(
+ 'this is a message',
+ None, OperationalError(), DatabaseError)
except sa_exceptions.DBAPIError as exc:
- assert str(exc) \
- == "(OperationalError) 'this is a message' None"
+ eq_(
+ str(exc),
+ "(test.base.test_except.OperationalError) "
+ "[SQL: 'this is a message']")
def test_tostring_large_dict(self):
try:
- raise sa_exceptions.DBAPIError.instance('this is a message'
- ,
- {'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5, 'f': 6, 'g': 7, 'h':
- 8, 'i': 9, 'j': 10, 'k': 11,
- }, OperationalError(), DatabaseError)
+ raise sa_exceptions.DBAPIError.instance(
+ 'this is a message',
+ {
+ 'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5, 'f': 6, 'g': 7,
+ 'h': 8, 'i': 9, 'j': 10, 'k': 11
+ },
+ OperationalError(), DatabaseError)
except sa_exceptions.DBAPIError as exc:
- assert str(exc).startswith("(OperationalError) 'this is a "
- "message' {")
+ assert str(exc).startswith(
+ "(test.base.test_except.OperationalError) "
+ "[SQL: 'this is a message'] [parameters: {")
def test_tostring_large_list(self):
try:
- raise sa_exceptions.DBAPIError.instance('this is a message',
- [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11,],
+ raise sa_exceptions.DBAPIError.instance(
+ 'this is a message',
+ [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11],
OperationalError(), DatabaseError)
except sa_exceptions.DBAPIError as exc:
- assert str(exc).startswith("(OperationalError) 'this is a "
- "message' [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]")
+ assert str(exc).startswith(
+ "(test.base.test_except.OperationalError) "
+ "[SQL: 'this is a message'] [parameters: "
+ "[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]]")
def test_tostring_large_executemany(self):
try:
- raise sa_exceptions.DBAPIError.instance('this is a message',
+ raise sa_exceptions.DBAPIError.instance(
+ 'this is a message',
[{1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1},
- {1: 1}, {1:1}, {1: 1}, {1: 1},],
+ {1: 1}, {1: 1}, {1: 1}, {1: 1}, ],
OperationalError(), DatabaseError)
except sa_exceptions.DBAPIError as exc:
- eq_(str(exc) ,
- "(OperationalError) 'this is a message' [{1: 1}, "\
- "{1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: "\
- "1}, {1: 1}, {1: 1}]")
+ eq_(
+ str(exc),
+ "(test.base.test_except.OperationalError) "
+ "[SQL: 'this is a message'] [parameters: [{1: 1}, "
+ "{1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: "
+ "1}, {1: 1}, {1: 1}]]"
+ )
try:
raise sa_exceptions.DBAPIError.instance('this is a message', [
{1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1},
- {1:1}, {1: 1}, {1: 1}, {1: 1},
- ], OperationalError(), DatabaseError)
+ {1: 1}, {1: 1}, {1: 1}, {1: 1},
+ ], OperationalError(), DatabaseError)
except sa_exceptions.DBAPIError as exc:
- eq_(str(exc) ,
- "(OperationalError) 'this is a message' [{1: 1}, "
+ eq_(str(exc),
+ "(test.base.test_except.OperationalError) "
+ "[SQL: 'this is a message'] [parameters: [{1: 1}, "
"{1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, "
"{1: 1}, {1: 1} ... displaying 10 of 11 total "
- "bound parameter sets ... {1: 1}, {1: 1}]"
- )
+ "bound parameter sets ... {1: 1}, {1: 1}]]"
+ )
try:
- raise sa_exceptions.DBAPIError.instance('this is a message',
+ raise sa_exceptions.DBAPIError.instance(
+ 'this is a message',
[
- (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ),
- (1, ),
+ (1, ), (1, ), (1, ), (1, ), (1, ), (1, ),
+ (1, ), (1, ), (1, ), (1, ),
], OperationalError(), DatabaseError)
+
except sa_exceptions.DBAPIError as exc:
- eq_(str(exc),
- "(OperationalError) 'this is a message' [(1,), "\
- "(1,), (1,), (1,), (1,), (1,), (1,), (1,), (1,), (1,)]")
+ eq_(
+ str(exc),
+ "(test.base.test_except.OperationalError) "
+ "[SQL: 'this is a message'] [parameters: [(1,), "
+ "(1,), (1,), (1,), (1,), (1,), (1,), (1,), (1,), (1,)]]")
try:
raise sa_exceptions.DBAPIError.instance('this is a message', [
(1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ),
(1, ), (1, ),
- ], OperationalError(), DatabaseError)
+ ], OperationalError(), DatabaseError)
except sa_exceptions.DBAPIError as exc:
eq_(str(exc),
- "(OperationalError) 'this is a message' [(1,), "
+ "(test.base.test_except.OperationalError) "
+ "[SQL: 'this is a message'] [parameters: [(1,), "
"(1,), (1,), (1,), (1,), (1,), (1,), (1,) "
"... displaying 10 of 11 total bound "
- "parameter sets ... (1,), (1,)]"
- )
+ "parameter sets ... (1,), (1,)]]"
+ )
def test_db_error_busted_dbapi(self):
try:
- raise sa_exceptions.DBAPIError.instance('', [],
- ProgrammingError(), DatabaseError)
+ raise sa_exceptions.DBAPIError.instance(
+ '', [],
+ ProgrammingError(), DatabaseError)
except sa_exceptions.DBAPIError as e:
self.assert_(True)
self.assert_('Error in str() of DB-API' in e.args[0])
@@ -147,8 +162,9 @@ class WrapTest(fixtures.TestBase):
def test_db_error_keyboard_interrupt(self):
try:
- raise sa_exceptions.DBAPIError.instance('', [],
- KeyboardInterrupt(), DatabaseError)
+ raise sa_exceptions.DBAPIError.instance(
+ '', [],
+ KeyboardInterrupt(), DatabaseError)
except sa_exceptions.DBAPIError:
self.assert_(False)
except KeyboardInterrupt:
@@ -156,8 +172,9 @@ class WrapTest(fixtures.TestBase):
def test_db_error_system_exit(self):
try:
- raise sa_exceptions.DBAPIError.instance('', [],
- SystemExit(), DatabaseError)
+ raise sa_exceptions.DBAPIError.instance(
+ '', [],
+ SystemExit(), DatabaseError)
except sa_exceptions.DBAPIError:
self.assert_(False)
except SystemExit:
diff --git a/test/base/test_utils.py b/test/base/test_utils.py
index a378b0160..df61d7874 100644
--- a/test/base/test_utils.py
+++ b/test/base/test_utils.py
@@ -6,6 +6,7 @@ from sqlalchemy.testing import eq_, is_, ne_, fails_if
from sqlalchemy.testing.util import picklers, gc_collect
from sqlalchemy.util import classproperty, WeakSequence, get_callable_argspec
from sqlalchemy.sql import column
+from sqlalchemy.util import langhelpers
class _KeyedTupleTest(object):
@@ -283,6 +284,35 @@ class MemoizedAttrTest(fixtures.TestBase):
eq_(val[0], 21)
+class ToListTest(fixtures.TestBase):
+ def test_from_string(self):
+ eq_(
+ util.to_list("xyz"),
+ ["xyz"]
+ )
+
+ def test_from_set(self):
+ spec = util.to_list(set([1, 2, 3]))
+ assert isinstance(spec, list)
+ eq_(
+ sorted(spec),
+ [1, 2, 3]
+ )
+
+ def test_from_dict(self):
+ spec = util.to_list({1: "a", 2: "b", 3: "c"})
+ assert isinstance(spec, list)
+ eq_(
+ sorted(spec),
+ [1, 2, 3]
+ )
+
+ def test_from_tuple(self):
+ eq_(
+ util.to_list((1, 2, 3)),
+ [1, 2, 3]
+ )
+
class ColumnCollectionTest(fixtures.TestBase):
def test_in(self):
@@ -1274,6 +1304,43 @@ class DuckTypeCollectionTest(fixtures.TestBase):
is_(util.duck_type_collection(instance), None)
+class PublicFactoryTest(fixtures.TestBase):
+
+ def _fixture(self):
+ class Thingy(object):
+ def __init__(self, value):
+ "make a thingy"
+ self.value = value
+
+ @classmethod
+ def foobar(cls, x, y):
+ "do the foobar"
+ return Thingy(x + y)
+
+ return Thingy
+
+ def test_classmethod(self):
+ Thingy = self._fixture()
+ foob = langhelpers.public_factory(
+ Thingy.foobar, ".sql.elements.foob")
+ eq_(foob(3, 4).value, 7)
+ eq_(foob(x=3, y=4).value, 7)
+ eq_(foob.__doc__, "do the foobar")
+ eq_(foob.__module__, "sqlalchemy.sql.elements")
+ assert Thingy.foobar.__doc__.startswith("This function is mirrored;")
+
+ def test_constructor(self):
+ Thingy = self._fixture()
+ foob = langhelpers.public_factory(
+ Thingy, ".sql.elements.foob")
+ eq_(foob(7).value, 7)
+ eq_(foob(value=7).value, 7)
+ eq_(foob.__doc__, "make a thingy")
+ eq_(foob.__module__, "sqlalchemy.sql.elements")
+ assert Thingy.__init__.__doc__.startswith(
+ "Construct a new :class:`.Thingy` object.")
+
+
class ArgInspectionTest(fixtures.TestBase):
def test_get_cls_kwargs(self):
diff --git a/test/dialect/mssql/test_reflection.py b/test/dialect/mssql/test_reflection.py
index e93162a8e..0ef69f656 100644
--- a/test/dialect/mssql/test_reflection.py
+++ b/test/dialect/mssql/test_reflection.py
@@ -187,7 +187,7 @@ class InfoCoerceUnicodeTest(fixtures.TestBase, AssertsCompiledSQL):
stmt = tables.c.table_name == 'somename'
self.assert_compile(
stmt,
- "[TABLES_1].[TABLE_NAME] = :TABLE_NAME_1",
+ "[TABLES_1].[TABLE_NAME] = :table_name_1",
dialect=dialect
)
@@ -197,7 +197,7 @@ class InfoCoerceUnicodeTest(fixtures.TestBase, AssertsCompiledSQL):
stmt = tables.c.table_name == 'somename'
self.assert_compile(
stmt,
- "[TABLES_1].[TABLE_NAME] = CAST(:TABLE_NAME_1 AS NVARCHAR(max))",
+ "[TABLES_1].[TABLE_NAME] = CAST(:table_name_1 AS NVARCHAR(max))",
dialect=dialect
)
diff --git a/test/dialect/mysql/test_reflection.py b/test/dialect/mysql/test_reflection.py
index bf35a2c6b..99733e397 100644
--- a/test/dialect/mysql/test_reflection.py
+++ b/test/dialect/mysql/test_reflection.py
@@ -283,6 +283,38 @@ class ReflectionTest(fixtures.TestBase, AssertsExecutionResults):
view_names = dialect.get_view_names(connection, "information_schema")
self.assert_('TABLES' in view_names)
+ @testing.provide_metadata
+ def test_reflection_with_unique_constraint(self):
+ insp = inspect(testing.db)
+
+ meta = self.metadata
+ uc_table = Table('mysql_uc', meta,
+ Column('a', String(10)),
+ UniqueConstraint('a', name='uc_a'))
+
+ uc_table.create()
+
+ # MySQL converts unique constraints into unique indexes.
+        # Separately we get both
+ indexes = dict((i['name'], i) for i in insp.get_indexes('mysql_uc'))
+ constraints = set(i['name']
+ for i in insp.get_unique_constraints('mysql_uc'))
+
+ self.assert_('uc_a' in indexes)
+ self.assert_(indexes['uc_a']['unique'])
+ self.assert_('uc_a' in constraints)
+
+ # reflection here favors the unique index, as that's the
+ # more "official" MySQL construct
+ reflected = Table('mysql_uc', MetaData(testing.db), autoload=True)
+
+ indexes = dict((i.name, i) for i in reflected.indexes)
+ constraints = set(uc.name for uc in reflected.constraints)
+
+ self.assert_('uc_a' in indexes)
+ self.assert_(indexes['uc_a'].unique)
+ self.assert_('uc_a' not in constraints)
+
class RawReflectionTest(fixtures.TestBase):
def setup(self):
diff --git a/test/dialect/mysql/test_types.py b/test/dialect/mysql/test_types.py
index 75dbe15e0..e65acc6db 100644
--- a/test/dialect/mysql/test_types.py
+++ b/test/dialect/mysql/test_types.py
@@ -154,10 +154,8 @@ class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
res
)
- @testing.fails_if(
- lambda: testing.against("mysql+mysqlconnector")
- and not util.py3k,
- "bug in mysqlconnector; http://bugs.mysql.com/bug.php?id=73266")
+ # fixed in mysql-connector as of 2.0.1,
+ # see http://bugs.mysql.com/bug.php?id=73266
@testing.provide_metadata
def test_precision_float_roundtrip(self):
t = Table('t', self.metadata,
diff --git a/test/dialect/postgresql/test_reflection.py b/test/dialect/postgresql/test_reflection.py
index b8b9be3de..8de71216e 100644
--- a/test/dialect/postgresql/test_reflection.py
+++ b/test/dialect/postgresql/test_reflection.py
@@ -7,7 +7,8 @@ from sqlalchemy.testing import fixtures
from sqlalchemy import testing
from sqlalchemy import inspect
from sqlalchemy import Table, Column, MetaData, Integer, String, \
- PrimaryKeyConstraint, ForeignKey, join, Sequence
+ PrimaryKeyConstraint, ForeignKey, join, Sequence, UniqueConstraint, \
+ Index
from sqlalchemy import exc
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import base as postgresql
@@ -803,6 +804,66 @@ class ReflectionTest(fixtures.TestBase):
'labels': ['sad', 'ok', 'happy']
}])
+ @testing.provide_metadata
+ def test_reflection_with_unique_constraint(self):
+ insp = inspect(testing.db)
+
+ meta = self.metadata
+ uc_table = Table('pgsql_uc', meta,
+ Column('a', String(10)),
+ UniqueConstraint('a', name='uc_a'))
+
+ uc_table.create()
+
+ # PostgreSQL will create an implicit index for a unique
+ # constraint. Separately we get both
+ indexes = set(i['name'] for i in insp.get_indexes('pgsql_uc'))
+ constraints = set(i['name']
+ for i in insp.get_unique_constraints('pgsql_uc'))
+
+ self.assert_('uc_a' in indexes)
+ self.assert_('uc_a' in constraints)
+
+ # reflection corrects for the dupe
+ reflected = Table('pgsql_uc', MetaData(testing.db), autoload=True)
+
+ indexes = set(i.name for i in reflected.indexes)
+ constraints = set(uc.name for uc in reflected.constraints)
+
+ self.assert_('uc_a' not in indexes)
+ self.assert_('uc_a' in constraints)
+
+ @testing.provide_metadata
+ def test_reflect_unique_index(self):
+ insp = inspect(testing.db)
+
+ meta = self.metadata
+
+ # a unique index, on the other hand, is detected as an index
+ # and not as a unique constraint
+ uc_table = Table('pgsql_uc', meta,
+ Column('a', String(10)),
+ Index('ix_a', 'a', unique=True))
+
+ uc_table.create()
+
+ indexes = dict((i['name'], i) for i in insp.get_indexes('pgsql_uc'))
+ constraints = set(i['name']
+ for i in insp.get_unique_constraints('pgsql_uc'))
+
+ self.assert_('ix_a' in indexes)
+ assert indexes['ix_a']['unique']
+ self.assert_('ix_a' not in constraints)
+
+ reflected = Table('pgsql_uc', MetaData(testing.db), autoload=True)
+
+ indexes = dict((i.name, i) for i in reflected.indexes)
+ constraints = set(uc.name for uc in reflected.constraints)
+
+ self.assert_('ix_a' in indexes)
+ assert indexes['ix_a'].unique
+ self.assert_('ix_a' not in constraints)
+
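# A minimal sketch of the distinction the two tests above draw, assuming a
# reachable PostgreSQL database:
#
#     insp = inspect(engine)
#     # a UniqueConstraint shows up under both APIs; Table reflection
#     # keeps the constraint and drops the implicit index:
#     insp.get_unique_constraints('pgsql_uc')   # [{'name': 'uc_a', ...}]
#     # a unique Index shows up only as an index:
#     insp.get_indexes('pgsql_uc')              # [{'name': 'ix_a', 'unique': True, ...}]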
class CustomTypeReflectionTest(fixtures.TestBase):
diff --git a/test/dialect/test_oracle.py b/test/dialect/test_oracle.py
index 36eacf864..a771c5d80 100644
--- a/test/dialect/test_oracle.py
+++ b/test/dialect/test_oracle.py
@@ -104,6 +104,28 @@ class QuotedBindRoundTripTest(fixtures.TestBase):
(2, 2, 2)
)
+ def test_numeric_bind_round_trip(self):
+ eq_(
+ testing.db.scalar(
+ select([
+ literal_column("2", type_=Integer()) +
+ bindparam("2_1", value=2)])
+ ),
+ 4
+ )
+
+ @testing.provide_metadata
+ def test_numeric_bind_in_crud(self):
+ t = Table(
+ "asfd", self.metadata,
+ Column("100K", Integer)
+ )
+ t.create()
+
+ testing.db.execute(t.insert(), {"100K": 10})
+ eq_(
+ testing.db.scalar(t.select()), 10
+ )
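# The two tests above exercise bind names that are not legal unquoted Oracle
# identifiers (a leading digit, as in "2_1" and "100K"); the dialect is
# expected to quote or rewrite them. A sketch of the round trip, assuming an
# Oracle connection is available as `engine`:
#
#     stmt = select([literal_column("2", type_=Integer()) +
#                    bindparam("2_1", value=2)])
#     engine.scalar(stmt)   # 4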
class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = "oracle" #oracle.dialect()
diff --git a/test/engine/test_execute.py b/test/engine/test_execute.py
index 219a145c6..5c3279ba9 100644
--- a/test/engine/test_execute.py
+++ b/test/engine/test_execute.py
@@ -25,6 +25,10 @@ from sqlalchemy.util import nested
users, metadata, users_autoinc = None, None, None
+class SomeException(Exception):
+ pass
+
+
class ExecuteTest(fixtures.TestBase):
__backend__ = True
@@ -280,12 +284,13 @@ class ExecuteTest(fixtures.TestBase):
impl = Integer
def process_bind_param(self, value, dialect):
- raise Exception("nope")
+ raise SomeException("nope")
def _go(conn):
assert_raises_message(
tsa.exc.StatementError,
- r"nope \(original cause: Exception: nope\) u?'SELECT 1 ",
+ r"\(test.engine.test_execute.SomeException\) "
+ "nope \[SQL\: u?'SELECT 1 ",
conn.execute,
select([1]).
where(
@@ -479,6 +484,26 @@ class ExecuteTest(fixtures.TestBase):
eq_(canary, ["l1", "l2", "l3", "l1", "l2"])
@testing.requires.ad_hoc_engines
+ def test_autocommit_option_no_issue_first_connect(self):
+ eng = create_engine(testing.db.url)
+ eng.update_execution_options(autocommit=True)
+ conn = eng.connect()
+ eq_(conn._execution_options, {"autocommit": True})
+ conn.close()
+
+ @testing.requires.ad_hoc_engines
+ def test_dialect_init_uses_options(self):
+ eng = create_engine(testing.db.url)
+
+ def my_init(connection):
+ connection.execution_options(foo='bar').execute(select([1]))
+
+ with patch.object(eng.dialect, "initialize", my_init):
+ conn = eng.connect()
+ eq_(conn._execution_options, {})
+ conn.close()
+
+ @testing.requires.ad_hoc_engines
def test_generative_engine_event_dispatch_hasevents(self):
def l1(*arg, **kw):
pass
@@ -541,7 +566,7 @@ class ConvenienceExecuteTest(fixtures.TablesTest):
if is_transaction:
conn = conn.connection
conn.execute(self.table.insert().values(a=x, b=value))
- raise Exception("breakage")
+ raise SomeException("breakage")
return go
def _assert_no_data(self):
@@ -1577,7 +1602,7 @@ class HandleErrorTest(fixtures.TestBase):
listener = Mock(return_value=None)
event.listen(engine, 'dbapi_error', listener)
- nope = Exception("nope")
+ nope = SomeException("nope")
class MyType(TypeDecorator):
impl = Integer
@@ -1588,7 +1613,8 @@ class HandleErrorTest(fixtures.TestBase):
with engine.connect() as conn:
assert_raises_message(
tsa.exc.StatementError,
- r"nope \(original cause: Exception: nope\) u?'SELECT 1 ",
+ r"\(test.engine.test_execute.SomeException\) "
+ "nope \[SQL\: u?'SELECT 1 ",
conn.execute,
select([1]).where(
column('foo') == literal('bar', MyType()))
@@ -1768,7 +1794,7 @@ class HandleErrorTest(fixtures.TestBase):
listener = Mock(return_value=None)
event.listen(engine, 'handle_error', listener)
- nope = Exception("nope")
+ nope = SomeException("nope")
class MyType(TypeDecorator):
impl = Integer
@@ -1779,7 +1805,8 @@ class HandleErrorTest(fixtures.TestBase):
with engine.connect() as conn:
assert_raises_message(
tsa.exc.StatementError,
- r"nope \(original cause: Exception: nope\) u?'SELECT 1 ",
+ r"\(test.engine.test_execute.SomeException\) "
+ "nope \[SQL\: u?'SELECT 1 ",
conn.execute,
select([1]).where(
column('foo') == literal('bar', MyType()))
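# The revised assertions above reflect the new StatementError string format,
# roughly (a sketch, not the literal formatting code):
#
#     "(%s.%s) %s [SQL: %r] [parameters: %r]" % (
#         orig.__class__.__module__, orig.__class__.__name__,
#         str(orig), statement, params)
#
# i.e. the wrapped exception's qualified class name in parentheses, followed
# by bracketed SQL and parameter sections, replacing the old
# "(original cause: ...)" suffix.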
diff --git a/test/engine/test_logging.py b/test/engine/test_logging.py
index 1432a0f7b..180ea9388 100644
--- a/test/engine/test_logging.py
+++ b/test/engine/test_logging.py
@@ -56,7 +56,8 @@ class LogParamsTest(fixtures.TestBase):
def test_error_large_dict(self):
assert_raises_message(
tsa.exc.DBAPIError,
- r".*'INSERT INTO nonexistent \(data\) values \(:data\)' "
+ r".*'INSERT INTO nonexistent \(data\) values \(:data\)'\] "
+ "\[parameters: "
"\[{'data': '0'}, {'data': '1'}, {'data': '2'}, "
"{'data': '3'}, {'data': '4'}, {'data': '5'}, "
"{'data': '6'}, {'data': '7'} ... displaying 10 of "
@@ -71,8 +72,9 @@ class LogParamsTest(fixtures.TestBase):
assert_raises_message(
tsa.exc.DBAPIError,
r".*INSERT INTO nonexistent \(data\) values "
- "\(\?\)' \[\('0',\), \('1',\), \('2',\), \('3',\), "
- "\('4',\), \('5',\), \('6',\), \('7',\) ... displaying "
+ "\(\?\)'\] \[parameters: \[\('0',\), \('1',\), \('2',\), \('3',\), "
+ "\('4',\), \('5',\), \('6',\), \('7',\) "
+ "... displaying "
"10 of 100 total bound parameter sets ... "
"\('98',\), \('99',\)\]",
lambda: self.eng.execute(
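# A sketch of the failure mode asserted above: an executemany against a
# nonexistent table with 100 parameter sets raises a DBAPIError whose
# message now carries a "[parameters: ...]" section, truncated in the middle:
#
#     engine.execute(
#         "INSERT INTO nonexistent (data) values (:data)",
#         [{"data": str(i)} for i in range(100)])
#     # DBAPIError: ... [parameters: [{'data': '0'}, ... displaying 10 of
#     # 100 total bound parameter sets ... {'data': '99'}]]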
diff --git a/test/ext/declarative/test_inheritance.py b/test/ext/declarative/test_inheritance.py
index 5a99c9c5a..6ea37e4d3 100644
--- a/test/ext/declarative/test_inheritance.py
+++ b/test/ext/declarative/test_inheritance.py
@@ -1388,3 +1388,32 @@ class ConcreteExtensionConfigTest(
"WHERE something.id = pjoin.something_id AND something.id = :id_1)"
)
+ def test_abstract_in_hierarchy(self):
+ class Document(Base, AbstractConcreteBase):
+ doctype = Column(String)
+
+ class ContactDocument(Document):
+ __abstract__ = True
+
+ send_method = Column(String)
+
+ class ActualDocument(ContactDocument):
+ __tablename__ = 'actual_documents'
+ __mapper_args__ = {
+ 'concrete': True,
+ 'polymorphic_identity': 'actual'}
+
+ id = Column(Integer, primary_key=True)
+
+ configure_mappers()
+ session = Session()
+ self.assert_compile(
+ session.query(Document),
+ "SELECT pjoin.doctype AS pjoin_doctype, "
+ "pjoin.send_method AS pjoin_send_method, "
+ "pjoin.id AS pjoin_id, pjoin.type AS pjoin_type "
+ "FROM (SELECT actual_documents.doctype AS doctype, "
+ "actual_documents.send_method AS send_method, "
+ "actual_documents.id AS id, 'actual' AS type "
+ "FROM actual_documents) AS pjoin"
+ ) \ No newline at end of file
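# The test above establishes that an __abstract__ class may sit between an
# AbstractConcreteBase and its concrete subclasses; columns declared on the
# abstract intermediary (send_method) are folded into the polymorphic
# "pjoin" selectable alongside those of the base and the concrete class:
#
#     docs = session.query(Document).all()   # queries the pjoin selectable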
diff --git a/test/orm/inheritance/test_single.py b/test/orm/inheritance/test_single.py
index be42cce52..dbbe4c435 100644
--- a/test/orm/inheritance/test_single.py
+++ b/test/orm/inheritance/test_single.py
@@ -386,7 +386,31 @@ class RelationshipToSingleTest(testing.AssertsCompiledSQL, fixtures.MappedTest):
]
)
- def test_outer_join(self):
+ def test_of_type_aliased_fromjoinpoint(self):
+ Company, Employee, Engineer = self.classes.Company,\
+ self.classes.Employee,\
+ self.classes.Engineer
+ companies, employees = self.tables.companies, self.tables.employees
+
+ mapper(Company, companies, properties={
+ 'employee':relationship(Employee)
+ })
+ mapper(Employee, employees, polymorphic_on=employees.c.type)
+ mapper(Engineer, inherits=Employee, polymorphic_identity='engineer')
+
+ sess = create_session()
+ self.assert_compile(
+ sess.query(Company).outerjoin(
+ Company.employee.of_type(Engineer),
+ aliased=True, from_joinpoint=True),
+ "SELECT companies.company_id AS companies_company_id, "
+ "companies.name AS companies_name FROM companies "
+ "LEFT OUTER JOIN employees AS employees_1 ON "
+ "companies.company_id = employees_1.company_id "
+ "AND employees_1.type IN (:type_1)"
+ )
+
+ def test_outer_join_prop(self):
Company, Employee, Engineer = self.classes.Company,\
self.classes.Employee,\
self.classes.Engineer
@@ -407,7 +431,7 @@ class RelationshipToSingleTest(testing.AssertsCompiledSQL, fixtures.MappedTest):
"= employees.company_id AND employees.type IN (:type_1)"
)
- def test_outer_join_alias(self):
+ def test_outer_join_prop_alias(self):
Company, Employee, Engineer = self.classes.Company,\
self.classes.Employee,\
self.classes.Engineer
@@ -431,6 +455,184 @@ class RelationshipToSingleTest(testing.AssertsCompiledSQL, fixtures.MappedTest):
)
+ def test_outer_join_literal_onclause(self):
+ Company, Employee, Engineer = self.classes.Company,\
+ self.classes.Employee,\
+ self.classes.Engineer
+ companies, employees = self.tables.companies, self.tables.employees
+
+ mapper(Company, companies, properties={
+ 'engineers':relationship(Engineer)
+ })
+ mapper(Employee, employees, polymorphic_on=employees.c.type)
+ mapper(Engineer, inherits=Employee, polymorphic_identity='engineer')
+
+ sess = create_session()
+ self.assert_compile(
+ sess.query(Company, Engineer).outerjoin(
+ Engineer, Company.company_id == Engineer.company_id),
+ "SELECT companies.company_id AS companies_company_id, "
+ "companies.name AS companies_name, "
+ "employees.employee_id AS employees_employee_id, "
+ "employees.name AS employees_name, "
+ "employees.manager_data AS employees_manager_data, "
+ "employees.engineer_info AS employees_engineer_info, "
+ "employees.type AS employees_type, "
+ "employees.company_id AS employees_company_id FROM companies "
+ "LEFT OUTER JOIN employees ON "
+ "companies.company_id = employees.company_id "
+ "AND employees.type IN (:type_1)"
+ )
+
+ def test_outer_join_literal_onclause_alias(self):
+ Company, Employee, Engineer = self.classes.Company,\
+ self.classes.Employee,\
+ self.classes.Engineer
+ companies, employees = self.tables.companies, self.tables.employees
+
+ mapper(Company, companies, properties={
+ 'engineers':relationship(Engineer)
+ })
+ mapper(Employee, employees, polymorphic_on=employees.c.type)
+ mapper(Engineer, inherits=Employee, polymorphic_identity='engineer')
+
+ eng_alias = aliased(Engineer)
+ sess = create_session()
+ self.assert_compile(
+ sess.query(Company, eng_alias).outerjoin(
+ eng_alias, Company.company_id == eng_alias.company_id),
+ "SELECT companies.company_id AS companies_company_id, "
+ "companies.name AS companies_name, "
+ "employees_1.employee_id AS employees_1_employee_id, "
+ "employees_1.name AS employees_1_name, "
+ "employees_1.manager_data AS employees_1_manager_data, "
+ "employees_1.engineer_info AS employees_1_engineer_info, "
+ "employees_1.type AS employees_1_type, "
+ "employees_1.company_id AS employees_1_company_id "
+ "FROM companies LEFT OUTER JOIN employees AS employees_1 ON "
+ "companies.company_id = employees_1.company_id "
+ "AND employees_1.type IN (:type_1)"
+ )
+
+ def test_outer_join_no_onclause(self):
+ Company, Employee, Engineer = self.classes.Company,\
+ self.classes.Employee,\
+ self.classes.Engineer
+ companies, employees = self.tables.companies, self.tables.employees
+
+ mapper(Company, companies, properties={
+ 'engineers':relationship(Engineer)
+ })
+ mapper(Employee, employees, polymorphic_on=employees.c.type)
+ mapper(Engineer, inherits=Employee, polymorphic_identity='engineer')
+
+ sess = create_session()
+ self.assert_compile(
+ sess.query(Company, Engineer).outerjoin(
+ Engineer),
+ "SELECT companies.company_id AS companies_company_id, "
+ "companies.name AS companies_name, "
+ "employees.employee_id AS employees_employee_id, "
+ "employees.name AS employees_name, "
+ "employees.manager_data AS employees_manager_data, "
+ "employees.engineer_info AS employees_engineer_info, "
+ "employees.type AS employees_type, "
+ "employees.company_id AS employees_company_id "
+ "FROM companies LEFT OUTER JOIN employees ON "
+ "companies.company_id = employees.company_id "
+ "AND employees.type IN (:type_1)"
+ )
+
+ def test_outer_join_no_onclause_alias(self):
+ Company, Employee, Engineer = self.classes.Company,\
+ self.classes.Employee,\
+ self.classes.Engineer
+ companies, employees = self.tables.companies, self.tables.employees
+
+ mapper(Company, companies, properties={
+ 'engineers':relationship(Engineer)
+ })
+ mapper(Employee, employees, polymorphic_on=employees.c.type)
+ mapper(Engineer, inherits=Employee, polymorphic_identity='engineer')
+
+ eng_alias = aliased(Engineer)
+ sess = create_session()
+ self.assert_compile(
+ sess.query(Company, eng_alias).outerjoin(
+ eng_alias),
+ "SELECT companies.company_id AS companies_company_id, "
+ "companies.name AS companies_name, "
+ "employees_1.employee_id AS employees_1_employee_id, "
+ "employees_1.name AS employees_1_name, "
+ "employees_1.manager_data AS employees_1_manager_data, "
+ "employees_1.engineer_info AS employees_1_engineer_info, "
+ "employees_1.type AS employees_1_type, "
+ "employees_1.company_id AS employees_1_company_id "
+ "FROM companies LEFT OUTER JOIN employees AS employees_1 ON "
+ "companies.company_id = employees_1.company_id "
+ "AND employees_1.type IN (:type_1)"
+ )
+
+ def test_no_aliasing_from_overlap(self):
+ # test [ticket:3233]
+
+ Company, Employee, Engineer, Manager = self.classes.Company,\
+ self.classes.Employee,\
+ self.classes.Engineer,\
+ self.classes.Manager
+
+ companies, employees = self.tables.companies, self.tables.employees
+
+ mapper(Company, companies, properties={
+ 'employees': relationship(Employee, backref="company")
+ })
+ mapper(Employee, employees, polymorphic_on=employees.c.type)
+ mapper(Engineer, inherits=Employee, polymorphic_identity='engineer')
+ mapper(Manager, inherits=Employee, polymorphic_identity='manager')
+
+ s = create_session()
+
+ q1 = s.query(Engineer).\
+ join(Engineer.company).\
+ join(Manager, Company.employees)
+
+ q2 = s.query(Engineer).\
+ join(Engineer.company).\
+ join(Manager, Company.company_id == Manager.company_id)
+
+ q3 = s.query(Engineer).\
+ join(Engineer.company).\
+ join(Manager, Company.employees.of_type(Manager))
+
+ q4 = s.query(Engineer).\
+ join(Company, Company.company_id == Engineer.company_id).\
+ join(Manager, Company.employees.of_type(Manager))
+
+ q5 = s.query(Engineer).\
+ join(Company, Company.company_id == Engineer.company_id).\
+ join(Manager, Company.company_id == Manager.company_id)
+
+ # note that the query is incorrect SQL; we JOIN to
+ # employees twice. However, this is the expected behavior, so we
+ # keep it consistent; previously, aliasing would sneak in due to
+ # the nature of the "left" side.
+ for q in [q1, q2, q3, q4, q5]:
+ self.assert_compile(
+ q,
+ "SELECT employees.employee_id AS employees_employee_id, "
+ "employees.name AS employees_name, "
+ "employees.manager_data AS employees_manager_data, "
+ "employees.engineer_info AS employees_engineer_info, "
+ "employees.type AS employees_type, "
+ "employees.company_id AS employees_company_id "
+ "FROM employees JOIN companies "
+ "ON companies.company_id = employees.company_id "
+ "JOIN employees "
+ "ON companies.company_id = employees.company_id "
+ "AND employees.type IN (:type_1) "
+ "WHERE employees.type IN (:type_2)"
+ )
+
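# As the comment above notes, the rendered SQL joins the unaliased employees
# table twice. A sketch of the usual way to keep the two joins distinct
# (an illustration, not part of this change) is to target an alias:
#
#     m_alias = aliased(Manager)
#     q = s.query(Engineer).\
#         join(Engineer.company).\
#         join(m_alias, Company.employees.of_type(m_alias))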
def test_relationship_to_subclass(self):
JuniorEngineer, Company, companies, Manager, \
Employee, employees, Engineer = (self.classes.JuniorEngineer,
diff --git a/test/orm/test_assorted_eager.py b/test/orm/test_assorted_eager.py
index 2bee3cbd6..48faa172f 100644
--- a/test/orm/test_assorted_eager.py
+++ b/test/orm/test_assorted_eager.py
@@ -82,8 +82,8 @@ class EagerTest(fixtures.MappedTest):
mapper(Category, categories)
mapper(Option, options, properties=dict(
- owner=relationship(Owner),
- test=relationship(Thing)))
+ owner=relationship(Owner, viewonly=True),
+ test=relationship(Thing, viewonly=True)))
mapper(Thing, tests, properties=dict(
owner=relationship(Owner, backref='tests'),
diff --git a/test/orm/test_bind.py b/test/orm/test_bind.py
index 0d869130b..33cd66ebc 100644
--- a/test/orm/test_bind.py
+++ b/test/orm/test_bind.py
@@ -1,14 +1,206 @@
-from sqlalchemy.testing import assert_raises, assert_raises_message
-from sqlalchemy import MetaData, Integer
+from sqlalchemy.testing import assert_raises_message
+from sqlalchemy import MetaData, Integer, ForeignKey
from sqlalchemy.testing.schema import Table
from sqlalchemy.testing.schema import Column
from sqlalchemy.orm import mapper, create_session
import sqlalchemy as sa
from sqlalchemy import testing
-from sqlalchemy.testing import fixtures
+from sqlalchemy.testing import fixtures, eq_, engines, is_
+from sqlalchemy.orm import relationship, Session, backref, sessionmaker
+from test.orm import _fixtures
+from sqlalchemy.testing.mock import Mock
-class BindTest(fixtures.MappedTest):
+class BindIntegrationTest(_fixtures.FixtureTest):
+ run_inserts = None
+
+ def test_mapped_binds(self):
+ Address, addresses, users, User = (self.classes.Address,
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
+
+ # ensure tables are unbound
+ m2 = sa.MetaData()
+ users_unbound = users.tometadata(m2)
+ addresses_unbound = addresses.tometadata(m2)
+
+ mapper(Address, addresses_unbound)
+ mapper(User, users_unbound, properties={
+ 'addresses': relationship(Address,
+ backref=backref("user", cascade="all"),
+ cascade="all")})
+
+ sess = Session(binds={User: self.metadata.bind,
+ Address: self.metadata.bind})
+
+ u1 = User(id=1, name='ed')
+ sess.add(u1)
+ eq_(sess.query(User).filter(User.id == 1).all(),
+ [User(id=1, name='ed')])
+
+ # test expression binding
+
+ sess.execute(users_unbound.insert(), params=dict(id=2,
+ name='jack'))
+ eq_(sess.execute(users_unbound.select(users_unbound.c.id
+ == 2)).fetchall(), [(2, 'jack')])
+
+ eq_(sess.execute(users_unbound.select(User.id == 2)).fetchall(),
+ [(2, 'jack')])
+
+ sess.execute(users_unbound.delete())
+ eq_(sess.execute(users_unbound.select()).fetchall(), [])
+
+ sess.close()
+
+ def test_table_binds(self):
+ Address, addresses, users, User = (self.classes.Address,
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
+
+ # ensure tables are unbound
+ m2 = sa.MetaData()
+ users_unbound = users.tometadata(m2)
+ addresses_unbound = addresses.tometadata(m2)
+
+ mapper(Address, addresses_unbound)
+ mapper(User, users_unbound, properties={
+ 'addresses': relationship(Address,
+ backref=backref("user", cascade="all"),
+ cascade="all")})
+
+ Session = sessionmaker(binds={users_unbound: self.metadata.bind,
+ addresses_unbound: self.metadata.bind})
+ sess = Session()
+
+ u1 = User(id=1, name='ed')
+ sess.add(u1)
+ eq_(sess.query(User).filter(User.id == 1).all(),
+ [User(id=1, name='ed')])
+
+ sess.execute(users_unbound.insert(), params=dict(id=2, name='jack'))
+
+ eq_(sess.execute(users_unbound.select(users_unbound.c.id
+ == 2)).fetchall(), [(2, 'jack')])
+
+ eq_(sess.execute(users_unbound.select(User.id == 2)).fetchall(),
+ [(2, 'jack')])
+
+ sess.execute(users_unbound.delete())
+ eq_(sess.execute(users_unbound.select()).fetchall(), [])
+
+ sess.close()
+
+ def test_bind_from_metadata(self):
+ users, User = self.tables.users, self.classes.User
+
+ mapper(User, users)
+
+ session = create_session()
+ session.execute(users.insert(), dict(name='Johnny'))
+
+ assert len(session.query(User).filter_by(name='Johnny').all()) == 1
+
+ session.execute(users.delete())
+
+ assert len(session.query(User).filter_by(name='Johnny').all()) == 0
+ session.close()
+
+ def test_bind_arguments(self):
+ users, Address, addresses, User = (self.tables.users,
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
+
+ mapper(User, users)
+ mapper(Address, addresses)
+
+ e1 = engines.testing_engine()
+ e2 = engines.testing_engine()
+ e3 = engines.testing_engine()
+
+ sess = Session(e3)
+ sess.bind_mapper(User, e1)
+ sess.bind_mapper(Address, e2)
+
+ assert sess.connection().engine is e3
+ assert sess.connection(bind=e1).engine is e1
+ assert sess.connection(mapper=Address, bind=e1).engine is e1
+ assert sess.connection(mapper=Address).engine is e2
+ assert sess.connection(clause=addresses.select()).engine is e2
+ assert sess.connection(mapper=User,
+ clause=addresses.select()).engine is e1
+ assert sess.connection(mapper=User,
+ clause=addresses.select(),
+ bind=e2).engine is e2
+
+ sess.close()
+
+ @engines.close_open_connections
+ def test_bound_connection(self):
+ users, User = self.tables.users, self.classes.User
+
+ mapper(User, users)
+ c = testing.db.connect()
+ sess = create_session(bind=c)
+ sess.begin()
+ transaction = sess.transaction
+ u = User(name='u1')
+ sess.add(u)
+ sess.flush()
+ assert transaction._connection_for_bind(testing.db) \
+ is transaction._connection_for_bind(c) is c
+
+ assert_raises_message(sa.exc.InvalidRequestError,
+ 'Session already has a Connection '
+ 'associated',
+ transaction._connection_for_bind,
+ testing.db.connect())
+ transaction.rollback()
+ assert len(sess.query(User).all()) == 0
+ sess.close()
+
+ def test_bound_connection_transactional(self):
+ User, users = self.classes.User, self.tables.users
+
+ mapper(User, users)
+ c = testing.db.connect()
+
+ sess = create_session(bind=c, autocommit=False)
+ u = User(name='u1')
+ sess.add(u)
+ sess.flush()
+ sess.close()
+ assert not c.in_transaction()
+ assert c.scalar("select count(1) from users") == 0
+
+ sess = create_session(bind=c, autocommit=False)
+ u = User(name='u2')
+ sess.add(u)
+ sess.flush()
+ sess.commit()
+ assert not c.in_transaction()
+ assert c.scalar("select count(1) from users") == 1
+ c.execute("delete from users")
+ assert c.scalar("select count(1) from users") == 0
+
+ c = testing.db.connect()
+
+ trans = c.begin()
+ sess = create_session(bind=c, autocommit=True)
+ u = User(name='u3')
+ sess.add(u)
+ sess.flush()
+ assert c.in_transaction()
+ trans.commit()
+ assert not c.in_transaction()
+ assert c.scalar("select count(1) from users") == 1
+
+
+class SessionBindTest(fixtures.MappedTest):
+
@classmethod
def define_tables(cls, metadata):
Table('test_table', metadata,
@@ -60,3 +252,216 @@ class BindTest(fixtures.MappedTest):
sess.flush)
+class GetBindTest(fixtures.MappedTest):
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'base_table', metadata,
+ Column('id', Integer, primary_key=True)
+ )
+ Table(
+ 'w_mixin_table', metadata,
+ Column('id', Integer, primary_key=True)
+ )
+ Table(
+ 'joined_sub_table', metadata,
+ Column('id', ForeignKey('base_table.id'), primary_key=True)
+ )
+ Table(
+ 'concrete_sub_table', metadata,
+ Column('id', Integer, primary_key=True)
+ )
+
+ @classmethod
+ def setup_classes(cls):
+ class MixinOne(cls.Basic):
+ pass
+
+ class BaseClass(cls.Basic):
+ pass
+
+ class ClassWMixin(MixinOne, cls.Basic):
+ pass
+
+ class JoinedSubClass(BaseClass):
+ pass
+
+ class ConcreteSubClass(BaseClass):
+ pass
+
+ @classmethod
+ def setup_mappers(cls):
+ mapper(cls.classes.ClassWMixin, cls.tables.w_mixin_table)
+ mapper(cls.classes.BaseClass, cls.tables.base_table)
+ mapper(
+ cls.classes.JoinedSubClass,
+ cls.tables.joined_sub_table, inherits=cls.classes.BaseClass)
+ mapper(
+ cls.classes.ConcreteSubClass,
+ cls.tables.concrete_sub_table, inherits=cls.classes.BaseClass,
+ concrete=True)
+
+ def _fixture(self, binds):
+ return Session(binds=binds)
+
+ def test_fallback_table_metadata(self):
+ session = self._fixture({})
+ is_(
+ session.get_bind(self.classes.BaseClass),
+ testing.db
+ )
+
+ def test_bind_base_table_base_class(self):
+ base_class_bind = Mock()
+ session = self._fixture({
+ self.tables.base_table: base_class_bind
+ })
+
+ is_(
+ session.get_bind(self.classes.BaseClass),
+ base_class_bind
+ )
+
+ def test_bind_base_table_joined_sub_class(self):
+ base_class_bind = Mock()
+ session = self._fixture({
+ self.tables.base_table: base_class_bind
+ })
+
+ is_(
+ session.get_bind(self.classes.BaseClass),
+ base_class_bind
+ )
+ is_(
+ session.get_bind(self.classes.JoinedSubClass),
+ base_class_bind
+ )
+
+ def test_bind_joined_sub_table_joined_sub_class(self):
+ base_class_bind = Mock(name='base')
+ joined_class_bind = Mock(name='joined')
+ session = self._fixture({
+ self.tables.base_table: base_class_bind,
+ self.tables.joined_sub_table: joined_class_bind
+ })
+
+ is_(
+ session.get_bind(self.classes.BaseClass),
+ base_class_bind
+ )
+ # joined table inheritance has to query based on the base
+ # table, so this is what we expect
+ is_(
+ session.get_bind(self.classes.JoinedSubClass),
+ base_class_bind
+ )
+
+ def test_bind_base_table_concrete_sub_class(self):
+ base_class_bind = Mock()
+ session = self._fixture({
+ self.tables.base_table: base_class_bind
+ })
+
+ is_(
+ session.get_bind(self.classes.ConcreteSubClass),
+ testing.db
+ )
+
+ def test_bind_sub_table_concrete_sub_class(self):
+ base_class_bind = Mock(name='base')
+ concrete_sub_bind = Mock(name='concrete')
+
+ session = self._fixture({
+ self.tables.base_table: base_class_bind,
+ self.tables.concrete_sub_table: concrete_sub_bind
+ })
+
+ is_(
+ session.get_bind(self.classes.BaseClass),
+ base_class_bind
+ )
+ is_(
+ session.get_bind(self.classes.ConcreteSubClass),
+ concrete_sub_bind
+ )
+
+ def test_bind_base_class_base_class(self):
+ base_class_bind = Mock()
+ session = self._fixture({
+ self.classes.BaseClass: base_class_bind
+ })
+
+ is_(
+ session.get_bind(self.classes.BaseClass),
+ base_class_bind
+ )
+
+ def test_bind_mixin_class_simple_class(self):
+ base_class_bind = Mock()
+ session = self._fixture({
+ self.classes.MixinOne: base_class_bind
+ })
+
+ is_(
+ session.get_bind(self.classes.ClassWMixin),
+ base_class_bind
+ )
+
+ def test_bind_base_class_joined_sub_class(self):
+ base_class_bind = Mock()
+ session = self._fixture({
+ self.classes.BaseClass: base_class_bind
+ })
+
+ is_(
+ session.get_bind(self.classes.JoinedSubClass),
+ base_class_bind
+ )
+
+ def test_bind_joined_sub_class_joined_sub_class(self):
+ base_class_bind = Mock(name='base')
+ joined_class_bind = Mock(name='joined')
+ session = self._fixture({
+ self.classes.BaseClass: base_class_bind,
+ self.classes.JoinedSubClass: joined_class_bind
+ })
+
+ is_(
+ session.get_bind(self.classes.BaseClass),
+ base_class_bind
+ )
+ is_(
+ session.get_bind(self.classes.JoinedSubClass),
+ joined_class_bind
+ )
+
+ def test_bind_base_class_concrete_sub_class(self):
+ base_class_bind = Mock()
+ session = self._fixture({
+ self.classes.BaseClass: base_class_bind
+ })
+
+ is_(
+ session.get_bind(self.classes.ConcreteSubClass),
+ base_class_bind
+ )
+
+ def test_bind_sub_class_concrete_sub_class(self):
+ base_class_bind = Mock(name='base')
+ concrete_sub_bind = Mock(name='concrete')
+
+ session = self._fixture({
+ self.classes.BaseClass: base_class_bind,
+ self.classes.ConcreteSubClass: concrete_sub_bind
+ })
+
+ is_(
+ session.get_bind(self.classes.BaseClass),
+ base_class_bind
+ )
+ is_(
+ session.get_bind(self.classes.ConcreteSubClass),
+ concrete_sub_bind
+ )
+
+
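# The GetBindTest cases above pin down Session.get_bind() resolution,
# roughly: a bind registered for the class (or a superclass/mixin) is
# consulted before one registered for the mapped table, and the bound
# MetaData / testing.db is the fallback. A sketch, with hypothetical
# engines:
#
#     session = Session(binds={
#         BaseClass: base_engine,        # matched via the class hierarchy
#         base_table: table_engine,      # matched via the mapped table
#     })
#     session.get_bind(JoinedSubClass)   # base_engine, not table_engine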
diff --git a/test/orm/test_cascade.py b/test/orm/test_cascade.py
index bd6a17286..e39911d0f 100644
--- a/test/orm/test_cascade.py
+++ b/test/orm/test_cascade.py
@@ -1,3 +1,4 @@
+import copy
from sqlalchemy.testing import assert_raises, assert_raises_message
from sqlalchemy import Integer, String, ForeignKey, Sequence, \
@@ -13,6 +14,7 @@ from sqlalchemy.testing import eq_
from sqlalchemy.testing import fixtures
from test.orm import _fixtures
+
class CascadeArgTest(fixtures.MappedTest):
run_inserts = None
run_create_tables = None
@@ -85,6 +87,12 @@ class CascadeArgTest(fixtures.MappedTest):
orm_util.CascadeOptions("all, delete-orphan"),
frozenset)
+ def test_cascade_deepcopy(self):
+ old = orm_util.CascadeOptions("all, delete-orphan")
+ new = copy.deepcopy(old)
+ eq_(old, new)
+
+
def test_cascade_assignable(self):
User, Address = self.classes.User, self.classes.Address
users, addresses = self.tables.users, self.tables.addresses
diff --git a/test/orm/test_joins.py b/test/orm/test_joins.py
index 40bc01b5d..eba47dbec 100644
--- a/test/orm/test_joins.py
+++ b/test/orm/test_joins.py
@@ -361,6 +361,27 @@ class InheritedJoinTest(fixtures.MappedTest, AssertsCompiledSQL):
)
+class JoinOnSynonymTest(_fixtures.FixtureTest, AssertsCompiledSQL):
+ @classmethod
+ def setup_mappers(cls):
+ User = cls.classes.User
+ Address = cls.classes.Address
+ users, addresses = (cls.tables.users, cls.tables.addresses)
+ mapper(User, users, properties={
+ 'addresses': relationship(Address),
+ 'ad_syn': synonym("addresses")
+ })
+ mapper(Address, addresses)
+
+ def test_join_on_synonym(self):
+ User = self.classes.User
+ self.assert_compile(
+ Session().query(User).join(User.ad_syn),
+ "SELECT users.id AS users_id, users.name AS users_name "
+ "FROM users JOIN addresses ON users.id = addresses.user_id"
+ )
+
+
class JoinTest(QueryTest, AssertsCompiledSQL):
__dialect__ = 'default'
@@ -409,24 +430,6 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
sess.query(literal_column('x'), User).join, Address
)
- def test_join_on_synonym(self):
-
- class User(object):
- pass
- class Address(object):
- pass
- users, addresses = (self.tables.users, self.tables.addresses)
- mapper(User, users, properties={
- 'addresses':relationship(Address),
- 'ad_syn':synonym("addresses")
- })
- mapper(Address, addresses)
- self.assert_compile(
- Session().query(User).join(User.ad_syn),
- "SELECT users.id AS users_id, users.name AS users_name "
- "FROM users JOIN addresses ON users.id = addresses.user_id"
- )
-
def test_multi_tuple_form(self):
"""test the 'tuple' form of join, now superseded
by the two-element join() form.
diff --git a/test/orm/test_mapper.py b/test/orm/test_mapper.py
index 0a9cbfc71..63ba1a207 100644
--- a/test/orm/test_mapper.py
+++ b/test/orm/test_mapper.py
@@ -222,7 +222,8 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
mapper(Address, addresses)
s = create_session()
a = s.query(Address).from_statement(
- sa.select([addresses.c.id, addresses.c.user_id])).first()
+ sa.select([addresses.c.id, addresses.c.user_id]).
+ order_by(addresses.c.id)).first()
eq_(a.user_id, 7)
eq_(a.id, 1)
# email address auto-defers
diff --git a/test/orm/test_of_type.py b/test/orm/test_of_type.py
index 836d85cc7..b9ebc2daf 100644
--- a/test/orm/test_of_type.py
+++ b/test/orm/test_of_type.py
@@ -14,6 +14,7 @@ from .inheritance._poly_fixtures import Company, Person, Engineer, Manager, Boss
_PolymorphicPolymorphic, _PolymorphicUnions, _PolymorphicJoins,\
_PolymorphicAliasedJoins
+
class _PolymorphicTestBase(object):
__dialect__ = 'default'
@@ -191,6 +192,21 @@ class _PolymorphicTestBase(object):
)
self.assert_sql_count(testing.db, go, 3)
+ def test_joinedload_stacked_of_type(self):
+ sess = Session()
+
+ def go():
+ eq_(
+ sess.query(Company).
+ filter_by(company_id=1).
+ options(
+ joinedload(Company.employees.of_type(Manager)),
+ joinedload(Company.employees.of_type(Engineer))
+ ).all(),
+ [self._company_with_emps_fixture()[0]]
+ )
+ self.assert_sql_count(testing.db, go, 2)
+
class PolymorphicPolymorphicTest(_PolymorphicTestBase, _PolymorphicPolymorphic):
def _polymorphic_join_target(self, cls):
diff --git a/test/orm/test_relationships.py b/test/orm/test_relationships.py
index 6bcb02639..2a15ce666 100644
--- a/test/orm/test_relationships.py
+++ b/test/orm/test_relationships.py
@@ -5,20 +5,22 @@ from sqlalchemy import testing
from sqlalchemy import Integer, String, ForeignKey, MetaData, and_
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.orm import mapper, relationship, relation, \
- backref, create_session, configure_mappers, \
- clear_mappers, sessionmaker, attributes,\
- Session, composite, column_property, foreign,\
- remote, synonym, joinedload, subqueryload
-from sqlalchemy.orm.interfaces import ONETOMANY, MANYTOONE, MANYTOMANY
+ backref, create_session, configure_mappers, \
+ clear_mappers, sessionmaker, attributes,\
+ Session, composite, column_property, foreign,\
+ remote, synonym, joinedload, subqueryload
+from sqlalchemy.orm.interfaces import ONETOMANY, MANYTOONE
from sqlalchemy.testing import eq_, startswith_, AssertsCompiledSQL, is_
from sqlalchemy.testing import fixtures
from test.orm import _fixtures
from sqlalchemy import exc
from sqlalchemy import inspect
+
class _RelationshipErrors(object):
+
def _assert_raises_no_relevant_fks(self, fn, expr, relname,
- primary, *arg, **kw):
+ primary, *arg, **kw):
assert_raises_message(
sa.exc.ArgumentError,
"Could not locate any relevant foreign key columns "
@@ -33,7 +35,7 @@ class _RelationshipErrors(object):
)
def _assert_raises_no_equality(self, fn, expr, relname,
- primary, *arg, **kw):
+ primary, *arg, **kw):
assert_raises_message(
sa.exc.ArgumentError,
"Could not locate any simple equality expressions "
@@ -50,7 +52,7 @@ class _RelationshipErrors(object):
)
def _assert_raises_ambig_join(self, fn, relname, secondary_arg,
- *arg, **kw):
+ *arg, **kw):
if secondary_arg is not None:
assert_raises_message(
exc.ArgumentError,
@@ -78,7 +80,7 @@ class _RelationshipErrors(object):
fn, *arg, **kw)
def _assert_raises_no_join(self, fn, relname, secondary_arg,
- *arg, **kw):
+ *arg, **kw):
if secondary_arg is not None:
assert_raises_message(
exc.NoForeignKeysError,
@@ -86,7 +88,8 @@ class _RelationshipErrors(object):
"parent/child tables on relationship %s - "
"there are no foreign keys linking these tables "
"via secondary table '%s'. "
- "Ensure that referencing columns are associated with a ForeignKey "
+ "Ensure that referencing columns are associated with a "
+ "ForeignKey "
"or ForeignKeyConstraint, or specify 'primaryjoin' and "
"'secondaryjoin' expressions"
% (relname, secondary_arg),
@@ -97,7 +100,8 @@ class _RelationshipErrors(object):
"Could not determine join condition between "
"parent/child tables on relationship %s - "
"there are no foreign keys linking these tables. "
- "Ensure that referencing columns are associated with a ForeignKey "
+ "Ensure that referencing columns are associated with a "
+ "ForeignKey "
"or ForeignKeyConstraint, or specify a 'primaryjoin' "
"expression."
% (relname,),
@@ -125,12 +129,16 @@ class _RelationshipErrors(object):
"pairs based on join condition and remote_side arguments. "
r"Consider using the remote\(\) annotation to "
"accurately mark those elements of the join "
- "condition that are on the remote side of the relationship." % relname,
+ "condition that are on the remote side of the relationship." % (
+ relname
+ ),
fn, *arg, **kw
)
+
class DependencyTwoParentTest(fixtures.MappedTest):
+
"""Test flush() when a mapper is dependent on multiple relationships"""
run_setup_mappers = 'once'
@@ -140,74 +148,77 @@ class DependencyTwoParentTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table("tbl_a", metadata,
- Column("id", Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column("name", String(128)))
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("name", String(128)))
Table("tbl_b", metadata,
- Column("id", Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column("name", String(128)))
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("name", String(128)))
Table("tbl_c", metadata,
- Column("id", Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column("tbl_a_id", Integer, ForeignKey("tbl_a.id"),
- nullable=False),
- Column("name", String(128)))
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("tbl_a_id", Integer, ForeignKey("tbl_a.id"),
+ nullable=False),
+ Column("name", String(128)))
Table("tbl_d", metadata,
- Column("id", Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column("tbl_c_id", Integer, ForeignKey("tbl_c.id"),
- nullable=False),
- Column("tbl_b_id", Integer, ForeignKey("tbl_b.id")),
- Column("name", String(128)))
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("tbl_c_id", Integer, ForeignKey("tbl_c.id"),
+ nullable=False),
+ Column("tbl_b_id", Integer, ForeignKey("tbl_b.id")),
+ Column("name", String(128)))
@classmethod
def setup_classes(cls):
class A(cls.Basic):
pass
+
class B(cls.Basic):
pass
+
class C(cls.Basic):
pass
+
class D(cls.Basic):
pass
@classmethod
def setup_mappers(cls):
A, C, B, D, tbl_b, tbl_c, tbl_a, tbl_d = (cls.classes.A,
- cls.classes.C,
- cls.classes.B,
- cls.classes.D,
- cls.tables.tbl_b,
- cls.tables.tbl_c,
- cls.tables.tbl_a,
- cls.tables.tbl_d)
+ cls.classes.C,
+ cls.classes.B,
+ cls.classes.D,
+ cls.tables.tbl_b,
+ cls.tables.tbl_c,
+ cls.tables.tbl_a,
+ cls.tables.tbl_d)
mapper(A, tbl_a, properties=dict(
c_rows=relationship(C, cascade="all, delete-orphan",
- backref="a_row")))
+ backref="a_row")))
mapper(B, tbl_b)
mapper(C, tbl_c, properties=dict(
d_rows=relationship(D, cascade="all, delete-orphan",
- backref="c_row")))
+ backref="c_row")))
mapper(D, tbl_d, properties=dict(
b_row=relationship(B)))
@classmethod
def insert_data(cls):
A, C, B, D = (cls.classes.A,
- cls.classes.C,
- cls.classes.B,
- cls.classes.D)
+ cls.classes.C,
+ cls.classes.B,
+ cls.classes.D)
session = create_session()
a = A(name='a1')
b = B(name='b1')
c = C(name='c1', a_row=a)
- d1 = D(name='d1', b_row=b, c_row=c)
- d2 = D(name='d2', b_row=b, c_row=c)
- d3 = D(name='d3', b_row=b, c_row=c)
+ d1 = D(name='d1', b_row=b, c_row=c) # noqa
+ d2 = D(name='d2', b_row=b, c_row=c) # noqa
+ d3 = D(name='d3', b_row=b, c_row=c) # noqa
session.add(a)
session.add(b)
session.flush()
@@ -230,7 +241,9 @@ class DependencyTwoParentTest(fixtures.MappedTest):
session.delete(c)
session.flush()
+
class M2ODontOverwriteFKTest(fixtures.MappedTest):
+
@classmethod
def define_tables(cls, metadata):
Table(
@@ -248,13 +261,13 @@ class M2ODontOverwriteFKTest(fixtures.MappedTest):
class A(fixtures.BasicEntity):
pass
+
class B(fixtures.BasicEntity):
pass
-
mapper(A, a, properties={
- 'b': relationship(B, uselist=uselist)
- })
+ 'b': relationship(B, uselist=uselist)
+ })
mapper(B, b)
return A, B
@@ -271,7 +284,6 @@ class M2ODontOverwriteFKTest(fixtures.MappedTest):
sess.commit()
# test that was broken by #3060
- from sqlalchemy.orm import joinedload
a1 = sess.query(A).options(joinedload("b")).first()
a1.bid = b1.id
sess.flush()
@@ -340,8 +352,8 @@ class M2ODontOverwriteFKTest(fixtures.MappedTest):
assert a1.bid is not None
-
class DirectSelfRefFKTest(fixtures.MappedTest, AssertsCompiledSQL):
+
"""Tests the ultimate join condition, a single column
that points to itself, e.g. within a SQL function or similar.
The test is against a materialized path setup.
@@ -365,28 +377,28 @@ class DirectSelfRefFKTest(fixtures.MappedTest, AssertsCompiledSQL):
@classmethod
def define_tables(cls, metadata):
Table('entity', metadata,
- Column('path', String(100), primary_key=True)
- )
+ Column('path', String(100), primary_key=True)
+ )
@classmethod
def setup_classes(cls):
class Entity(cls.Basic):
+
def __init__(self, path):
self.path = path
-
def _descendants_fixture(self, data=True):
Entity = self.classes.Entity
entity = self.tables.entity
m = mapper(Entity, entity, properties={
- "descendants": relationship(Entity,
- primaryjoin=
- remote(foreign(entity.c.path)).like(
- entity.c.path.concat('/%')),
- viewonly=True,
- order_by=entity.c.path)
- })
+ "descendants": relationship(
+ Entity,
+ primaryjoin=remote(foreign(entity.c.path)).like(
+ entity.c.path.concat('/%')),
+ viewonly=True,
+ order_by=entity.c.path)
+ })
configure_mappers()
assert m.get_property("descendants").direction is ONETOMANY
if data:
@@ -397,13 +409,13 @@ class DirectSelfRefFKTest(fixtures.MappedTest, AssertsCompiledSQL):
entity = self.tables.entity
m = mapper(Entity, entity, properties={
- "anscestors": relationship(Entity,
- primaryjoin=
- entity.c.path.like(
- remote(foreign(entity.c.path)).concat('/%')),
- viewonly=True,
- order_by=entity.c.path)
- })
+ "anscestors": relationship(
+ Entity,
+ primaryjoin=entity.c.path.like(
+ remote(foreign(entity.c.path)).concat('/%')),
+ viewonly=True,
+ order_by=entity.c.path)
+ })
configure_mappers()
assert m.get_property("anscestors").direction is ONETOMANY
if data:
@@ -447,7 +459,7 @@ class DirectSelfRefFKTest(fixtures.MappedTest, AssertsCompiledSQL):
sess = self._descendants_fixture()
Entity = self.classes.Entity
e1 = sess.query(Entity).filter_by(path="/foo").\
- options(joinedload(Entity.descendants)).first()
+ options(joinedload(Entity.descendants)).first()
eq_(
[e.path for e in e1.descendants],
@@ -459,7 +471,7 @@ class DirectSelfRefFKTest(fixtures.MappedTest, AssertsCompiledSQL):
sess = self._descendants_fixture()
Entity = self.classes.Entity
e1 = sess.query(Entity).filter_by(path="/foo").\
- options(subqueryload(Entity.descendants)).first()
+ options(subqueryload(Entity.descendants)).first()
eq_(
[e.path for e in e1.descendants],
@@ -471,7 +483,7 @@ class DirectSelfRefFKTest(fixtures.MappedTest, AssertsCompiledSQL):
sess = self._anscestors_fixture()
Entity = self.classes.Entity
e1 = sess.query(Entity).filter_by(path="/foo/bar2/bat1").\
- options(joinedload(Entity.anscestors)).first()
+ options(joinedload(Entity.anscestors)).first()
eq_(
[e.path for e in e1.anscestors],
["/foo", "/foo/bar2"]
@@ -488,8 +500,8 @@ class DirectSelfRefFKTest(fixtures.MappedTest, AssertsCompiledSQL):
)
-
class CompositeSelfRefFKTest(fixtures.MappedTest):
+
"""Tests a composite FK where, in
the relationship(), one col points
to itself in the same table.
@@ -515,7 +527,7 @@ class CompositeSelfRefFKTest(fixtures.MappedTest):
def define_tables(cls, metadata):
Table('company_t', metadata,
Column('company_id', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column('name', String(30)))
Table('employee_t', metadata,
@@ -533,10 +545,12 @@ class CompositeSelfRefFKTest(fixtures.MappedTest):
@classmethod
def setup_classes(cls):
class Company(cls.Basic):
+
def __init__(self, name):
self.name = name
class Employee(cls.Basic):
+
def __init__(self, name, company, emp_id, reports_to=None):
self.name = name
self.company = company
@@ -545,116 +559,202 @@ class CompositeSelfRefFKTest(fixtures.MappedTest):
def test_explicit(self):
Employee, Company, employee_t, company_t = (self.classes.Employee,
- self.classes.Company,
- self.tables.employee_t,
- self.tables.company_t)
+ self.classes.Company,
+ self.tables.employee_t,
+ self.tables.company_t)
mapper(Company, company_t)
- mapper(Employee, employee_t, properties= {
- 'company':relationship(Company,
- primaryjoin=employee_t.c.company_id==
- company_t.c.company_id,
- backref='employees'),
- 'reports_to':relationship(Employee, primaryjoin=
- sa.and_(
- employee_t.c.emp_id==employee_t.c.reports_to_id,
- employee_t.c.company_id==employee_t.c.company_id
- ),
+ mapper(Employee, employee_t, properties={
+ 'company': relationship(Company,
+ primaryjoin=employee_t.c.company_id ==
+ company_t.c.company_id,
+ backref='employees'),
+ 'reports_to': relationship(Employee, primaryjoin=sa.and_(
+ employee_t.c.emp_id == employee_t.c.reports_to_id,
+ employee_t.c.company_id == employee_t.c.company_id
+ ),
remote_side=[employee_t.c.emp_id, employee_t.c.company_id],
- foreign_keys=[employee_t.c.reports_to_id, employee_t.c.company_id],
+ foreign_keys=[
+ employee_t.c.reports_to_id, employee_t.c.company_id],
backref=backref('employees',
- foreign_keys=[employee_t.c.reports_to_id,
- employee_t.c.company_id]))
+ foreign_keys=[employee_t.c.reports_to_id,
+ employee_t.c.company_id]))
})
self._test()
def test_implicit(self):
Employee, Company, employee_t, company_t = (self.classes.Employee,
- self.classes.Company,
- self.tables.employee_t,
- self.tables.company_t)
+ self.classes.Company,
+ self.tables.employee_t,
+ self.tables.company_t)
mapper(Company, company_t)
- mapper(Employee, employee_t, properties= {
- 'company':relationship(Company, backref='employees'),
- 'reports_to':relationship(Employee,
+ mapper(Employee, employee_t, properties={
+ 'company': relationship(Company, backref='employees'),
+ 'reports_to': relationship(
+ Employee,
remote_side=[employee_t.c.emp_id, employee_t.c.company_id],
foreign_keys=[employee_t.c.reports_to_id,
- employee_t.c.company_id],
- backref=backref('employees', foreign_keys=
- [employee_t.c.reports_to_id, employee_t.c.company_id])
- )
+ employee_t.c.company_id],
+ backref=backref(
+ 'employees',
+ foreign_keys=[
+ employee_t.c.reports_to_id, employee_t.c.company_id])
+ )
})
self._test()
def test_very_implicit(self):
Employee, Company, employee_t, company_t = (self.classes.Employee,
- self.classes.Company,
- self.tables.employee_t,
- self.tables.company_t)
+ self.classes.Company,
+ self.tables.employee_t,
+ self.tables.company_t)
mapper(Company, company_t)
- mapper(Employee, employee_t, properties= {
- 'company':relationship(Company, backref='employees'),
- 'reports_to':relationship(Employee,
+ mapper(Employee, employee_t, properties={
+ 'company': relationship(Company, backref='employees'),
+ 'reports_to': relationship(
+ Employee,
remote_side=[employee_t.c.emp_id, employee_t.c.company_id],
backref='employees'
- )
+ )
})
self._test()
def test_very_explicit(self):
Employee, Company, employee_t, company_t = (self.classes.Employee,
- self.classes.Company,
- self.tables.employee_t,
- self.tables.company_t)
+ self.classes.Company,
+ self.tables.employee_t,
+ self.tables.company_t)
mapper(Company, company_t)
- mapper(Employee, employee_t, properties= {
- 'company':relationship(Company, backref='employees'),
- 'reports_to':relationship(Employee,
- _local_remote_pairs = [
- (employee_t.c.reports_to_id, employee_t.c.emp_id),
- (employee_t.c.company_id, employee_t.c.company_id)
+ mapper(Employee, employee_t, properties={
+ 'company': relationship(Company, backref='employees'),
+ 'reports_to': relationship(
+ Employee,
+ _local_remote_pairs=[
+ (employee_t.c.reports_to_id, employee_t.c.emp_id),
+ (employee_t.c.company_id, employee_t.c.company_id)
],
- foreign_keys=[employee_t.c.reports_to_id,
- employee_t.c.company_id],
- backref=backref('employees', foreign_keys=
- [employee_t.c.reports_to_id, employee_t.c.company_id])
- )
+ foreign_keys=[
+ employee_t.c.reports_to_id,
+ employee_t.c.company_id],
+ backref=backref(
+ 'employees',
+ foreign_keys=[
+ employee_t.c.reports_to_id, employee_t.c.company_id])
+ )
})
self._test()
def test_annotated(self):
Employee, Company, employee_t, company_t = (self.classes.Employee,
- self.classes.Company,
- self.tables.employee_t,
- self.tables.company_t)
+ self.classes.Company,
+ self.tables.employee_t,
+ self.tables.company_t)
mapper(Company, company_t)
- mapper(Employee, employee_t, properties= {
- 'company':relationship(Company, backref='employees'),
- 'reports_to':relationship(Employee,
+ mapper(Employee, employee_t, properties={
+ 'company': relationship(Company, backref='employees'),
+ 'reports_to': relationship(
+ Employee,
primaryjoin=sa.and_(
- remote(employee_t.c.emp_id)==employee_t.c.reports_to_id,
- remote(employee_t.c.company_id)==employee_t.c.company_id
+ remote(employee_t.c.emp_id) == employee_t.c.reports_to_id,
+ remote(employee_t.c.company_id) == employee_t.c.company_id
),
backref=backref('employees')
- )
+ )
})
self._test()
+ def test_overlapping_warning(self):
+ Employee, Company, employee_t, company_t = (self.classes.Employee,
+ self.classes.Company,
+ self.tables.employee_t,
+ self.tables.company_t)
+
+ mapper(Company, company_t)
+ mapper(Employee, employee_t, properties={
+ 'company': relationship(Company, backref='employees'),
+ 'reports_to': relationship(
+ Employee,
+ primaryjoin=sa.and_(
+ remote(employee_t.c.emp_id) == employee_t.c.reports_to_id,
+ remote(employee_t.c.company_id) == employee_t.c.company_id
+ ),
+ backref=backref('employees')
+ )
+ })
+
+ assert_raises_message(
+ exc.SAWarning,
+ r"relationship .* will copy column .* to column "
+ "employee_t.company_id, which conflicts with relationship\(s\)",
+ configure_mappers
+ )
+
+ def test_annotated_no_overwriting(self):
+ Employee, Company, employee_t, company_t = (self.classes.Employee,
+ self.classes.Company,
+ self.tables.employee_t,
+ self.tables.company_t)
+
+ mapper(Company, company_t)
+ mapper(Employee, employee_t, properties={
+ 'company': relationship(Company, backref='employees'),
+ 'reports_to': relationship(
+ Employee,
+ primaryjoin=sa.and_(
+ remote(employee_t.c.emp_id) ==
+ foreign(employee_t.c.reports_to_id),
+ remote(employee_t.c.company_id) == employee_t.c.company_id
+ ),
+ backref=backref('employees')
+ )
+ })
+
+ self._test_no_warning()
+
+ def _test_no_overwrite(self, sess, expect_failure):
+ # test [ticket:3230]
+
+ Employee, Company = self.classes.Employee, self.classes.Company
+
+ c1 = sess.query(Company).filter_by(name='c1').one()
+ e3 = sess.query(Employee).filter_by(name='emp3').one()
+ e3.reports_to = None
+
+ if expect_failure:
+ # if foreign() isn't applied specifically to
+ # employee_t.c.reports_to_id only, then
+ # employee_t.c.company_id is treated as foreign as well,
+ # and the flush below fails
+ assert_raises_message(
+ AssertionError,
+ "Dependency rule tried to blank-out primary key column "
+ "'employee_t.company_id'",
+ sess.flush
+ )
+ else:
+ sess.flush()
+ eq_(e3.company, c1)
+
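# A sketch of the distinction the mappings above exercise: with foreign()
# left off the primaryjoin, both reports_to_id and company_id participate
# as foreign, so clearing reports_to blanks out the company_id primary key
# column; restricting foreign() narrows the sync to one column:
#
#     primaryjoin=sa.and_(
#         remote(employee_t.c.emp_id) ==
#         foreign(employee_t.c.reports_to_id),       # only this column syncs
#         remote(employee_t.c.company_id) == employee_t.c.company_id)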
+ @testing.emits_warning("relationship .* will copy column ")
def _test(self):
+ self._test_no_warning(overwrites=True)
+
+ def _test_no_warning(self, overwrites=False):
self._test_relationships()
sess = Session()
self._setup_data(sess)
self._test_lazy_relations(sess)
self._test_join_aliasing(sess)
+ self._test_no_overwrite(sess, expect_failure=overwrites)
def _test_relationships(self):
configure_mappers()
@@ -665,7 +765,7 @@ class CompositeSelfRefFKTest(fixtures.MappedTest):
set([
(employee_t.c.company_id, employee_t.c.company_id),
(employee_t.c.emp_id, employee_t.c.reports_to_id),
- ])
+ ])
)
eq_(
Employee.employees.property.remote_side,
@@ -676,7 +776,7 @@ class CompositeSelfRefFKTest(fixtures.MappedTest):
set([
(employee_t.c.company_id, employee_t.c.company_id),
(employee_t.c.reports_to_id, employee_t.c.emp_id),
- ])
+ ])
)
def _setup_data(self, sess):
@@ -686,12 +786,12 @@ class CompositeSelfRefFKTest(fixtures.MappedTest):
c2 = Company('c2')
e1 = Employee('emp1', c1, 1)
- e2 = Employee('emp2', c1, 2, e1)
+ e2 = Employee('emp2', c1, 2, e1) # noqa
e3 = Employee('emp3', c1, 3, e1)
- e4 = Employee('emp4', c1, 4, e3)
+ e4 = Employee('emp4', c1, 4, e3) # noqa
e5 = Employee('emp5', c2, 1)
- e6 = Employee('emp6', c2, 2, e5)
- e7 = Employee('emp7', c2, 3, e5)
+ e6 = Employee('emp6', c2, 2, e5) # noqa
+ e7 = Employee('emp7', c2, 3, e5) # noqa
sess.add_all((c1, c2))
sess.commit()
@@ -711,64 +811,66 @@ class CompositeSelfRefFKTest(fixtures.MappedTest):
assert test_e5.name == 'emp5', test_e5.name
assert [x.name for x in test_e1.employees] == ['emp2', 'emp3']
assert sess.query(Employee).\
- get([c1.company_id, 3]).reports_to.name == 'emp1'
+ get([c1.company_id, 3]).reports_to.name == 'emp1'
assert sess.query(Employee).\
- get([c2.company_id, 3]).reports_to.name == 'emp5'
+ get([c2.company_id, 3]).reports_to.name == 'emp5'
def _test_join_aliasing(self, sess):
Employee, Company = self.classes.Employee, self.classes.Company
eq_(
- [n for n, in sess.query(Employee.name).\
- join(Employee.reports_to, aliased=True).\
- filter_by(name='emp5').\
- reset_joinpoint().\
- order_by(Employee.name)],
+ [n for n, in sess.query(Employee.name).
+ join(Employee.reports_to, aliased=True).
+ filter_by(name='emp5').
+ reset_joinpoint().
+ order_by(Employee.name)],
['emp6', 'emp7']
)
-
class CompositeJoinPartialFK(fixtures.MappedTest, AssertsCompiledSQL):
__dialect__ = 'default'
+
@classmethod
def define_tables(cls, metadata):
Table("parent", metadata,
- Column('x', Integer, primary_key=True),
- Column('y', Integer, primary_key=True),
- Column('z', Integer),
- )
+ Column('x', Integer, primary_key=True),
+ Column('y', Integer, primary_key=True),
+ Column('z', Integer),
+ )
Table("child", metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('x', Integer),
- Column('y', Integer),
- Column('z', Integer),
- # note 'z' is not here
- sa.ForeignKeyConstraint(
- ["x", "y"],
- ["parent.x", "parent.y"]
- )
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('x', Integer),
+ Column('y', Integer),
+ Column('z', Integer),
+ # note 'z' is not here
+ sa.ForeignKeyConstraint(
+ ["x", "y"],
+ ["parent.x", "parent.y"]
+ )
+ )
+
@classmethod
def setup_mappers(cls):
parent, child = cls.tables.parent, cls.tables.child
+
class Parent(cls.Comparable):
pass
class Child(cls.Comparable):
pass
mapper(Parent, parent, properties={
- 'children':relationship(Child, primaryjoin=and_(
- parent.c.x==child.c.x,
- parent.c.y==child.c.y,
- parent.c.z==child.c.z,
+ 'children': relationship(Child, primaryjoin=and_(
+ parent.c.x == child.c.x,
+ parent.c.y == child.c.y,
+ parent.c.z == child.c.z,
))
})
mapper(Child, child)
def test_joins_fully(self):
Parent, Child = self.classes.Parent, self.classes.Child
- s = Session()
+
self.assert_compile(
Parent.children.property.strategy._lazywhere,
":param_1 = child.x AND :param_2 = child.y AND :param_3 = child.z"
@@ -776,19 +878,20 @@ class CompositeJoinPartialFK(fixtures.MappedTest, AssertsCompiledSQL):
class SynonymsAsFKsTest(fixtures.MappedTest):
+
"""Syncrules on foreign keys that are also primary"""
@classmethod
def define_tables(cls, metadata):
Table("tableA", metadata,
- Column("id",Integer,primary_key=True,
- test_needs_autoincrement=True),
- Column("foo",Integer,),
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("foo", Integer,),
test_needs_fk=True)
- Table("tableB",metadata,
- Column("id",Integer,primary_key=True,
- test_needs_autoincrement=True),
+ Table("tableB", metadata,
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column("_a_id", Integer, key='a_id', primary_key=True),
test_needs_fk=True)
@@ -798,6 +901,7 @@ class SynonymsAsFKsTest(fixtures.MappedTest):
pass
class B(cls.Basic):
+
@property
def a_id(self):
return self._a_id
@@ -832,18 +936,19 @@ class SynonymsAsFKsTest(fixtures.MappedTest):
class FKsAsPksTest(fixtures.MappedTest):
+
"""Syncrules on foreign keys that are also primary"""
@classmethod
def define_tables(cls, metadata):
Table("tableA", metadata,
- Column("id",Integer,primary_key=True,
- test_needs_autoincrement=True),
- Column("foo",Integer,),
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("foo", Integer,),
test_needs_fk=True)
- Table("tableB",metadata,
- Column("id",Integer,ForeignKey("tableA.id"),primary_key=True),
+ Table("tableB", metadata,
+ Column("id", Integer, ForeignKey("tableA.id"), primary_key=True),
test_needs_fk=True)
@classmethod
@@ -863,9 +968,8 @@ class FKsAsPksTest(fixtures.MappedTest):
self.classes.B,
self.tables.tableA)
-
mapper(A, tableA, properties={
- 'b':relationship(B, cascade="all,delete-orphan", uselist=False)})
+ 'b': relationship(B, cascade="all,delete-orphan", uselist=False)})
mapper(B, tableB)
configure_mappers()
@@ -890,7 +994,7 @@ class FKsAsPksTest(fixtures.MappedTest):
self.tables.tableA)
mapper(A, tableA, properties={
- 'bs':relationship(B, cascade="save-update")})
+ 'bs': relationship(B, cascade="save-update")})
mapper(B, tableB)
a1 = A()
@@ -915,7 +1019,7 @@ class FKsAsPksTest(fixtures.MappedTest):
self.tables.tableA)
mapper(B, tableB, properties={
- 'a':relationship(A, cascade="save-update")})
+ 'a': relationship(A, cascade="save-update")})
mapper(A, tableA)
b1 = B()
@@ -938,7 +1042,8 @@ class FKsAsPksTest(fixtures.MappedTest):
A, tableA = self.classes.A, self.tables.tableA
# postgresql cant handle a nullable PK column...?
- tableC = Table('tablec', tableA.metadata,
+ tableC = Table(
+ 'tablec', tableA.metadata,
Column('id', Integer, primary_key=True),
Column('a_id', Integer, ForeignKey('tableA.id'),
primary_key=True, autoincrement=False, nullable=True))
@@ -947,7 +1052,7 @@ class FKsAsPksTest(fixtures.MappedTest):
class C(fixtures.BasicEntity):
pass
mapper(C, tableC, properties={
- 'a':relationship(A, cascade="save-update")
+ 'a': relationship(A, cascade="save-update")
})
mapper(A, tableA)
@@ -968,12 +1073,11 @@ class FKsAsPksTest(fixtures.MappedTest):
self.classes.B,
self.tables.tableA)
-
for cascade in ("save-update, delete",
#"save-update, delete-orphan",
"save-update, delete, delete-orphan"):
mapper(B, tableB, properties={
- 'a':relationship(A, cascade=cascade, single_parent=True)
+ 'a': relationship(A, cascade=cascade, single_parent=True)
})
mapper(A, tableA)
@@ -999,12 +1103,11 @@ class FKsAsPksTest(fixtures.MappedTest):
self.classes.B,
self.tables.tableA)
-
for cascade in ("save-update, delete",
#"save-update, delete-orphan",
"save-update, delete, delete-orphan"):
mapper(A, tableA, properties={
- 'bs':relationship(B, cascade=cascade)
+ 'bs': relationship(B, cascade=cascade)
})
mapper(B, tableB)
@@ -1029,7 +1132,7 @@ class FKsAsPksTest(fixtures.MappedTest):
self.tables.tableA)
mapper(A, tableA, properties={
- 'bs':relationship(B, cascade="none")})
+ 'bs': relationship(B, cascade="none")})
mapper(B, tableB)
a1 = A()
@@ -1054,7 +1157,7 @@ class FKsAsPksTest(fixtures.MappedTest):
self.tables.tableA)
mapper(B, tableB, properties={
- 'a':relationship(A, cascade="none")})
+ 'a': relationship(A, cascade="none")})
mapper(A, tableA)
b1 = B()
@@ -1070,39 +1173,42 @@ class FKsAsPksTest(fixtures.MappedTest):
assert a1 not in sess
assert b1 not in sess
+
class UniqueColReferenceSwitchTest(fixtures.MappedTest):
+
"""test a relationship based on a primary
join against a unique non-pk column"""
@classmethod
def define_tables(cls, metadata):
Table("table_a", metadata,
- Column("id", Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column("ident", String(10), nullable=False,
- unique=True),
- )
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("ident", String(10), nullable=False,
+ unique=True),
+ )
Table("table_b", metadata,
- Column("id", Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column("a_ident", String(10),
- ForeignKey('table_a.ident'),
- nullable=False),
- )
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("a_ident", String(10),
+ ForeignKey('table_a.ident'),
+ nullable=False),
+ )
@classmethod
def setup_classes(cls):
class A(cls.Comparable):
pass
+
class B(cls.Comparable):
pass
def test_switch_parent(self):
A, B, table_b, table_a = (self.classes.A,
- self.classes.B,
- self.tables.table_b,
- self.tables.table_a)
+ self.classes.B,
+ self.tables.table_b,
+ self.tables.table_a)
mapper(A, table_a)
mapper(B, table_b, properties={"a": relationship(A, backref="bs")})
@@ -1122,7 +1228,9 @@ class UniqueColReferenceSwitchTest(fixtures.MappedTest):
session.delete(a1)
session.flush()
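
[note] For reference, a runnable reduction of the switch-parent scenario above: a relationship whose ForeignKey targets a unique non-primary-key column, so reassigning the parent rewrites the referencing value. Names and the in-memory SQLite URL are assumptions, not from the patch.

    from sqlalchemy import (MetaData, Table, Column, Integer, String,
                            ForeignKey, create_engine)
    from sqlalchemy.orm import mapper, relationship, Session

    metadata = MetaData()
    table_a = Table(
        'table_a', metadata,
        Column('id', Integer, primary_key=True),
        Column('ident', String(10), nullable=False, unique=True))
    table_b = Table(
        'table_b', metadata,
        Column('id', Integer, primary_key=True),
        Column('a_ident', String(10), ForeignKey('table_a.ident'),
               nullable=False))

    class A(object):
        pass

    class B(object):
        pass

    mapper(A, table_a)
    mapper(B, table_b, properties={'a': relationship(A, backref='bs')})

    engine = create_engine('sqlite://')
    metadata.create_all(engine)
    session = Session(engine)
    a1, a2 = A(), A()
    a1.ident, a2.ident = 'a1', 'a2'
    b1 = B()
    b1.a = a1
    session.add_all([a1, a2, b1])
    session.flush()
    # switching the parent updates table_b.a_ident to the new unique value
    b1.a = a2
    session.flush()
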
+
class RelationshipToSelectableTest(fixtures.MappedTest):
+
"""Test a map to a select that relates to a map to the table."""
@classmethod
@@ -1142,33 +1250,40 @@ class RelationshipToSelectableTest(fixtures.MappedTest):
class Container(fixtures.BasicEntity):
pass
+
class LineItem(fixtures.BasicEntity):
pass
container_select = sa.select(
[items.c.policyNum, items.c.policyEffDate, items.c.type],
distinct=True,
- ).alias('container_select')
+ ).alias('container_select')
mapper(LineItem, items)
- mapper(Container,
- container_select,
- order_by=sa.asc(container_select.c.type),
- properties=dict(
- lineItems=relationship(LineItem,
- lazy='select',
- cascade='all, delete-orphan',
- order_by=sa.asc(items.c.id),
- primaryjoin=sa.and_(
- container_select.c.policyNum==items.c.policyNum,
- container_select.c.policyEffDate==
- items.c.policyEffDate,
- container_select.c.type==items.c.type),
- foreign_keys=[
- items.c.policyNum,
- items.c.policyEffDate,
- items.c.type])))
+ mapper(
+ Container,
+ container_select,
+ order_by=sa.asc(container_select.c.type),
+ properties=dict(
+ lineItems=relationship(
+ LineItem,
+ lazy='select',
+ cascade='all, delete-orphan',
+ order_by=sa.asc(items.c.id),
+ primaryjoin=sa.and_(
+ container_select.c.policyNum == items.c.policyNum,
+ container_select.c.policyEffDate ==
+ items.c.policyEffDate,
+ container_select.c.type == items.c.type),
+ foreign_keys=[
+ items.c.policyNum,
+ items.c.policyEffDate,
+ items.c.type
+ ]
+ )
+ )
+ )
session = create_session()
con = Container()
@@ -1189,7 +1304,9 @@ class RelationshipToSelectableTest(fixtures.MappedTest):
for old, new in zip(con.lineItems, newcon.lineItems):
eq_(old.id, new.id)
+
class FKEquatedToConstantTest(fixtures.MappedTest):
+
"""test a relationship with a non-column entity in the primary join,
is not viewonly, and also has the non-column's clause mentioned in the
foreign keys list.
@@ -1199,31 +1316,32 @@ class FKEquatedToConstantTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('tags', metadata, Column("id", Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column("data", String(50)),
- )
+ test_needs_autoincrement=True),
+ Column("data", String(50)),
+ )
Table('tag_foo', metadata,
- Column("id", Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('tagid', Integer),
- Column("data", String(50)),
- )
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('tagid', Integer),
+ Column("data", String(50)),
+ )
def test_basic(self):
tag_foo, tags = self.tables.tag_foo, self.tables.tags
class Tag(fixtures.ComparableEntity):
pass
+
class TagInstance(fixtures.ComparableEntity):
pass
mapper(Tag, tags, properties={
- 'foo':relationship(TagInstance,
- primaryjoin=sa.and_(tag_foo.c.data=='iplc_case',
- tag_foo.c.tagid==tags.c.id),
- foreign_keys=[tag_foo.c.tagid, tag_foo.c.data],
- ),
+ 'foo': relationship(
+ TagInstance,
+ primaryjoin=sa.and_(tag_foo.c.data == 'iplc_case',
+ tag_foo.c.tagid == tags.c.id),
+ foreign_keys=[tag_foo.c.tagid, tag_foo.c.data]),
})
mapper(TagInstance, tag_foo)
@@ -1248,41 +1366,43 @@ class FKEquatedToConstantTest(fixtures.MappedTest):
[TagInstance(data='iplc_case'), TagInstance(data='not_iplc_case')]
)
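
[note] A minimal sketch of the technique this test covers: a constant comparison inside the primaryjoin whose column is also listed in foreign_keys, so the relationship stays writable and the flush populates the constant. Identifiers and the SQLite URL are illustrative.

    from sqlalchemy import (MetaData, Table, Column, Integer, String,
                            and_, create_engine)
    from sqlalchemy.orm import mapper, relationship, Session

    metadata = MetaData()
    tags = Table('tags', metadata,
                 Column('id', Integer, primary_key=True),
                 Column('data', String(50)))
    tag_foo = Table('tag_foo', metadata,
                    Column('id', Integer, primary_key=True),
                    Column('tagid', Integer),
                    Column('data', String(50)))

    class Tag(object):
        pass

    class TagInstance(object):
        pass

    # tag_foo.data appears in foreign_keys, so the sync rules write the
    # constant 'iplc_case' into it at flush time alongside tagid
    mapper(Tag, tags, properties={
        'foo': relationship(
            TagInstance,
            primaryjoin=and_(tag_foo.c.data == 'iplc_case',
                             tag_foo.c.tagid == tags.c.id),
            foreign_keys=[tag_foo.c.tagid, tag_foo.c.data])})
    mapper(TagInstance, tag_foo)

    engine = create_engine('sqlite://')
    metadata.create_all(engine)
    session = Session(engine)
    t1 = Tag()
    ti = TagInstance()
    t1.foo.append(ti)
    session.add(t1)
    session.flush()
    # ti.data is now 'iplc_case' and ti.tagid == t1.id
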
+
class BackrefPropagatesForwardsArgs(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('users', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(50))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50))
+ )
Table('addresses', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('user_id', Integer),
- Column('email', String(50))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('user_id', Integer),
+ Column('email', String(50))
+ )
@classmethod
def setup_classes(cls):
class User(cls.Comparable):
pass
+
class Address(cls.Comparable):
pass
def test_backref(self):
User, Address, users, addresses = (self.classes.User,
- self.classes.Address,
- self.tables.users,
- self.tables.addresses)
-
+ self.classes.Address,
+ self.tables.users,
+ self.tables.addresses)
mapper(User, users, properties={
- 'addresses':relationship(Address,
- primaryjoin=addresses.c.user_id==users.c.id,
- foreign_keys=addresses.c.user_id,
- backref='user')
+ 'addresses': relationship(
+ Address,
+ primaryjoin=addresses.c.user_id == users.c.id,
+ foreign_keys=addresses.c.user_id,
+ backref='user')
})
mapper(Address, addresses)
@@ -1292,9 +1412,11 @@ class BackrefPropagatesForwardsArgs(fixtures.MappedTest):
sess.commit()
eq_(sess.query(Address).all(), [
Address(email='a1', user=User(name='u1'))
- ])
+ ])
+
class AmbiguousJoinInterpretedAsSelfRef(fixtures.MappedTest):
+
"""test ambiguous joins due to FKs on both sides treated as
self-referential.
@@ -1307,25 +1429,28 @@ class AmbiguousJoinInterpretedAsSelfRef(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
- subscriber_table = Table('subscriber', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- )
-
- address_table = Table('address',
- metadata,
- Column('subscriber_id', Integer,
- ForeignKey('subscriber.id'), primary_key=True),
- Column('type', String(1), primary_key=True),
- )
+ Table(
+ 'subscriber', metadata,
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True))
+
+ Table(
+ 'address', metadata,
+ Column(
+ 'subscriber_id', Integer,
+ ForeignKey('subscriber.id'), primary_key=True),
+ Column('type', String(1), primary_key=True),
+ )
@classmethod
def setup_mappers(cls):
subscriber, address = cls.tables.subscriber, cls.tables.address
- subscriber_and_address = subscriber.join(address,
- and_(address.c.subscriber_id==subscriber.c.id,
- address.c.type.in_(['A', 'B', 'C'])))
+ subscriber_and_address = subscriber.join(
+ address,
+ and_(address.c.subscriber_id == subscriber.c.id,
+ address.c.type.in_(['A', 'B', 'C'])))
class Address(cls.Comparable):
pass
@@ -1336,10 +1461,10 @@ class AmbiguousJoinInterpretedAsSelfRef(fixtures.MappedTest):
mapper(Address, address)
mapper(Subscriber, subscriber_and_address, properties={
- 'id':[subscriber.c.id, address.c.subscriber_id],
- 'addresses' : relationship(Address,
- backref=backref("customer"))
- })
+ 'id': [subscriber.c.id, address.c.subscriber_id],
+ 'addresses': relationship(Address,
+ backref=backref("customer"))
+ })
def test_mapping(self):
Subscriber, Address = self.classes.Subscriber, self.classes.Address
@@ -1349,11 +1474,11 @@ class AmbiguousJoinInterpretedAsSelfRef(fixtures.MappedTest):
assert Address.customer.property.direction is MANYTOONE
s1 = Subscriber(type='A',
- addresses = [
- Address(type='D'),
- Address(type='E'),
- ]
- )
+ addresses=[
+ Address(type='D'),
+ Address(type='E'),
+ ]
+ )
a1 = Address(type='B', customer=Subscriber(type='C'))
assert s1.addresses[0].customer is s1
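
[note] Reduced to a standalone sketch, the mapping above resolves the would-be self-referential ambiguity (foreign keys pointing both ways once the class is mapped to the join) by folding the two equated primary key columns into a single 'id' property; the direction assertions use the public symbols, everything else mirrors the test.

    from sqlalchemy import (MetaData, Table, Column, Integer, String,
                            ForeignKey, and_)
    from sqlalchemy.orm import (mapper, relationship, backref,
                                configure_mappers)
    from sqlalchemy.orm.interfaces import ONETOMANY, MANYTOONE

    metadata = MetaData()
    subscriber = Table(
        'subscriber', metadata,
        Column('id', Integer, primary_key=True))
    address = Table(
        'address', metadata,
        Column('subscriber_id', Integer, ForeignKey('subscriber.id'),
               primary_key=True),
        Column('type', String(1), primary_key=True))

    subscriber_and_address = subscriber.join(
        address, and_(address.c.subscriber_id == subscriber.c.id,
                      address.c.type.in_(['A', 'B', 'C'])))

    class Address(object):
        pass

    class Subscriber(object):
        pass

    mapper(Address, address)
    mapper(Subscriber, subscriber_and_address, properties={
        # fold the equated columns into one attribute so the join
        # target is unambiguous
        'id': [subscriber.c.id, address.c.subscriber_id],
        'addresses': relationship(Address, backref=backref('customer'))})
    configure_mappers()

    assert Subscriber.addresses.property.direction is ONETOMANY
    assert Address.customer.property.direction is MANYTOONE
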
@@ -1375,22 +1500,23 @@ class AmbiguousJoinInterpretedAsSelfRef(fixtures.MappedTest):
class ManualBackrefTest(_fixtures.FixtureTest):
+
"""Test explicit relationships that are backrefs to each other."""
run_inserts = None
def test_o2m(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(Address, back_populates='user')
+ 'addresses': relationship(Address, back_populates='user')
})
mapper(Address, addresses, properties={
- 'user':relationship(User, back_populates='addresses')
+ 'user': relationship(User, back_populates='addresses')
})
sess = create_session()
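
[note] The same two-way wiring, reduced to a runnable sketch: each relationship names the other via back_populates, giving backref-style in-Python mirroring without the implicit attribute creation. Table shapes are illustrative.

    from sqlalchemy import (MetaData, Table, Column, Integer, String,
                            ForeignKey)
    from sqlalchemy.orm import mapper, relationship, configure_mappers

    metadata = MetaData()
    users = Table('users', metadata,
                  Column('id', Integer, primary_key=True),
                  Column('name', String(30)))
    addresses = Table('addresses', metadata,
                      Column('id', Integer, primary_key=True),
                      Column('user_id', Integer, ForeignKey('users.id')),
                      Column('email', String(50)))

    class User(object):
        pass

    class Address(object):
        pass

    # each side must name the other's key, or configure_mappers()
    # raises InvalidRequestError (the test_invalid_key case below)
    mapper(User, users, properties={
        'addresses': relationship(Address, back_populates='user')})
    mapper(Address, addresses, properties={
        'user': relationship(User, back_populates='addresses')})
    configure_mappers()

    u1 = User()
    a1 = Address()
    u1.addresses.append(a1)
    assert a1.user is u1    # mirrored in Python, like a backref
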
@@ -1409,52 +1535,56 @@ class ManualBackrefTest(_fixtures.FixtureTest):
def test_invalid_key(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(Address, back_populates='userr')
+ 'addresses': relationship(Address, back_populates='userr')
})
mapper(Address, addresses, properties={
- 'user':relationship(User, back_populates='addresses')
+ 'user': relationship(User, back_populates='addresses')
})
assert_raises(sa.exc.InvalidRequestError, configure_mappers)
def test_invalid_target(self):
- addresses, Dingaling, User, dingalings, Address, users = (self.tables.addresses,
- self.classes.Dingaling,
- self.classes.User,
- self.tables.dingalings,
- self.classes.Address,
- self.tables.users)
+ addresses, Dingaling, User, dingalings, Address, users = (
+ self.tables.addresses,
+ self.classes.Dingaling,
+ self.classes.User,
+ self.tables.dingalings,
+ self.classes.Address,
+ self.tables.users)
mapper(User, users, properties={
- 'addresses':relationship(Address, back_populates='dingaling'),
+ 'addresses': relationship(Address, back_populates='dingaling'),
})
mapper(Dingaling, dingalings)
mapper(Address, addresses, properties={
- 'dingaling':relationship(Dingaling)
+ 'dingaling': relationship(Dingaling)
})
assert_raises_message(sa.exc.ArgumentError,
- r"reverse_property 'dingaling' on relationship "
- "User.addresses references "
- "relationship Address.dingaling, which does not "
- "reference mapper Mapper\|User\|users",
- configure_mappers)
+ r"reverse_property 'dingaling' on relationship "
+ "User.addresses references "
+ "relationship Address.dingaling, which does not "
+ "reference mapper Mapper\|User\|users",
+ configure_mappers)
+
class JoinConditionErrorTest(fixtures.TestBase):
def test_clauseelement_pj(self):
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
+
class C1(Base):
__tablename__ = 'c1'
id = Column('id', Integer, primary_key=True)
+
class C2(Base):
__tablename__ = 'c2'
id = Column('id', Integer, primary_key=True)
@@ -1466,39 +1596,42 @@ class JoinConditionErrorTest(fixtures.TestBase):
def test_clauseelement_pj_false(self):
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
+
class C1(Base):
__tablename__ = 'c1'
id = Column('id', Integer, primary_key=True)
+
class C2(Base):
__tablename__ = 'c2'
id = Column('id', Integer, primary_key=True)
c1id = Column('c1id', Integer, ForeignKey('c1.id'))
- c2 = relationship(C1, primaryjoin="x"=="y")
+ c2 = relationship(C1, primaryjoin="x" == "y")
assert_raises(sa.exc.ArgumentError, configure_mappers)
def test_only_column_elements(self):
m = MetaData()
t1 = Table('t1', m,
- Column('id', Integer, primary_key=True),
- Column('foo_id', Integer, ForeignKey('t2.id')),
- )
+ Column('id', Integer, primary_key=True),
+ Column('foo_id', Integer, ForeignKey('t2.id')),
+ )
t2 = Table('t2', m,
- Column('id', Integer, primary_key=True),
- )
+ Column('id', Integer, primary_key=True),
+ )
+
class C1(object):
pass
+
class C2(object):
pass
- mapper(C1, t1, properties={'c2':relationship(C2,
- primaryjoin=t1.join(t2))})
+ mapper(C1, t1, properties={
+ 'c2': relationship(C2, primaryjoin=t1.join(t2))})
mapper(C2, t2)
assert_raises(sa.exc.ArgumentError, configure_mappers)
def test_invalid_string_args(self):
from sqlalchemy.ext.declarative import declarative_base
- from sqlalchemy import util
for argname, arg in [
('remote_side', ['c1.id']),
@@ -1508,8 +1641,9 @@ class JoinConditionErrorTest(fixtures.TestBase):
('order_by', ['id']),
]:
clear_mappers()
- kw = {argname:arg}
+ kw = {argname: arg}
Base = declarative_base()
+
class C1(Base):
__tablename__ = 'c1'
id = Column('id', Integer, primary_key=True)
@@ -1527,51 +1661,52 @@ class JoinConditionErrorTest(fixtures.TestBase):
(argname, arg[0], type(arg[0])),
configure_mappers)
-
def test_fk_error_not_raised_unrelated(self):
m = MetaData()
t1 = Table('t1', m,
- Column('id', Integer, primary_key=True),
- Column('foo_id', Integer, ForeignKey('t2.nonexistent_id')),
- )
- t2 = Table('t2', m,
- Column('id', Integer, primary_key=True),
- )
+ Column('id', Integer, primary_key=True),
+ Column('foo_id', Integer, ForeignKey('t2.nonexistent_id')),
+ )
+ t2 = Table('t2', m, # noqa
+ Column('id', Integer, primary_key=True),
+ )
t3 = Table('t3', m,
- Column('id', Integer, primary_key=True),
- Column('t1id', Integer, ForeignKey('t1.id'))
- )
+ Column('id', Integer, primary_key=True),
+ Column('t1id', Integer, ForeignKey('t1.id'))
+ )
class C1(object):
pass
+
class C2(object):
pass
- mapper(C1, t1, properties={'c2':relationship(C2)})
+ mapper(C1, t1, properties={'c2': relationship(C2)})
mapper(C2, t3)
- assert C1.c2.property.primaryjoin.compare(t1.c.id==t3.c.t1id)
+ assert C1.c2.property.primaryjoin.compare(t1.c.id == t3.c.t1id)
def test_join_error_raised(self):
m = MetaData()
t1 = Table('t1', m,
- Column('id', Integer, primary_key=True),
- )
- t2 = Table('t2', m,
- Column('id', Integer, primary_key=True),
- )
+ Column('id', Integer, primary_key=True),
+ )
+ t2 = Table('t2', m, # noqa
+ Column('id', Integer, primary_key=True),
+ )
t3 = Table('t3', m,
- Column('id', Integer, primary_key=True),
- Column('t1id', Integer)
- )
+ Column('id', Integer, primary_key=True),
+ Column('t1id', Integer)
+ )
class C1(object):
pass
+
class C2(object):
pass
- mapper(C1, t1, properties={'c2':relationship(C2)})
+ mapper(C1, t1, properties={'c2': relationship(C2)})
mapper(C2, t3)
assert_raises(sa.exc.ArgumentError, configure_mappers)
@@ -1579,7 +1714,9 @@ class JoinConditionErrorTest(fixtures.TestBase):
def teardown(self):
clear_mappers()
+
class TypeMatchTest(fixtures.MappedTest):
+
"""test errors raised when trying to add items
whose type is not handled by a relationship"""
@@ -1587,33 +1724,38 @@ class TypeMatchTest(fixtures.MappedTest):
def define_tables(cls, metadata):
Table("a", metadata,
Column('aid', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column('adata', String(30)))
Table("b", metadata,
- Column('bid', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column("a_id", Integer, ForeignKey("a.aid")),
- Column('bdata', String(30)))
+ Column('bid', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("a_id", Integer, ForeignKey("a.aid")),
+ Column('bdata', String(30)))
Table("c", metadata,
Column('cid', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column("b_id", Integer, ForeignKey("b.bid")),
Column('cdata', String(30)))
Table("d", metadata,
Column('did', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column("a_id", Integer, ForeignKey("a.aid")),
Column('ddata', String(30)))
def test_o2m_oncascade(self):
a, c, b = (self.tables.a,
- self.tables.c,
- self.tables.b)
+ self.tables.c,
+ self.tables.b)
- class A(fixtures.BasicEntity): pass
- class B(fixtures.BasicEntity): pass
- class C(fixtures.BasicEntity): pass
- mapper(A, a, properties={'bs':relationship(B)})
+ class A(fixtures.BasicEntity):
+ pass
+
+ class B(fixtures.BasicEntity):
+ pass
+
+ class C(fixtures.BasicEntity):
+ pass
+ mapper(A, a, properties={'bs': relationship(B)})
mapper(B, b)
mapper(C, c)
@@ -1633,13 +1775,18 @@ class TypeMatchTest(fixtures.MappedTest):
def test_o2m_onflush(self):
a, c, b = (self.tables.a,
- self.tables.c,
- self.tables.b)
+ self.tables.c,
+ self.tables.b)
- class A(fixtures.BasicEntity): pass
- class B(fixtures.BasicEntity): pass
- class C(fixtures.BasicEntity): pass
- mapper(A, a, properties={'bs':relationship(B, cascade="none")})
+ class A(fixtures.BasicEntity):
+ pass
+
+ class B(fixtures.BasicEntity):
+ pass
+
+ class C(fixtures.BasicEntity):
+ pass
+ mapper(A, a, properties={'bs': relationship(B, cascade="none")})
mapper(B, b)
mapper(C, c)
@@ -1653,18 +1800,23 @@ class TypeMatchTest(fixtures.MappedTest):
sess.add(b1)
sess.add(c1)
assert_raises_message(sa.orm.exc.FlushError,
- "Attempting to flush an item",
- sess.flush)
+ "Attempting to flush an item",
+ sess.flush)
def test_o2m_nopoly_onflush(self):
a, c, b = (self.tables.a,
- self.tables.c,
- self.tables.b)
+ self.tables.c,
+ self.tables.b)
+
+ class A(fixtures.BasicEntity):
+ pass
- class A(fixtures.BasicEntity): pass
- class B(fixtures.BasicEntity): pass
- class C(B): pass
- mapper(A, a, properties={'bs':relationship(B, cascade="none")})
+ class B(fixtures.BasicEntity):
+ pass
+
+ class C(B):
+ pass
+ mapper(A, a, properties={'bs': relationship(B, cascade="none")})
mapper(B, b)
mapper(C, c, inherits=B)
@@ -1678,20 +1830,25 @@ class TypeMatchTest(fixtures.MappedTest):
sess.add(b1)
sess.add(c1)
assert_raises_message(sa.orm.exc.FlushError,
- "Attempting to flush an item",
- sess.flush)
+ "Attempting to flush an item",
+ sess.flush)
def test_m2o_nopoly_onflush(self):
a, b, d = (self.tables.a,
- self.tables.b,
- self.tables.d)
+ self.tables.b,
+ self.tables.d)
+
+ class A(fixtures.BasicEntity):
+ pass
- class A(fixtures.BasicEntity): pass
- class B(A): pass
- class D(fixtures.BasicEntity): pass
+ class B(A):
+ pass
+
+ class D(fixtures.BasicEntity):
+ pass
mapper(A, a)
mapper(B, b, inherits=A)
- mapper(D, d, properties={"a":relationship(A, cascade="none")})
+ mapper(D, d, properties={"a": relationship(A, cascade="none")})
b1 = B()
d1 = D()
d1.a = b1
@@ -1699,27 +1856,33 @@ class TypeMatchTest(fixtures.MappedTest):
sess.add(b1)
sess.add(d1)
assert_raises_message(sa.orm.exc.FlushError,
- "Attempting to flush an item",
- sess.flush)
+ "Attempting to flush an item",
+ sess.flush)
def test_m2o_oncascade(self):
a, b, d = (self.tables.a,
- self.tables.b,
- self.tables.d)
+ self.tables.b,
+ self.tables.d)
- class A(fixtures.BasicEntity): pass
- class B(fixtures.BasicEntity): pass
- class D(fixtures.BasicEntity): pass
+ class A(fixtures.BasicEntity):
+ pass
+
+ class B(fixtures.BasicEntity):
+ pass
+
+ class D(fixtures.BasicEntity):
+ pass
mapper(A, a)
mapper(B, b)
- mapper(D, d, properties={"a":relationship(A)})
+ mapper(D, d, properties={"a": relationship(A)})
b1 = B()
d1 = D()
d1.a = b1
sess = create_session()
assert_raises_message(AssertionError,
- "doesn't handle objects of type",
- sess.add, d1)
+ "doesn't handle objects of type",
+ sess.add, d1)
+
class TypedAssociationTable(fixtures.MappedTest):
@@ -1727,8 +1890,10 @@ class TypedAssociationTable(fixtures.MappedTest):
def define_tables(cls, metadata):
class MySpecialType(sa.types.TypeDecorator):
impl = String
+
def process_bind_param(self, value, dialect):
return "lala" + value
+
def process_result_value(self, value, dialect):
return value[4:]
@@ -1746,15 +1911,17 @@ class TypedAssociationTable(fixtures.MappedTest):
"""Many-to-many tables with special types for candidate keys."""
t2, t3, t1 = (self.tables.t2,
- self.tables.t3,
- self.tables.t1)
+ self.tables.t3,
+ self.tables.t1)
+ class T1(fixtures.BasicEntity):
+ pass
- class T1(fixtures.BasicEntity): pass
- class T2(fixtures.BasicEntity): pass
+ class T2(fixtures.BasicEntity):
+ pass
mapper(T2, t2)
mapper(T1, t1, properties={
- 't2s':relationship(T2, secondary=t3, backref='t1s')})
+ 't2s': relationship(T2, secondary=t3, backref='t1s')})
a = T1()
a.col1 = "aid"
@@ -1775,7 +1942,9 @@ class TypedAssociationTable(fixtures.MappedTest):
assert t3.count().scalar() == 1
+
class CustomOperatorTest(fixtures.MappedTest, AssertsCompiledSQL):
+
"""test op() in conjunction with join conditions"""
run_create_tables = run_deletes = None
@@ -1785,47 +1954,50 @@ class CustomOperatorTest(fixtures.MappedTest, AssertsCompiledSQL):
@classmethod
def define_tables(cls, metadata):
Table('a', metadata,
- Column('id', Integer, primary_key=True),
- Column('foo', String(50))
- )
+ Column('id', Integer, primary_key=True),
+ Column('foo', String(50))
+ )
Table('b', metadata,
- Column('id', Integer, primary_key=True),
- Column('foo', String(50))
- )
+ Column('id', Integer, primary_key=True),
+ Column('foo', String(50))
+ )
def test_join_on_custom_op(self):
class A(fixtures.BasicEntity):
pass
+
class B(fixtures.BasicEntity):
pass
mapper(A, self.tables.a, properties={
- 'bs': relationship(B,
- primaryjoin=self.tables.a.c.foo.op(
- '&*', is_comparison=True
- )(foreign(self.tables.b.c.foo)),
- viewonly=True
- )
- })
+ 'bs': relationship(B,
+ primaryjoin=self.tables.a.c.foo.op(
+ '&*', is_comparison=True
+ )(foreign(self.tables.b.c.foo)),
+ viewonly=True
+ )
+ })
mapper(B, self.tables.b)
self.assert_compile(
Session().query(A).join(A.bs),
- "SELECT a.id AS a_id, a.foo AS a_foo FROM a JOIN b ON a.foo &* b.foo"
+ "SELECT a.id AS a_id, a.foo AS a_foo "
+ "FROM a JOIN b ON a.foo &* b.foo"
)
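
[note] A standalone sketch of the custom-operator join condition compiled above; is_comparison=True marks the op() result as a boolean comparator, which relationship() requires, and viewonly=True is needed since no foreign key sync rules can be derived from "&*". Names are illustrative.

    from sqlalchemy import MetaData, Table, Column, Integer, String
    from sqlalchemy.orm import (mapper, relationship, foreign, Session,
                                configure_mappers)

    metadata = MetaData()
    a = Table('a', metadata,
              Column('id', Integer, primary_key=True),
              Column('foo', String(50)))
    b = Table('b', metadata,
              Column('id', Integer, primary_key=True),
              Column('foo', String(50)))

    class A(object):
        pass

    class B(object):
        pass

    mapper(A, a, properties={
        'bs': relationship(
            B,
            primaryjoin=a.c.foo.op('&*', is_comparison=True)(
                foreign(b.c.foo)),
            viewonly=True)})
    mapper(B, b)
    configure_mappers()

    print(Session().query(A).join(A.bs))
    # SELECT a.id AS a_id, a.foo AS a_foo FROM a JOIN b ON a.foo &* b.foo
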
class ViewOnlyHistoryTest(fixtures.MappedTest):
+
@classmethod
def define_tables(cls, metadata):
Table("t1", metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(40)))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(40)))
Table("t2", metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(40)),
- Column('t1id', Integer, ForeignKey('t1.id')))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(40)),
+ Column('t1id', Integer, ForeignKey('t1.id')))
def _assert_fk(self, a1, b1, is_set):
s = Session(testing.db)
@@ -1842,12 +2014,13 @@ class ViewOnlyHistoryTest(fixtures.MappedTest):
def test_o2m_viewonly_oneside(self):
class A(fixtures.ComparableEntity):
pass
+
class B(fixtures.ComparableEntity):
pass
mapper(A, self.tables.t1, properties={
"bs": relationship(B, viewonly=True,
- backref=backref("a", viewonly=False))
+ backref=backref("a", viewonly=False))
})
mapper(B, self.tables.t2)
@@ -1867,12 +2040,13 @@ class ViewOnlyHistoryTest(fixtures.MappedTest):
def test_m2o_viewonly_oneside(self):
class A(fixtures.ComparableEntity):
pass
+
class B(fixtures.ComparableEntity):
pass
mapper(A, self.tables.t1, properties={
"bs": relationship(B, viewonly=False,
- backref=backref("a", viewonly=True))
+ backref=backref("a", viewonly=True))
})
mapper(B, self.tables.t2)
@@ -1892,6 +2066,7 @@ class ViewOnlyHistoryTest(fixtures.MappedTest):
def test_o2m_viewonly_only(self):
class A(fixtures.ComparableEntity):
pass
+
class B(fixtures.ComparableEntity):
pass
@@ -1910,13 +2085,14 @@ class ViewOnlyHistoryTest(fixtures.MappedTest):
def test_m2o_viewonly_only(self):
class A(fixtures.ComparableEntity):
pass
+
class B(fixtures.ComparableEntity):
pass
mapper(A, self.tables.t1)
mapper(B, self.tables.t2, properties={
'a': relationship(A, viewonly=True)
- })
+ })
a1 = A()
b1 = B()
@@ -1925,34 +2101,39 @@ class ViewOnlyHistoryTest(fixtures.MappedTest):
self._assert_fk(a1, b1, False)
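
[note] A sketch of the history behavior these tests pin down, under this era's semantics (later SQLAlchemy versions warn when mutating a viewonly collection): appends on the viewonly side record no change history, so the flush leaves the foreign key column unset. The SQLite URL is an assumption.

    from sqlalchemy import (MetaData, Table, Column, Integer, ForeignKey,
                            create_engine)
    from sqlalchemy.orm import mapper, relationship, Session

    metadata = MetaData()
    t1 = Table('t1', metadata, Column('id', Integer, primary_key=True))
    t2 = Table('t2', metadata,
               Column('id', Integer, primary_key=True),
               Column('t1id', Integer, ForeignKey('t1.id')))

    class A(object):
        pass

    class B(object):
        pass

    mapper(A, t1, properties={'bs': relationship(B, viewonly=True)})
    mapper(B, t2)

    engine = create_engine('sqlite://')
    metadata.create_all(engine)
    sess = Session(engine)
    a1, b1 = A(), B()
    a1.bs.append(b1)        # no history recorded on a viewonly side
    sess.add(a1)
    sess.add(b1)
    sess.flush()
    assert b1.t1id is None  # nothing was synchronized at flush
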
+
class ViewOnlyM2MBackrefTest(fixtures.MappedTest):
+
@classmethod
def define_tables(cls, metadata):
Table("t1", metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(40)))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(40)))
Table("t2", metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(40)),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(40)),
+ )
Table("t1t2", metadata,
- Column('t1id', Integer, ForeignKey('t1.id'), primary_key=True),
- Column('t2id', Integer, ForeignKey('t2.id'), primary_key=True),
- )
+ Column('t1id', Integer, ForeignKey('t1.id'), primary_key=True),
+ Column('t2id', Integer, ForeignKey('t2.id'), primary_key=True),
+ )
def test_viewonly(self):
t1t2, t2, t1 = (self.tables.t1t2,
- self.tables.t2,
- self.tables.t1)
+ self.tables.t2,
+ self.tables.t1)
- class A(fixtures.ComparableEntity):pass
- class B(fixtures.ComparableEntity):pass
+ class A(fixtures.ComparableEntity):
+ pass
+
+ class B(fixtures.ComparableEntity):
+ pass
mapper(A, t1, properties={
- 'bs':relationship(B, secondary=t1t2,
- backref=backref('as_', viewonly=True))
+ 'bs': relationship(B, secondary=t1t2,
+ backref=backref('as_', viewonly=True))
})
mapper(B, t2)
@@ -1971,25 +2152,27 @@ class ViewOnlyM2MBackrefTest(fixtures.MappedTest):
sess.query(B).first(), B(as_=[A(id=a1.id)])
)
+
class ViewOnlyOverlappingNames(fixtures.MappedTest):
+
"""'viewonly' mappings with overlapping PK column names."""
@classmethod
def define_tables(cls, metadata):
Table("t1", metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(40)))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(40)))
Table("t2", metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(40)),
- Column('t1id', Integer, ForeignKey('t1.id')))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(40)),
+ Column('t1id', Integer, ForeignKey('t1.id')))
Table("t3", metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(40)),
- Column('t2id', Integer, ForeignKey('t2.id')))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(40)),
+ Column('t2id', Integer, ForeignKey('t2.id')))
def test_three_table_view(self):
"""A three table join with overlapping PK names.
@@ -2001,23 +2184,29 @@ class ViewOnlyOverlappingNames(fixtures.MappedTest):
"""
t2, t3, t1 = (self.tables.t2,
- self.tables.t3,
- self.tables.t1)
+ self.tables.t3,
+ self.tables.t1)
+
+ class C1(fixtures.BasicEntity):
+ pass
+
+ class C2(fixtures.BasicEntity):
+ pass
- class C1(fixtures.BasicEntity): pass
- class C2(fixtures.BasicEntity): pass
- class C3(fixtures.BasicEntity): pass
+ class C3(fixtures.BasicEntity):
+ pass
mapper(C1, t1, properties={
- 't2s':relationship(C2),
- 't2_view':relationship(C2,
- viewonly=True,
- primaryjoin=sa.and_(t1.c.id==t2.c.t1id,
- t3.c.t2id==t2.c.id,
- t3.c.data==t1.c.data))})
+ 't2s': relationship(C2),
+ 't2_view': relationship(
+ C2,
+ viewonly=True,
+ primaryjoin=sa.and_(t1.c.id == t2.c.t1id,
+ t3.c.t2id == t2.c.id,
+ t3.c.data == t1.c.data))})
mapper(C2, t2)
mapper(C3, t3, properties={
- 't2':relationship(C2)})
+ 't2': relationship(C2)})
c1 = C1()
c1.data = 'c1data'
@@ -2026,7 +2215,7 @@ class ViewOnlyOverlappingNames(fixtures.MappedTest):
c2b = C2()
c1.t2s.append(c2b)
c3 = C3()
- c3.data='c1data'
+ c3.data = 'c1data'
c3.t2 = c2b
sess = create_session()
sess.add(c1)
@@ -2038,25 +2227,27 @@ class ViewOnlyOverlappingNames(fixtures.MappedTest):
assert set([x.id for x in c1.t2s]) == set([c2a.id, c2b.id])
assert set([x.id for x in c1.t2_view]) == set([c2b.id])
+
class ViewOnlyUniqueNames(fixtures.MappedTest):
+
"""'viewonly' mappings with unique PK column names."""
@classmethod
def define_tables(cls, metadata):
Table("t1", metadata,
- Column('t1id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(40)))
+ Column('t1id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(40)))
Table("t2", metadata,
- Column('t2id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(40)),
- Column('t1id_ref', Integer, ForeignKey('t1.t1id')))
+ Column('t2id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(40)),
+ Column('t1id_ref', Integer, ForeignKey('t1.t1id')))
Table("t3", metadata,
- Column('t3id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(40)),
- Column('t2id_ref', Integer, ForeignKey('t2.t2id')))
+ Column('t3id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(40)),
+ Column('t2id_ref', Integer, ForeignKey('t2.t2id')))
def test_three_table_view(self):
"""A three table join with overlapping PK names.
@@ -2067,23 +2258,29 @@ class ViewOnlyUniqueNames(fixtures.MappedTest):
"""
t2, t3, t1 = (self.tables.t2,
- self.tables.t3,
- self.tables.t1)
+ self.tables.t3,
+ self.tables.t1)
+
+ class C1(fixtures.BasicEntity):
+ pass
+
+ class C2(fixtures.BasicEntity):
+ pass
- class C1(fixtures.BasicEntity): pass
- class C2(fixtures.BasicEntity): pass
- class C3(fixtures.BasicEntity): pass
+ class C3(fixtures.BasicEntity):
+ pass
mapper(C1, t1, properties={
- 't2s':relationship(C2),
- 't2_view':relationship(C2,
- viewonly=True,
- primaryjoin=sa.and_(t1.c.t1id==t2.c.t1id_ref,
- t3.c.t2id_ref==t2.c.t2id,
- t3.c.data==t1.c.data))})
+ 't2s': relationship(C2),
+ 't2_view': relationship(
+ C2,
+ viewonly=True,
+ primaryjoin=sa.and_(t1.c.t1id == t2.c.t1id_ref,
+ t3.c.t2id_ref == t2.c.t2id,
+ t3.c.data == t1.c.data))})
mapper(C2, t2)
mapper(C3, t3, properties={
- 't2':relationship(C2)})
+ 't2': relationship(C2)})
c1 = C1()
c1.data = 'c1data'
@@ -2092,7 +2289,7 @@ class ViewOnlyUniqueNames(fixtures.MappedTest):
c2b = C2()
c1.t2s.append(c2b)
c3 = C3()
- c3.data='c1data'
+ c3.data = 'c1data'
c3.t2 = c2b
sess = create_session()
@@ -2104,30 +2301,35 @@ class ViewOnlyUniqueNames(fixtures.MappedTest):
assert set([x.t2id for x in c1.t2s]) == set([c2a.t2id, c2b.t2id])
assert set([x.t2id for x in c1.t2_view]) == set([c2b.t2id])
+
class ViewOnlyLocalRemoteM2M(fixtures.TestBase):
+
"""test that local-remote is correctly determined for m2m"""
def test_local_remote(self):
meta = MetaData()
t1 = Table('t1', meta,
- Column('id', Integer, primary_key=True),
- )
+ Column('id', Integer, primary_key=True),
+ )
t2 = Table('t2', meta,
- Column('id', Integer, primary_key=True),
- )
+ Column('id', Integer, primary_key=True),
+ )
t12 = Table('tab', meta,
- Column('t1_id', Integer, ForeignKey('t1.id',)),
- Column('t2_id', Integer, ForeignKey('t2.id',)),
- )
+ Column('t1_id', Integer, ForeignKey('t1.id',)),
+ Column('t2_id', Integer, ForeignKey('t2.id',)),
+ )
- class A(object): pass
- class B(object): pass
- mapper( B, t2, )
- m = mapper( A, t1, properties=dict(
- b_view = relationship( B, secondary=t12, viewonly=True),
- b_plain= relationship( B, secondary=t12),
- )
+ class A(object):
+ pass
+
+ class B(object):
+ pass
+ mapper(B, t2, )
+ m = mapper(A, t1, properties=dict(
+ b_view=relationship(B, secondary=t12, viewonly=True),
+ b_plain=relationship(B, secondary=t12),
+ )
)
configure_mappers()
assert m.get_property('b_view').local_remote_pairs == \
@@ -2135,31 +2337,32 @@ class ViewOnlyLocalRemoteM2M(fixtures.TestBase):
[(t1.c.id, t12.c.t1_id), (t2.c.id, t12.c.t2_id)]
-
class ViewOnlyNonEquijoin(fixtures.MappedTest):
+
"""'viewonly' mappings based on non-equijoins."""
@classmethod
def define_tables(cls, metadata):
Table('foos', metadata,
- Column('id', Integer, primary_key=True))
+ Column('id', Integer, primary_key=True))
Table('bars', metadata,
- Column('id', Integer, primary_key=True),
- Column('fid', Integer))
+ Column('id', Integer, primary_key=True),
+ Column('fid', Integer))
def test_viewonly_join(self):
bars, foos = self.tables.bars, self.tables.foos
class Foo(fixtures.ComparableEntity):
pass
+
class Bar(fixtures.ComparableEntity):
pass
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- primaryjoin=foos.c.id > bars.c.fid,
- foreign_keys=[bars.c.fid],
- viewonly=True)})
+ 'bars': relationship(Bar,
+ primaryjoin=foos.c.id > bars.c.fid,
+ foreign_keys=[bars.c.fid],
+ viewonly=True)})
mapper(Bar, bars)
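
[note] Reduced to a runnable sketch: a greater-than join cannot be persisted, so it must be viewonly, and foreign_keys tells the ORM which column plays the foreign side of the comparison for lazy loading. Data values and the SQLite URL are illustrative; engine.execute() is the 1.x-era API.

    from sqlalchemy import (MetaData, Table, Column, Integer,
                            create_engine)
    from sqlalchemy.orm import mapper, relationship, Session

    metadata = MetaData()
    foos = Table('foos', metadata, Column('id', Integer, primary_key=True))
    bars = Table('bars', metadata,
                 Column('id', Integer, primary_key=True),
                 Column('fid', Integer))

    class Foo(object):
        pass

    class Bar(object):
        pass

    mapper(Foo, foos, properties={
        'bars': relationship(Bar,
                             primaryjoin=foos.c.id > bars.c.fid,
                             foreign_keys=[bars.c.fid],
                             viewonly=True)})
    mapper(Bar, bars)

    engine = create_engine('sqlite://')
    metadata.create_all(engine)
    engine.execute(foos.insert(), [{'id': 3}])
    engine.execute(bars.insert(), [{'id': 1, 'fid': 1},
                                   {'id': 2, 'fid': 5}])

    session = Session(engine)
    f3 = session.query(Foo).get(3)
    print([bar.fid for bar in f3.bars])   # [1], only fid < 3 qualifies
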
@@ -2180,17 +2383,22 @@ class ViewOnlyNonEquijoin(fixtures.MappedTest):
class ViewOnlyRepeatedRemoteColumn(fixtures.MappedTest):
+
"""'viewonly' mappings that contain the same 'remote' column twice"""
@classmethod
def define_tables(cls, metadata):
Table('foos', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('bid1', Integer,ForeignKey('bars.id')),
- Column('bid2', Integer,ForeignKey('bars.id')))
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('bid1', Integer, ForeignKey('bars.id')),
+ Column('bid2', Integer, ForeignKey('bars.id')))
Table('bars', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(50)))
def test_relationship_on_or(self):
@@ -2198,15 +2406,16 @@ class ViewOnlyRepeatedRemoteColumn(fixtures.MappedTest):
class Foo(fixtures.ComparableEntity):
pass
+
class Bar(fixtures.ComparableEntity):
pass
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- primaryjoin=sa.or_(bars.c.id == foos.c.bid1,
- bars.c.id == foos.c.bid2),
- uselist=True,
- viewonly=True)})
+ 'bars': relationship(Bar,
+ primaryjoin=sa.or_(bars.c.id == foos.c.bid1,
+ bars.c.id == foos.c.bid2),
+ uselist=True,
+ viewonly=True)})
mapper(Bar, bars)
sess = create_session()
@@ -2228,18 +2437,20 @@ class ViewOnlyRepeatedRemoteColumn(fixtures.MappedTest):
eq_(sess.query(Foo).filter_by(id=f2.id).one(),
Foo(bars=[Bar(data='b3')]))
+
class ViewOnlyRepeatedLocalColumn(fixtures.MappedTest):
+
"""'viewonly' mappings that contain the same 'local' column twice"""
@classmethod
def define_tables(cls, metadata):
Table('foos', metadata,
Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column('data', String(50)))
Table('bars', metadata, Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column('fid1', Integer, ForeignKey('foos.id')),
Column('fid2', Integer, ForeignKey('foos.id')),
Column('data', String(50)))
@@ -2249,14 +2460,15 @@ class ViewOnlyRepeatedLocalColumn(fixtures.MappedTest):
class Foo(fixtures.ComparableEntity):
pass
+
class Bar(fixtures.ComparableEntity):
pass
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- primaryjoin=sa.or_(bars.c.fid1 == foos.c.id,
- bars.c.fid2 == foos.c.id),
- viewonly=True)})
+ 'bars': relationship(Bar,
+ primaryjoin=sa.or_(bars.c.fid1 == foos.c.id,
+ bars.c.fid2 == foos.c.id),
+ viewonly=True)})
mapper(Bar, bars)
sess = create_session()
@@ -2279,57 +2491,61 @@ class ViewOnlyRepeatedLocalColumn(fixtures.MappedTest):
eq_(sess.query(Foo).filter_by(id=f2.id).one(),
Foo(bars=[Bar(data='b3'), Bar(data='b4')]))
+
class ViewOnlyComplexJoin(_RelationshipErrors, fixtures.MappedTest):
+
"""'viewonly' mappings with a complex join condition."""
@classmethod
def define_tables(cls, metadata):
Table('t1', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(50)))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(50)))
Table('t2', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(50)),
- Column('t1id', Integer, ForeignKey('t1.id')))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(50)),
+ Column('t1id', Integer, ForeignKey('t1.id')))
Table('t3', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(50)))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(50)))
Table('t2tot3', metadata,
- Column('t2id', Integer, ForeignKey('t2.id')),
- Column('t3id', Integer, ForeignKey('t3.id')))
+ Column('t2id', Integer, ForeignKey('t2.id')),
+ Column('t3id', Integer, ForeignKey('t3.id')))
@classmethod
def setup_classes(cls):
class T1(cls.Comparable):
pass
+
class T2(cls.Comparable):
pass
+
class T3(cls.Comparable):
pass
def test_basic(self):
T1, t2, T2, T3, t3, t2tot3, t1 = (self.classes.T1,
- self.tables.t2,
- self.classes.T2,
- self.classes.T3,
- self.tables.t3,
- self.tables.t2tot3,
- self.tables.t1)
+ self.tables.t2,
+ self.classes.T2,
+ self.classes.T3,
+ self.tables.t3,
+ self.tables.t2tot3,
+ self.tables.t1)
mapper(T1, t1, properties={
- 't3s':relationship(T3, primaryjoin=sa.and_(
- t1.c.id==t2.c.t1id,
- t2.c.id==t2tot3.c.t2id,
- t3.c.id==t2tot3.c.t3id),
- viewonly=True,
- foreign_keys=t3.c.id, remote_side=t2.c.t1id)
+ 't3s': relationship(T3, primaryjoin=sa.and_(
+ t1.c.id == t2.c.t1id,
+ t2.c.id == t2tot3.c.t2id,
+ t3.c.id == t2tot3.c.t3id),
+ viewonly=True,
+ foreign_keys=t3.c.id, remote_side=t2.c.t1id)
})
mapper(T2, t2, properties={
- 't1':relationship(T1),
- 't3s':relationship(T3, secondary=t2tot3)
+ 't1': relationship(T1),
+ 't3s': relationship(T3, secondary=t2tot3)
})
mapper(T3, t3)
@@ -2341,31 +2557,32 @@ class ViewOnlyComplexJoin(_RelationshipErrors, fixtures.MappedTest):
a = sess.query(T1).first()
eq_(a.t3s, [T3(data='t3')])
-
def test_remote_side_escalation(self):
T1, t2, T2, T3, t3, t2tot3, t1 = (self.classes.T1,
- self.tables.t2,
- self.classes.T2,
- self.classes.T3,
- self.tables.t3,
- self.tables.t2tot3,
- self.tables.t1)
+ self.tables.t2,
+ self.classes.T2,
+ self.classes.T3,
+ self.tables.t3,
+ self.tables.t2tot3,
+ self.tables.t1)
mapper(T1, t1, properties={
- 't3s':relationship(T3,
- primaryjoin=sa.and_(t1.c.id==t2.c.t1id,
- t2.c.id==t2tot3.c.t2id,
- t3.c.id==t2tot3.c.t3id
- ),
- viewonly=True,
- foreign_keys=t3.c.id)})
+ 't3s': relationship(T3,
+ primaryjoin=sa.and_(t1.c.id == t2.c.t1id,
+ t2.c.id == t2tot3.c.t2id,
+ t3.c.id == t2tot3.c.t3id
+ ),
+ viewonly=True,
+ foreign_keys=t3.c.id)})
mapper(T2, t2, properties={
- 't1':relationship(T1),
- 't3s':relationship(T3, secondary=t2tot3)})
+ 't1': relationship(T1),
+ 't3s': relationship(T3, secondary=t2tot3)})
mapper(T3, t3)
self._assert_raises_no_local_remote(configure_mappers, "T1.t3s")
+
class RemoteForeignBetweenColsTest(fixtures.DeclarativeMappedTest):
+
"""test a complex annotation using between().
Using declarative here as an integration test for the local()
@@ -2381,23 +2598,23 @@ class RemoteForeignBetweenColsTest(fixtures.DeclarativeMappedTest):
__tablename__ = "network"
id = Column(sa.Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
ip_net_addr = Column(Integer)
ip_broadcast_addr = Column(Integer)
- addresses = relationship("Address",
- primaryjoin="remote(foreign(Address.ip_addr)).between("
- "Network.ip_net_addr,"
- "Network.ip_broadcast_addr)",
- viewonly=True
- )
+ addresses = relationship(
+ "Address",
+ primaryjoin="remote(foreign(Address.ip_addr)).between("
+ "Network.ip_net_addr,"
+ "Network.ip_broadcast_addr)",
+ viewonly=True
+ )
class Address(fixtures.ComparableEntity, Base):
__tablename__ = "address"
ip_addr = Column(Integer, primary_key=True)
-
@classmethod
def insert_data(cls):
Network, Address = cls.classes.Network, cls.classes.Address
@@ -2417,11 +2634,11 @@ class RemoteForeignBetweenColsTest(fixtures.DeclarativeMappedTest):
session = Session(testing.db)
eq_(
- session.query(Address.ip_addr).\
- select_from(Network).\
- join(Network.addresses).\
- filter(Network.ip_net_addr == 15).\
- all(),
+ session.query(Address.ip_addr).
+ select_from(Network).
+ join(Network.addresses).
+ filter(Network.ip_net_addr == 15).
+ all(),
[(17, ), (18, )]
)
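
[note] The between() mapping above as a self-contained declarative sketch; remote() and foreign() are resolved inside the string primaryjoin at mapper configuration time, and since between() yields no foreign key sync rules the relationship must be viewonly. The SQLite URL and data values are assumptions.

    from sqlalchemy import Column, Integer, create_engine
    from sqlalchemy.orm import relationship, Session
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Network(Base):
        __tablename__ = 'network'
        id = Column(Integer, primary_key=True)
        ip_net_addr = Column(Integer)
        ip_broadcast_addr = Column(Integer)

        addresses = relationship(
            'Address',
            primaryjoin='remote(foreign(Address.ip_addr)).between('
                        'Network.ip_net_addr, Network.ip_broadcast_addr)',
            viewonly=True)

    class Address(Base):
        __tablename__ = 'address'
        ip_addr = Column(Integer, primary_key=True)

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = Session(engine)
    session.add_all([Network(ip_net_addr=15, ip_broadcast_addr=25),
                     Address(ip_addr=17), Address(ip_addr=30)])
    session.commit()

    net = session.query(Network).first()
    print([a.ip_addr for a in net.addresses])   # [17]
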
@@ -2439,59 +2656,61 @@ class ExplicitLocalRemoteTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('t1', metadata,
- Column('id', String(50), primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(50)))
+ Column('id', String(50), primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(50)))
Table('t2', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(50)),
- Column('t1id', String(50)))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(50)),
+ Column('t1id', String(50)))
@classmethod
def setup_classes(cls):
class T1(cls.Comparable):
pass
+
class T2(cls.Comparable):
pass
def test_onetomany_funcfk_oldstyle(self):
T2, T1, t2, t1 = (self.classes.T2,
- self.classes.T1,
- self.tables.t2,
- self.tables.t1)
+ self.classes.T1,
+ self.tables.t2,
+ self.tables.t1)
# old _local_remote_pairs
mapper(T1, t1, properties={
- 't2s':relationship(T2,
- primaryjoin=t1.c.id==sa.func.lower(t2.c.t1id),
- _local_remote_pairs=[(t1.c.id, t2.c.t1id)],
- foreign_keys=[t2.c.t1id]
- )
- })
+ 't2s': relationship(
+ T2,
+ primaryjoin=t1.c.id == sa.func.lower(t2.c.t1id),
+ _local_remote_pairs=[(t1.c.id, t2.c.t1id)],
+ foreign_keys=[t2.c.t1id]
+ )
+ })
mapper(T2, t2)
self._test_onetomany()
def test_onetomany_funcfk_annotated(self):
T2, T1, t2, t1 = (self.classes.T2,
- self.classes.T1,
- self.tables.t2,
- self.tables.t1)
+ self.classes.T1,
+ self.tables.t2,
+ self.tables.t1)
# use annotation
mapper(T1, t1, properties={
- 't2s':relationship(T2,
- primaryjoin=t1.c.id==
- foreign(sa.func.lower(t2.c.t1id)),
- )})
+ 't2s': relationship(T2,
+ primaryjoin=t1.c.id ==
+ foreign(sa.func.lower(t2.c.t1id)),
+ )})
mapper(T2, t2)
self._test_onetomany()
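
[note] The annotated form in isolation: wrapping the SQL function in foreign() marks t2.t1id as the foreign side, replacing the older explicit _local_remote_pairs approach shown in the previous test. Table shapes mirror the fixture; the assertions match what _test_onetomany checks.

    from sqlalchemy import MetaData, Table, Column, Integer, String, func
    from sqlalchemy.orm import (mapper, relationship, foreign,
                                configure_mappers)
    from sqlalchemy.orm.interfaces import ONETOMANY

    metadata = MetaData()
    t1 = Table('t1', metadata,
               Column('id', String(50), primary_key=True),
               Column('data', String(50)))
    t2 = Table('t2', metadata,
               Column('id', Integer, primary_key=True),
               Column('data', String(50)),
               Column('t1id', String(50)))

    class T1(object):
        pass

    class T2(object):
        pass

    mapper(T1, t1, properties={
        't2s': relationship(
            T2, primaryjoin=t1.c.id == foreign(func.lower(t2.c.t1id)))})
    mapper(T2, t2)
    configure_mappers()

    assert T1.t2s.property.direction is ONETOMANY
    assert T1.t2s.property.local_remote_pairs == [(t1.c.id, t2.c.t1id)]
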
def _test_onetomany(self):
T2, T1, t2, t1 = (self.classes.T2,
- self.classes.T1,
- self.tables.t2,
- self.tables.t1)
+ self.classes.T1,
+ self.tables.t2,
+ self.tables.t1)
is_(T1.t2s.property.direction, ONETOMANY)
eq_(T1.t2s.property.local_remote_pairs, [(t1.c.id, t2.c.t1id)])
sess = create_session()
@@ -2511,17 +2730,17 @@ class ExplicitLocalRemoteTest(fixtures.MappedTest):
def test_manytoone_funcfk(self):
T2, T1, t2, t1 = (self.classes.T2,
- self.classes.T1,
- self.tables.t2,
- self.tables.t1)
+ self.classes.T1,
+ self.tables.t2,
+ self.tables.t1)
mapper(T1, t1)
mapper(T2, t2, properties={
- 't1':relationship(T1,
- primaryjoin=t1.c.id==sa.func.lower(t2.c.t1id),
- _local_remote_pairs=[(t2.c.t1id, t1.c.id)],
- foreign_keys=[t2.c.t1id],
- uselist=True)})
+ 't1': relationship(T1,
+ primaryjoin=t1.c.id == sa.func.lower(t2.c.t1id),
+ _local_remote_pairs=[(t2.c.t1id, t1.c.id)],
+ foreign_keys=[t2.c.t1id],
+ uselist=True)})
sess = create_session()
a1 = T1(id='number1', data='a1')
@@ -2539,15 +2758,16 @@ class ExplicitLocalRemoteTest(fixtures.MappedTest):
def test_onetomany_func_referent(self):
T2, T1, t2, t1 = (self.classes.T2,
- self.classes.T1,
- self.tables.t2,
- self.tables.t1)
+ self.classes.T1,
+ self.tables.t2,
+ self.tables.t1)
mapper(T1, t1, properties={
- 't2s':relationship(T2,
- primaryjoin=sa.func.lower(t1.c.id)==t2.c.t1id,
- _local_remote_pairs=[(t1.c.id, t2.c.t1id)],
- foreign_keys=[t2.c.t1id])})
+ 't2s': relationship(
+ T2,
+ primaryjoin=sa.func.lower(t1.c.id) == t2.c.t1id,
+ _local_remote_pairs=[(t1.c.id, t2.c.t1id)],
+ foreign_keys=[t2.c.t1id])})
mapper(T2, t2)
sess = create_session()
@@ -2562,21 +2782,21 @@ class ExplicitLocalRemoteTest(fixtures.MappedTest):
eq_(sess.query(T1).first(),
T1(id='NuMbeR1', data='a1', t2s=[
- T2(data='b1', t1id='number1'),
- T2(data='b2', t1id='number1')]))
+ T2(data='b1', t1id='number1'),
+ T2(data='b2', t1id='number1')]))
def test_manytoone_func_referent(self):
T2, T1, t2, t1 = (self.classes.T2,
- self.classes.T1,
- self.tables.t2,
- self.tables.t1)
+ self.classes.T1,
+ self.tables.t2,
+ self.tables.t1)
mapper(T1, t1)
mapper(T2, t2, properties={
- 't1':relationship(T1,
- primaryjoin=sa.func.lower(t1.c.id)==t2.c.t1id,
- _local_remote_pairs=[(t2.c.t1id, t1.c.id)],
- foreign_keys=[t2.c.t1id], uselist=True)})
+ 't1': relationship(T1,
+ primaryjoin=sa.func.lower(t1.c.id) == t2.c.t1id,
+ _local_remote_pairs=[(t2.c.t1id, t1.c.id)],
+ foreign_keys=[t2.c.t1id], uselist=True)})
sess = create_session()
a1 = T1(id='NuMbeR1', data='a1')
@@ -2594,40 +2814,44 @@ class ExplicitLocalRemoteTest(fixtures.MappedTest):
def test_escalation_1(self):
T2, T1, t2, t1 = (self.classes.T2,
- self.classes.T1,
- self.tables.t2,
- self.tables.t1)
+ self.classes.T1,
+ self.tables.t2,
+ self.tables.t1)
mapper(T1, t1, properties={
- 't2s':relationship(T2,
- primaryjoin=t1.c.id==sa.func.lower(t2.c.t1id),
- _local_remote_pairs=[(t1.c.id, t2.c.t1id)],
- foreign_keys=[t2.c.t1id],
- remote_side=[t2.c.t1id])})
+ 't2s': relationship(
+ T2,
+ primaryjoin=t1.c.id == sa.func.lower(t2.c.t1id),
+ _local_remote_pairs=[(t1.c.id, t2.c.t1id)],
+ foreign_keys=[t2.c.t1id],
+ remote_side=[t2.c.t1id])})
mapper(T2, t2)
assert_raises(sa.exc.ArgumentError, sa.orm.configure_mappers)
def test_escalation_2(self):
T2, T1, t2, t1 = (self.classes.T2,
- self.classes.T1,
- self.tables.t2,
- self.tables.t1)
+ self.classes.T1,
+ self.tables.t2,
+ self.tables.t1)
mapper(T1, t1, properties={
- 't2s':relationship(T2,
- primaryjoin=t1.c.id==sa.func.lower(t2.c.t1id),
- _local_remote_pairs=[(t1.c.id, t2.c.t1id)])})
+ 't2s': relationship(
+ T2,
+ primaryjoin=t1.c.id == sa.func.lower(t2.c.t1id),
+ _local_remote_pairs=[(t1.c.id, t2.c.t1id)])})
mapper(T2, t2)
assert_raises(sa.exc.ArgumentError, sa.orm.configure_mappers)
+
class InvalidRemoteSideTest(fixtures.MappedTest):
+
@classmethod
def define_tables(cls, metadata):
Table('t1', metadata,
- Column('id', Integer, primary_key=True),
- Column('data', String(50)),
- Column('t_id', Integer, ForeignKey('t1.id'))
- )
+ Column('id', Integer, primary_key=True),
+ Column('data', String(50)),
+ Column('t_id', Integer, ForeignKey('t1.id'))
+ )
@classmethod
def setup_classes(cls):
@@ -2638,10 +2862,11 @@ class InvalidRemoteSideTest(fixtures.MappedTest):
T1, t1 = self.classes.T1, self.tables.t1
mapper(T1, t1, properties={
- 't1s':relationship(T1, backref='parent')
+ 't1s': relationship(T1, backref='parent')
})
- assert_raises_message(sa.exc.ArgumentError,
+ assert_raises_message(
+ sa.exc.ArgumentError,
"T1.t1s and back-reference T1.parent are "
r"both of the same direction symbol\('ONETOMANY'\). Did you "
"mean to set remote_side on the many-to-one side ?",
@@ -2651,12 +2876,13 @@ class InvalidRemoteSideTest(fixtures.MappedTest):
T1, t1 = self.classes.T1, self.tables.t1
mapper(T1, t1, properties={
- 't1s':relationship(T1,
- backref=backref('parent', remote_side=t1.c.id),
- remote_side=t1.c.id)
+ 't1s': relationship(T1,
+ backref=backref('parent', remote_side=t1.c.id),
+ remote_side=t1.c.id)
})
- assert_raises_message(sa.exc.ArgumentError,
+ assert_raises_message(
+ sa.exc.ArgumentError,
"T1.t1s and back-reference T1.parent are "
r"both of the same direction symbol\('MANYTOONE'\). Did you "
"mean to set remote_side on the many-to-one side ?",
@@ -2666,12 +2892,13 @@ class InvalidRemoteSideTest(fixtures.MappedTest):
T1, t1 = self.classes.T1, self.tables.t1
mapper(T1, t1, properties={
- 't1s':relationship(T1, back_populates='parent'),
- 'parent':relationship(T1, back_populates='t1s'),
+ 't1s': relationship(T1, back_populates='parent'),
+ 'parent': relationship(T1, back_populates='t1s'),
})
# can't be sure of ordering here
- assert_raises_message(sa.exc.ArgumentError,
+ assert_raises_message(
+ sa.exc.ArgumentError,
r"both of the same direction symbol\('ONETOMANY'\). Did you "
"mean to set remote_side on the many-to-one side ?",
configure_mappers)
@@ -2680,44 +2907,48 @@ class InvalidRemoteSideTest(fixtures.MappedTest):
T1, t1 = self.classes.T1, self.tables.t1
mapper(T1, t1, properties={
- 't1s':relationship(T1, back_populates='parent',
+ 't1s': relationship(T1, back_populates='parent',
remote_side=t1.c.id),
- 'parent':relationship(T1, back_populates='t1s',
- remote_side=t1.c.id)
+ 'parent': relationship(T1, back_populates='t1s',
+ remote_side=t1.c.id)
})
# can't be sure of ordering here
- assert_raises_message(sa.exc.ArgumentError,
+ assert_raises_message(
+ sa.exc.ArgumentError,
r"both of the same direction symbol\('MANYTOONE'\). Did you "
"mean to set remote_side on the many-to-one side ?",
configure_mappers)
+
class AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest):
+
@classmethod
def define_tables(cls, metadata):
Table("a", metadata,
- Column('id', Integer, primary_key=True)
- )
+ Column('id', Integer, primary_key=True)
+ )
Table("b", metadata,
- Column('id', Integer, primary_key=True),
- Column('aid_1', Integer, ForeignKey('a.id')),
- Column('aid_2', Integer, ForeignKey('a.id')),
- )
+ Column('id', Integer, primary_key=True),
+ Column('aid_1', Integer, ForeignKey('a.id')),
+ Column('aid_2', Integer, ForeignKey('a.id')),
+ )
Table("atob", metadata,
- Column('aid', Integer),
- Column('bid', Integer),
- )
+ Column('aid', Integer),
+ Column('bid', Integer),
+ )
Table("atob_ambiguous", metadata,
- Column('aid1', Integer, ForeignKey('a.id')),
- Column('bid1', Integer, ForeignKey('b.id')),
- Column('aid2', Integer, ForeignKey('a.id')),
- Column('bid2', Integer, ForeignKey('b.id')),
- )
+ Column('aid1', Integer, ForeignKey('a.id')),
+ Column('bid1', Integer, ForeignKey('b.id')),
+ Column('aid2', Integer, ForeignKey('a.id')),
+ Column('bid2', Integer, ForeignKey('b.id')),
+ )
@classmethod
def setup_classes(cls):
class A(cls.Basic):
pass
+
class B(cls.Basic):
pass
@@ -2725,7 +2956,7 @@ class AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest):
A, B = self.classes.A, self.classes.B
a, b = self.tables.a, self.tables.b
mapper(A, a, properties={
- 'bs':relationship(B)
+ 'bs': relationship(B)
})
mapper(B, b)
self._assert_raises_ambig_join(
@@ -2738,12 +2969,12 @@ class AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest):
A, B = self.classes.A, self.classes.B
a, b = self.tables.a, self.tables.b
mapper(A, a, properties={
- 'bs':relationship(B, foreign_keys=b.c.aid_1)
+ 'bs': relationship(B, foreign_keys=b.c.aid_1)
})
mapper(B, b)
sa.orm.configure_mappers()
assert A.bs.property.primaryjoin.compare(
- a.c.id==b.c.aid_1
+ a.c.id == b.c.aid_1
)
eq_(
A.bs.property._calculated_foreign_keys,
@@ -2754,12 +2985,12 @@ class AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest):
A, B = self.classes.A, self.classes.B
a, b = self.tables.a, self.tables.b
mapper(A, a, properties={
- 'bs':relationship(B, primaryjoin=a.c.id==b.c.aid_1)
+ 'bs': relationship(B, primaryjoin=a.c.id == b.c.aid_1)
})
mapper(B, b)
sa.orm.configure_mappers()
assert A.bs.property.primaryjoin.compare(
- a.c.id==b.c.aid_1
+ a.c.id == b.c.aid_1
)
eq_(
A.bs.property._calculated_foreign_keys,
@@ -2770,12 +3001,12 @@ class AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest):
A, B = self.classes.A, self.classes.B
a, b = self.tables.a, self.tables.b
mapper(A, a, properties={
- 'bs':relationship(B, primaryjoin=a.c.id==foreign(b.c.aid_1))
+ 'bs': relationship(B, primaryjoin=a.c.id == foreign(b.c.aid_1))
})
mapper(B, b)
sa.orm.configure_mappers()
assert A.bs.property.primaryjoin.compare(
- a.c.id==b.c.aid_1
+ a.c.id == b.c.aid_1
)
eq_(
A.bs.property._calculated_foreign_keys,
@@ -2786,7 +3017,7 @@ class AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest):
A, B = self.classes.A, self.classes.B
a, b, a_to_b = self.tables.a, self.tables.b, self.tables.atob
mapper(A, a, properties={
- 'bs':relationship(B, secondary=a_to_b)
+ 'bs': relationship(B, secondary=a_to_b)
})
mapper(B, b)
self._assert_raises_no_join(
@@ -2798,7 +3029,7 @@ class AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest):
A, B = self.classes.A, self.classes.B
a, b, a_to_b = self.tables.a, self.tables.b, self.tables.atob_ambiguous
mapper(A, a, properties={
- 'bs':relationship(B, secondary=a_to_b)
+ 'bs': relationship(B, secondary=a_to_b)
})
mapper(B, b)
@@ -2808,20 +3039,20 @@ class AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest):
"atob_ambiguous"
)
-
def test_with_fks_m2m(self):
A, B = self.classes.A, self.classes.B
a, b, a_to_b = self.tables.a, self.tables.b, self.tables.atob_ambiguous
mapper(A, a, properties={
- 'bs':relationship(B, secondary=a_to_b,
- foreign_keys=[a_to_b.c.aid1, a_to_b.c.bid1])
+ 'bs': relationship(B, secondary=a_to_b,
+ foreign_keys=[a_to_b.c.aid1, a_to_b.c.bid1])
})
mapper(B, b)
sa.orm.configure_mappers()
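
[note] The disambiguation in isolation: two foreign key pairs point each way across the association table, so the many-to-many join is ambiguous until foreign_keys picks one column per side. The printed join text is my reading of how the condition resolves, roughly as commented.

    from sqlalchemy import MetaData, Table, Column, Integer, ForeignKey
    from sqlalchemy.orm import mapper, relationship, configure_mappers

    metadata = MetaData()
    a = Table('a', metadata, Column('id', Integer, primary_key=True))
    b = Table('b', metadata, Column('id', Integer, primary_key=True))
    atob = Table('atob_ambiguous', metadata,
                 Column('aid1', Integer, ForeignKey('a.id')),
                 Column('bid1', Integer, ForeignKey('b.id')),
                 Column('aid2', Integer, ForeignKey('a.id')),
                 Column('bid2', Integer, ForeignKey('b.id')))

    class A(object):
        pass

    class B(object):
        pass

    mapper(A, a, properties={
        'bs': relationship(B, secondary=atob,
                           foreign_keys=[atob.c.aid1, atob.c.bid1])})
    mapper(B, b)
    configure_mappers()

    print(A.bs.property.primaryjoin)    # a.id = atob_ambiguous.aid1
    print(A.bs.property.secondaryjoin)  # b.id = atob_ambiguous.bid1
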
class SecondaryNestedJoinTest(fixtures.MappedTest, AssertsCompiledSQL,
- testing.AssertsExecutionResults):
+ testing.AssertsExecutionResults):
+
"""test support for a relationship where the 'secondary' table is a
compound join().
@@ -2835,35 +3066,44 @@ class SecondaryNestedJoinTest(fixtures.MappedTest, AssertsCompiledSQL,
@classmethod
def define_tables(cls, metadata):
- Table('a', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('name', String(30)),
- Column('b_id', ForeignKey('b.id'))
- )
+ Table(
+ 'a', metadata,
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(30)),
+ Column('b_id', ForeignKey('b.id'))
+ )
Table('b', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('name', String(30)),
- Column('d_id', ForeignKey('d.id'))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(30)),
+ Column('d_id', ForeignKey('d.id'))
+ )
Table('c', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('name', String(30)),
- Column('a_id', ForeignKey('a.id')),
- Column('d_id', ForeignKey('d.id'))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(30)),
+ Column('a_id', ForeignKey('a.id')),
+ Column('d_id', ForeignKey('d.id'))
+ )
Table('d', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('name', String(30)),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(30)),
+ )
@classmethod
def setup_classes(cls):
class A(cls.Comparable):
pass
+
class B(cls.Comparable):
pass
+
class C(cls.Comparable):
pass
+
class D(cls.Comparable):
pass
@@ -2875,21 +3115,23 @@ class SecondaryNestedJoinTest(fixtures.MappedTest, AssertsCompiledSQL,
#j = join(b, d, b.c.d_id == d.c.id).join(c, c.c.d_id == d.c.id).alias()
mapper(A, a, properties={
"b": relationship(B),
- "d": relationship(D, secondary=j,
- primaryjoin=and_(a.c.b_id == b.c.id, a.c.id == c.c.a_id),
- secondaryjoin=d.c.id == b.c.d_id,
- #primaryjoin=and_(a.c.b_id == j.c.b_id, a.c.id == j.c.c_a_id),
- #secondaryjoin=d.c.id == j.c.b_d_id,
- uselist=False
- )
- })
+ "d": relationship(
+ D, secondary=j,
+ primaryjoin=and_(a.c.b_id == b.c.id, a.c.id == c.c.a_id),
+ secondaryjoin=d.c.id == b.c.d_id,
+ #primaryjoin=and_(a.c.b_id == j.c.b_id, a.c.id == j.c.c_a_id),
+ #secondaryjoin=d.c.id == j.c.b_d_id,
+ uselist=False,
+ viewonly=True
+ )
+ })
mapper(B, b, properties={
- "d": relationship(D)
- })
+ "d": relationship(D)
+ })
mapper(C, c, properties={
- "a": relationship(A),
- "d": relationship(D)
- })
+ "a": relationship(A),
+ "d": relationship(D)
+ })
mapper(D, d)
@classmethod
@@ -2931,8 +3173,8 @@ class SecondaryNestedJoinTest(fixtures.MappedTest, AssertsCompiledSQL,
sess.query(A).join(A.d),
"SELECT a.id AS a_id, a.name AS a_name, a.b_id AS a_b_id "
"FROM a JOIN (b AS b_1 JOIN d AS d_1 ON b_1.d_id = d_1.id "
- "JOIN c AS c_1 ON c_1.d_id = d_1.id) ON a.b_id = b_1.id "
- "AND a.id = c_1.a_id JOIN d ON d.id = b_1.d_id",
+ "JOIN c AS c_1 ON c_1.d_id = d_1.id) ON a.b_id = b_1.id "
+ "AND a.id = c_1.a_id JOIN d ON d.id = b_1.d_id",
dialect="postgresql"
)
@@ -2944,8 +3186,8 @@ class SecondaryNestedJoinTest(fixtures.MappedTest, AssertsCompiledSQL,
"SELECT a.id AS a_id, a.name AS a_name, a.b_id AS a_b_id, "
"d_1.id AS d_1_id, d_1.name AS d_1_name FROM a LEFT OUTER JOIN "
"(b AS b_1 JOIN d AS d_2 ON b_1.d_id = d_2.id JOIN c AS c_1 "
- "ON c_1.d_id = d_2.id JOIN d AS d_1 ON d_1.id = b_1.d_id) "
- "ON a.b_id = b_1.id AND a.id = c_1.a_id",
+ "ON c_1.d_id = d_2.id JOIN d AS d_1 ON d_1.id = b_1.d_id) "
+ "ON a.b_id = b_1.id AND a.id = c_1.a_id",
dialect="postgresql"
)
@@ -2964,14 +3206,15 @@ class SecondaryNestedJoinTest(fixtures.MappedTest, AssertsCompiledSQL,
# referring to just the columns won't actually render all those
# join conditions.
self.assert_sql_execution(
- testing.db,
- go,
- CompiledSQL(
- "SELECT d.id AS d_id, d.name AS d_name FROM b "
- "JOIN d ON b.d_id = d.id JOIN c ON c.d_id = d.id "
- "WHERE :param_1 = b.id AND :param_2 = c.a_id AND d.id = b.d_id",
- {'param_1': a1.id, 'param_2': a1.id}
- )
+ testing.db,
+ go,
+ CompiledSQL(
+ "SELECT d.id AS d_id, d.name AS d_name FROM b "
+ "JOIN d ON b.d_id = d.id JOIN c ON c.d_id = d.id "
+ "WHERE :param_1 = b.id AND :param_2 = c.a_id "
+ "AND d.id = b.d_id",
+ {'param_1': a1.id, 'param_2': a1.id}
+ )
)
mapping = {
@@ -2988,7 +3231,6 @@ class SecondaryNestedJoinTest(fixtures.MappedTest, AssertsCompiledSQL,
for a, d in sess.query(A, D).outerjoin(A.d):
eq_(self.mapping[a.name], d.name if d is not None else None)
-
def test_joinedload(self):
A, D = self.classes.A, self.classes.D
sess = Session()
@@ -3005,7 +3247,9 @@ class SecondaryNestedJoinTest(fixtures.MappedTest, AssertsCompiledSQL,
d = a.d
eq_(self.mapping[a.name], d.name if d is not None else None)
-class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest):
+
+class InvalidRelationshipEscalationTest(
+ _RelationshipErrors, fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
@@ -3017,20 +3261,20 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
Column('fid', Integer))
Table('foos_with_fks', metadata,
- Column('id', Integer, primary_key=True),
- Column('fid', Integer, ForeignKey('foos_with_fks.id')))
+ Column('id', Integer, primary_key=True),
+ Column('fid', Integer, ForeignKey('foos_with_fks.id')))
Table('bars_with_fks', metadata,
- Column('id', Integer, primary_key=True),
- Column('fid', Integer, ForeignKey('foos_with_fks.id')))
+ Column('id', Integer, primary_key=True),
+ Column('fid', Integer, ForeignKey('foos_with_fks.id')))
@classmethod
def setup_classes(cls):
class Foo(cls.Basic):
pass
+
class Bar(cls.Basic):
pass
-
def test_no_join(self):
bars, Foo, Bar, foos = (self.tables.bars,
self.classes.Foo,
@@ -3038,12 +3282,12 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
self.tables.foos)
mapper(Foo, foos, properties={
- 'bars':relationship(Bar)})
+ 'bars': relationship(Bar)})
mapper(Bar, bars)
self._assert_raises_no_join(sa.orm.configure_mappers,
- "Foo.bars", None
- )
+ "Foo.bars", None
+ )
def test_no_join_self_ref(self):
bars, Foo, Bar, foos = (self.tables.bars,
@@ -3052,7 +3296,7 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
self.tables.foos)
mapper(Foo, foos, properties={
- 'foos':relationship(Foo)})
+ 'foos': relationship(Foo)})
mapper(Bar, bars)
self._assert_raises_no_join(
@@ -3068,8 +3312,8 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
self.tables.foos)
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- primaryjoin=foos.c.id>bars.c.fid)})
+ 'bars': relationship(Bar,
+ primaryjoin=foos.c.id > bars.c.fid)})
mapper(Bar, bars)
self._assert_raises_no_relevant_fks(
@@ -3084,9 +3328,9 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
self.tables.foos)
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- primaryjoin=foos.c.id>bars.c.fid,
- foreign_keys=bars.c.fid)})
+ 'bars': relationship(Bar,
+ primaryjoin=foos.c.id > bars.c.fid,
+ foreign_keys=bars.c.fid)})
mapper(Bar, bars)
self._assert_raises_no_equality(
sa.orm.configure_mappers,
@@ -3094,25 +3338,27 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
)
def test_no_equated_wo_fks_works_on_relaxed(self):
- foos_with_fks, Foo, Bar, bars_with_fks, foos = (self.tables.foos_with_fks,
- self.classes.Foo,
- self.classes.Bar,
- self.tables.bars_with_fks,
- self.tables.foos)
+ foos_with_fks, Foo, Bar, bars_with_fks, foos = (
+ self.tables.foos_with_fks,
+ self.classes.Foo,
+ self.classes.Bar,
+ self.tables.bars_with_fks,
+ self.tables.foos)
# a very unusual case - the join between parent/child
# has no fks, but there is an fk join between two other
# tables in the join condition, for those users that try creating
# these big-long-string-of-joining-many-tables primaryjoins.
- # in this case we don't get eq_pairs, but we hit the "works if viewonly"
- # rule. so here we add another clause regarding "try foreign keys".
+ # in this case we don't get eq_pairs, but we hit the
+ # "works if viewonly" rule. so here we add another clause regarding
+ # "try foreign keys".
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- primaryjoin=and_(
- bars_with_fks.c.fid==foos_with_fks.c.id,
- foos_with_fks.c.id==foos.c.id,
- )
- )})
+ 'bars': relationship(Bar,
+ primaryjoin=and_(
+ bars_with_fks.c.fid == foos_with_fks.c.id,
+ foos_with_fks.c.id == foos.c.id,
+ )
+ )})
mapper(Bar, bars_with_fks)
self._assert_raises_no_equality(
@@ -3129,9 +3375,9 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
self.tables.foos)
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- primaryjoin=foos.c.id==bars.c.fid,
- foreign_keys=[foos.c.id, bars.c.fid])})
+ 'bars': relationship(Bar,
+ primaryjoin=foos.c.id == bars.c.fid,
+ foreign_keys=[foos.c.id, bars.c.fid])})
mapper(Bar, bars)
self._assert_raises_ambiguous_direction(
@@ -3146,12 +3392,12 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
self.tables.foos)
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- primaryjoin=foos.c.id==bars.c.fid,
- foreign_keys=[bars.c.fid],
- remote_side=[foos.c.id, bars.c.fid],
- viewonly=True
- )})
+ 'bars': relationship(Bar,
+ primaryjoin=foos.c.id == bars.c.fid,
+ foreign_keys=[bars.c.fid],
+ remote_side=[foos.c.id, bars.c.fid],
+ viewonly=True
+ )})
mapper(Bar, bars)
self._assert_raises_no_local_remote(
@@ -3159,7 +3405,6 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
"Foo.bars",
)
-
def test_ambiguous_remoteside_m2o(self):
bars, Foo, Bar, foos = (self.tables.bars,
self.classes.Foo,
@@ -3167,12 +3412,12 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
self.tables.foos)
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- primaryjoin=foos.c.id==bars.c.fid,
- foreign_keys=[foos.c.id],
- remote_side=[foos.c.id, bars.c.fid],
- viewonly=True
- )})
+ 'bars': relationship(Bar,
+ primaryjoin=foos.c.id == bars.c.fid,
+ foreign_keys=[foos.c.id],
+ remote_side=[foos.c.id, bars.c.fid],
+ viewonly=True
+ )})
mapper(Bar, bars)
self._assert_raises_no_local_remote(
@@ -3180,7 +3425,6 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
"Foo.bars",
)
-
def test_no_equated_self_ref_no_fks(self):
bars, Foo, Bar, foos = (self.tables.bars,
self.classes.Foo,
@@ -3188,14 +3432,14 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
self.tables.foos)
mapper(Foo, foos, properties={
- 'foos':relationship(Foo,
- primaryjoin=foos.c.id>foos.c.fid)})
+ 'foos': relationship(Foo,
+ primaryjoin=foos.c.id > foos.c.fid)})
mapper(Bar, bars)
- self._assert_raises_no_relevant_fks(configure_mappers,
- "foos.id > foos.fid", "Foo.foos", "primary"
- )
-
+ self._assert_raises_no_relevant_fks(
+ configure_mappers,
+ "foos.id > foos.fid", "Foo.foos", "primary"
+ )
def test_no_equated_self_ref_no_equality(self):
bars, Foo, Bar, foos = (self.tables.bars,
@@ -3204,27 +3448,28 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
self.tables.foos)
mapper(Foo, foos, properties={
- 'foos':relationship(Foo,
- primaryjoin=foos.c.id>foos.c.fid,
- foreign_keys=[foos.c.fid])})
+ 'foos': relationship(Foo,
+ primaryjoin=foos.c.id > foos.c.fid,
+ foreign_keys=[foos.c.fid])})
mapper(Bar, bars)
self._assert_raises_no_equality(configure_mappers,
- "foos.id > foos.fid", "Foo.foos", "primary"
- )
+ "foos.id > foos.fid", "Foo.foos", "primary"
+ )
def test_no_equated_viewonly(self):
- bars, Bar, bars_with_fks, foos_with_fks, Foo, foos = (self.tables.bars,
- self.classes.Bar,
- self.tables.bars_with_fks,
- self.tables.foos_with_fks,
- self.classes.Foo,
- self.tables.foos)
+ bars, Bar, bars_with_fks, foos_with_fks, Foo, foos = (
+ self.tables.bars,
+ self.classes.Bar,
+ self.tables.bars_with_fks,
+ self.tables.foos_with_fks,
+ self.classes.Foo,
+ self.tables.foos)
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- primaryjoin=foos.c.id>bars.c.fid,
- viewonly=True)})
+ 'bars': relationship(Bar,
+ primaryjoin=foos.c.id > bars.c.fid,
+ viewonly=True)})
mapper(Bar, bars)
self._assert_raises_no_relevant_fks(
@@ -3234,24 +3479,26 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
sa.orm.clear_mappers()
mapper(Foo, foos_with_fks, properties={
- 'bars':relationship(Bar,
- primaryjoin=foos_with_fks.c.id>bars_with_fks.c.fid,
- viewonly=True)})
+ 'bars': relationship(
+ Bar,
+ primaryjoin=foos_with_fks.c.id > bars_with_fks.c.fid,
+ viewonly=True)})
mapper(Bar, bars_with_fks)
sa.orm.configure_mappers()
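(This is the "works if viewonly" rule referenced in the comment further up, in miniature: a non-equality primaryjoin can never be used to persist rows, but once usable ForeignKeys exist on the tables involved, marking the relationship viewonly=True lets configuration succeed as a load-only relationship. A hedged sketch with hypothetical tables:)

from sqlalchemy import MetaData, Table, Column, Integer, ForeignKey
from sqlalchemy.orm import mapper, relationship, configure_mappers

m = MetaData()
parent = Table('parent', m, Column('id', Integer, primary_key=True))
child = Table('child', m, Column('id', Integer, primary_key=True),
              Column('pid', Integer, ForeignKey('parent.id')))

class Parent(object): pass
class Child(object): pass

mapper(Parent, parent, properties={
    # ">" can't be used to write rows, so this must be viewonly
    'children': relationship(Child,
                             primaryjoin=parent.c.id > child.c.pid,
                             viewonly=True)
})
mapper(Child, child)
configure_mappers()  # succeeds; without viewonly=True this errors out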
def test_no_equated_self_ref_viewonly(self):
- bars, Bar, bars_with_fks, foos_with_fks, Foo, foos = (self.tables.bars,
- self.classes.Bar,
- self.tables.bars_with_fks,
- self.tables.foos_with_fks,
- self.classes.Foo,
- self.tables.foos)
+ bars, Bar, bars_with_fks, foos_with_fks, Foo, foos = (
+ self.tables.bars,
+ self.classes.Bar,
+ self.tables.bars_with_fks,
+ self.tables.foos_with_fks,
+ self.classes.Foo,
+ self.tables.foos)
mapper(Foo, foos, properties={
- 'foos':relationship(Foo,
- primaryjoin=foos.c.id>foos.c.fid,
- viewonly=True)})
+ 'foos': relationship(Foo,
+ primaryjoin=foos.c.id > foos.c.fid,
+ viewonly=True)})
mapper(Bar, bars)
self._assert_raises_no_relevant_fks(
@@ -3261,9 +3508,10 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
sa.orm.clear_mappers()
mapper(Foo, foos_with_fks, properties={
- 'foos':relationship(Foo,
- primaryjoin=foos_with_fks.c.id>foos_with_fks.c.fid,
- viewonly=True)})
+ 'foos': relationship(
+ Foo,
+ primaryjoin=foos_with_fks.c.id > foos_with_fks.c.fid,
+ viewonly=True)})
mapper(Bar, bars_with_fks)
sa.orm.configure_mappers()
@@ -3271,25 +3519,26 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
Foo, foos = self.classes.Foo, self.tables.foos
mapper(Foo, foos, properties={
- 'foos':relationship(Foo,
- primaryjoin=foos.c.id>foos.c.fid,
- viewonly=True,
- foreign_keys=[foos.c.fid])})
+ 'foos': relationship(Foo,
+ primaryjoin=foos.c.id > foos.c.fid,
+ viewonly=True,
+ foreign_keys=[foos.c.fid])})
sa.orm.configure_mappers()
eq_(Foo.foos.property.local_remote_pairs, [(foos.c.id, foos.c.fid)])
def test_equated(self):
- bars, Bar, bars_with_fks, foos_with_fks, Foo, foos = (self.tables.bars,
- self.classes.Bar,
- self.tables.bars_with_fks,
- self.tables.foos_with_fks,
- self.classes.Foo,
- self.tables.foos)
+ bars, Bar, bars_with_fks, foos_with_fks, Foo, foos = (
+ self.tables.bars,
+ self.classes.Bar,
+ self.tables.bars_with_fks,
+ self.tables.foos_with_fks,
+ self.classes.Foo,
+ self.tables.foos)
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- primaryjoin=foos.c.id==bars.c.fid)})
+ 'bars': relationship(Bar,
+ primaryjoin=foos.c.id == bars.c.fid)})
mapper(Bar, bars)
self._assert_raises_no_relevant_fks(
@@ -3299,8 +3548,9 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
sa.orm.clear_mappers()
mapper(Foo, foos_with_fks, properties={
- 'bars':relationship(Bar,
- primaryjoin=foos_with_fks.c.id==bars_with_fks.c.fid)})
+ 'bars': relationship(
+ Bar,
+ primaryjoin=foos_with_fks.c.id == bars_with_fks.c.fid)})
mapper(Bar, bars_with_fks)
sa.orm.configure_mappers()
@@ -3308,24 +3558,23 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
Foo, foos = self.classes.Foo, self.tables.foos
mapper(Foo, foos, properties={
- 'foos':relationship(Foo,
- primaryjoin=foos.c.id==foos.c.fid)})
+ 'foos': relationship(Foo,
+ primaryjoin=foos.c.id == foos.c.fid)})
self._assert_raises_no_relevant_fks(
configure_mappers,
"foos.id = foos.fid", "Foo.foos", "primary"
)
-
def test_equated_self_ref_wrong_fks(self):
bars, Foo, foos = (self.tables.bars,
- self.classes.Foo,
- self.tables.foos)
+ self.classes.Foo,
+ self.tables.foos)
mapper(Foo, foos, properties={
- 'foos':relationship(Foo,
- primaryjoin=foos.c.id==foos.c.fid,
- foreign_keys=[bars.c.id])})
+ 'foos': relationship(Foo,
+ primaryjoin=foos.c.id == foos.c.fid,
+ foreign_keys=[bars.c.id])})
self._assert_raises_no_relevant_fks(
configure_mappers,
@@ -3333,7 +3582,8 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
)
-class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedTest):
+class InvalidRelationshipEscalationTestM2M(
+ _RelationshipErrors, fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
@@ -3345,9 +3595,9 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT
Column('id', Integer, primary_key=True))
Table('foobars_with_fks', metadata,
- Column('fid', Integer, ForeignKey('foos.id')),
- Column('bid', Integer, ForeignKey('bars.id'))
- )
+ Column('fid', Integer, ForeignKey('foos.id')),
+ Column('bid', Integer, ForeignKey('bars.id'))
+ )
Table('foobars_with_many_columns', metadata,
Column('fid', Integer),
@@ -3362,15 +3612,16 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT
def setup_classes(cls):
class Foo(cls.Basic):
pass
+
class Bar(cls.Basic):
pass
def test_no_join(self):
foobars, bars, Foo, Bar, foos = (self.tables.foobars,
- self.tables.bars,
- self.classes.Foo,
- self.classes.Bar,
- self.tables.foos)
+ self.tables.bars,
+ self.classes.Foo,
+ self.classes.Bar,
+ self.tables.foos)
mapper(Foo, foos, properties={
'bars': relationship(Bar, secondary=foobars)})
@@ -3384,15 +3635,15 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT
def test_no_secondaryjoin(self):
foobars, bars, Foo, Bar, foos = (self.tables.foobars,
- self.tables.bars,
- self.classes.Foo,
- self.classes.Bar,
- self.tables.foos)
+ self.tables.bars,
+ self.classes.Foo,
+ self.classes.Bar,
+ self.tables.foos)
mapper(Foo, foos, properties={
'bars': relationship(Bar,
- secondary=foobars,
- primaryjoin=foos.c.id > foobars.c.fid)})
+ secondary=foobars,
+ primaryjoin=foos.c.id > foobars.c.fid)})
mapper(Bar, bars)
self._assert_raises_no_join(
@@ -3402,17 +3653,18 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT
)
def test_no_fks(self):
- foobars_with_many_columns, bars, Bar, foobars, Foo, foos = (self.tables.foobars_with_many_columns,
- self.tables.bars,
- self.classes.Bar,
- self.tables.foobars,
- self.classes.Foo,
- self.tables.foos)
+ foobars_with_many_columns, bars, Bar, foobars, Foo, foos = (
+ self.tables.foobars_with_many_columns,
+ self.tables.bars,
+ self.classes.Bar,
+ self.tables.foobars,
+ self.classes.Foo,
+ self.tables.foos)
mapper(Foo, foos, properties={
'bars': relationship(Bar, secondary=foobars,
- primaryjoin=foos.c.id==foobars.c.fid,
- secondaryjoin=foobars.c.bid==bars.c.id)})
+ primaryjoin=foos.c.id == foobars.c.fid,
+ secondaryjoin=foobars.c.bid == bars.c.id)})
mapper(Bar, bars)
sa.orm.configure_mappers()
eq_(
@@ -3426,12 +3678,13 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT
sa.orm.clear_mappers()
mapper(Foo, foos, properties={
- 'bars': relationship(Bar,
- secondary=foobars_with_many_columns,
- primaryjoin=foos.c.id ==
- foobars_with_many_columns.c.fid,
- secondaryjoin=foobars_with_many_columns.c.bid ==
- bars.c.id)})
+ 'bars': relationship(
+ Bar,
+ secondary=foobars_with_many_columns,
+ primaryjoin=foos.c.id ==
+ foobars_with_many_columns.c.fid,
+ secondaryjoin=foobars_with_many_columns.c.bid ==
+ bars.c.id)})
mapper(Bar, bars)
sa.orm.configure_mappers()
eq_(
@@ -3445,17 +3698,17 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT
def test_local_col_setup(self):
foobars_with_fks, bars, Bar, Foo, foos = (
- self.tables.foobars_with_fks,
- self.tables.bars,
- self.classes.Bar,
- self.classes.Foo,
- self.tables.foos)
+ self.tables.foobars_with_fks,
+ self.tables.bars,
+ self.classes.Bar,
+ self.classes.Foo,
+ self.tables.foos)
# ensure m2m backref is set up with correct annotations
# [ticket:2578]
mapper(Foo, foos, properties={
'bars': relationship(Bar, secondary=foobars_with_fks, backref="foos")
- })
+ })
mapper(Bar, bars)
sa.orm.configure_mappers()
eq_(
@@ -3467,65 +3720,66 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT
set([bars.c.id])
)
-
-
def test_bad_primaryjoin(self):
- foobars_with_fks, bars, Bar, foobars, Foo, foos = (self.tables.foobars_with_fks,
- self.tables.bars,
- self.classes.Bar,
- self.tables.foobars,
- self.classes.Foo,
- self.tables.foos)
+ foobars_with_fks, bars, Bar, foobars, Foo, foos = (
+ self.tables.foobars_with_fks,
+ self.tables.bars,
+ self.classes.Bar,
+ self.tables.foobars,
+ self.classes.Foo,
+ self.tables.foos)
mapper(Foo, foos, properties={
'bars': relationship(Bar,
- secondary=foobars,
- primaryjoin=foos.c.id > foobars.c.fid,
- secondaryjoin=foobars.c.bid<=bars.c.id)})
+ secondary=foobars,
+ primaryjoin=foos.c.id > foobars.c.fid,
+ secondaryjoin=foobars.c.bid <= bars.c.id)})
mapper(Bar, bars)
self._assert_raises_no_equality(
- configure_mappers,
- 'foos.id > foobars.fid',
- "Foo.bars",
- "primary")
+ configure_mappers,
+ 'foos.id > foobars.fid',
+ "Foo.bars",
+ "primary")
sa.orm.clear_mappers()
mapper(Foo, foos, properties={
- 'bars': relationship(Bar,
- secondary=foobars_with_fks,
- primaryjoin=foos.c.id > foobars_with_fks.c.fid,
- secondaryjoin=foobars_with_fks.c.bid<=bars.c.id)})
+ 'bars': relationship(
+ Bar,
+ secondary=foobars_with_fks,
+ primaryjoin=foos.c.id > foobars_with_fks.c.fid,
+ secondaryjoin=foobars_with_fks.c.bid <= bars.c.id)})
mapper(Bar, bars)
self._assert_raises_no_equality(
- configure_mappers,
- 'foos.id > foobars_with_fks.fid',
- "Foo.bars",
- "primary")
+ configure_mappers,
+ 'foos.id > foobars_with_fks.fid',
+ "Foo.bars",
+ "primary")
sa.orm.clear_mappers()
mapper(Foo, foos, properties={
- 'bars': relationship(Bar,
- secondary=foobars_with_fks,
- primaryjoin=foos.c.id > foobars_with_fks.c.fid,
- secondaryjoin=foobars_with_fks.c.bid<=bars.c.id,
- viewonly=True)})
+ 'bars': relationship(
+ Bar,
+ secondary=foobars_with_fks,
+ primaryjoin=foos.c.id > foobars_with_fks.c.fid,
+ secondaryjoin=foobars_with_fks.c.bid <= bars.c.id,
+ viewonly=True)})
mapper(Bar, bars)
sa.orm.configure_mappers()
def test_bad_secondaryjoin(self):
foobars, bars, Foo, Bar, foos = (self.tables.foobars,
- self.tables.bars,
- self.classes.Foo,
- self.classes.Bar,
- self.tables.foos)
+ self.tables.bars,
+ self.classes.Foo,
+ self.classes.Bar,
+ self.tables.foos)
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- secondary=foobars,
- primaryjoin=foos.c.id == foobars.c.fid,
- secondaryjoin=foobars.c.bid <= bars.c.id,
- foreign_keys=[foobars.c.fid])})
+ 'bars': relationship(Bar,
+ secondary=foobars,
+ primaryjoin=foos.c.id == foobars.c.fid,
+ secondaryjoin=foobars.c.bid <= bars.c.id,
+ foreign_keys=[foobars.c.fid])})
mapper(Bar, bars)
self._assert_raises_no_relevant_fks(
configure_mappers,
@@ -3536,17 +3790,17 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT
def test_no_equated_secondaryjoin(self):
foobars, bars, Foo, Bar, foos = (self.tables.foobars,
- self.tables.bars,
- self.classes.Foo,
- self.classes.Bar,
- self.tables.foos)
+ self.tables.bars,
+ self.classes.Foo,
+ self.classes.Bar,
+ self.tables.foos)
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- secondary=foobars,
- primaryjoin=foos.c.id == foobars.c.fid,
- secondaryjoin=foobars.c.bid <= bars.c.id,
- foreign_keys=[foobars.c.fid, foobars.c.bid])})
+ 'bars': relationship(Bar,
+ secondary=foobars,
+ primaryjoin=foos.c.id == foobars.c.fid,
+ secondaryjoin=foobars.c.bid <= bars.c.id,
+ foreign_keys=[foobars.c.fid, foobars.c.bid])})
mapper(Bar, bars)
self._assert_raises_no_equality(
@@ -3556,6 +3810,7 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT
"secondary"
)
+
class ActiveHistoryFlagTest(_fixtures.FixtureTest):
run_inserts = None
run_deletes = None
@@ -3572,27 +3827,27 @@ class ActiveHistoryFlagTest(_fixtures.FixtureTest):
setattr(obj, attrname, newvalue)
eq_(
attributes.get_history(obj, attrname),
- ([newvalue,], (), [oldvalue,])
+ ([newvalue, ], (), [oldvalue, ])
)
def test_column_property_flag(self):
User, users = self.classes.User, self.tables.users
mapper(User, users, properties={
- 'name':column_property(users.c.name,
- active_history=True)
+ 'name': column_property(users.c.name,
+ active_history=True)
})
u1 = User(name='jack')
self._test_attribute(u1, 'name', 'ed')
def test_relationship_property_flag(self):
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(Address, addresses, properties={
- 'user':relationship(User, active_history=True)
+ 'user': relationship(User, active_history=True)
})
mapper(User, users)
u1 = User(name='jack')
@@ -3604,27 +3859,30 @@ class ActiveHistoryFlagTest(_fixtures.FixtureTest):
Order, orders = self.classes.Order, self.tables.orders
class MyComposite(object):
+
def __init__(self, description, isopen):
self.description = description
self.isopen = isopen
+
def __composite_values__(self):
return [self.description, self.isopen]
+
def __eq__(self, other):
return isinstance(other, MyComposite) and \
other.description == self.description
mapper(Order, orders, properties={
- 'composite':composite(
- MyComposite,
- orders.c.description,
- orders.c.isopen,
- active_history=True)
+ 'composite': composite(
+ MyComposite,
+ orders.c.description,
+ orders.c.isopen,
+ active_history=True)
})
o1 = Order(composite=MyComposite('foo', 1))
self._test_attribute(o1, "composite", MyComposite('bar', 1))
-
class RelationDeprecationTest(fixtures.MappedTest):
+
"""test usage of the old 'relation' function."""
run_inserts = 'once'
@@ -3655,34 +3913,32 @@ class RelationDeprecationTest(fixtures.MappedTest):
def fixtures(cls):
return dict(
users_table=(
- ('id', 'name'),
- (1, 'jack'),
- (2, 'ed'),
- (3, 'fred'),
- (4, 'chuck')),
+ ('id', 'name'),
+ (1, 'jack'),
+ (2, 'ed'),
+ (3, 'fred'),
+ (4, 'chuck')),
addresses_table=(
- ('id', 'user_id', 'email_address', 'purpose', 'bounces'),
- (1, 1, 'jack@jack.home', 'Personal', 0),
- (2, 1, 'jack@jack.bizz', 'Work', 1),
- (3, 2, 'ed@foo.bar', 'Personal', 0),
- (4, 3, 'fred@the.fred', 'Personal', 10)))
+ ('id', 'user_id', 'email_address', 'purpose', 'bounces'),
+ (1, 1, 'jack@jack.home', 'Personal', 0),
+ (2, 1, 'jack@jack.bizz', 'Work', 1),
+ (3, 2, 'ed@foo.bar', 'Personal', 0),
+ (4, 3, 'fred@the.fred', 'Personal', 10)))
def test_relation(self):
- addresses_table, User, users_table, Address = (self.tables.addresses_table,
- self.classes.User,
- self.tables.users_table,
- self.classes.Address)
+ addresses_table, User, users_table, Address = (
+ self.tables.addresses_table,
+ self.classes.User,
+ self.tables.users_table,
+ self.classes.Address)
mapper(User, users_table, properties=dict(
addresses=relation(Address, backref='user'),
- ))
+ ))
mapper(Address, addresses_table)
session = create_session()
- ed = session.query(User).filter(User.addresses.any(
+ session.query(User).filter(User.addresses.any(
Address.email_address == 'ed@foo.bar')).one()
-
-
-
diff --git a/test/orm/test_session.py b/test/orm/test_session.py
index 74a7a7442..96728612d 100644
--- a/test/orm/test_session.py
+++ b/test/orm/test_session.py
@@ -18,194 +18,6 @@ from sqlalchemy.testing import fixtures
from test.orm import _fixtures
from sqlalchemy import event, ForeignKey
-class BindTest(_fixtures.FixtureTest):
- run_inserts = None
-
- def test_mapped_binds(self):
- Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
-
-
- # ensure tables are unbound
- m2 = sa.MetaData()
- users_unbound = users.tometadata(m2)
- addresses_unbound = addresses.tometadata(m2)
-
- mapper(Address, addresses_unbound)
- mapper(User, users_unbound, properties={
- 'addresses': relationship(Address,
- backref=backref("user", cascade="all"),
- cascade="all")})
-
- sess = Session(binds={User: self.metadata.bind,
- Address: self.metadata.bind})
-
- u1 = User(id=1, name='ed')
- sess.add(u1)
- eq_(sess.query(User).filter(User.id == 1).all(),
- [User(id=1, name='ed')])
-
- # test expression binding
-
- sess.execute(users_unbound.insert(), params=dict(id=2,
- name='jack'))
- eq_(sess.execute(users_unbound.select(users_unbound.c.id
- == 2)).fetchall(), [(2, 'jack')])
-
- eq_(sess.execute(users_unbound.select(User.id == 2)).fetchall(),
- [(2, 'jack')])
-
- sess.execute(users_unbound.delete())
- eq_(sess.execute(users_unbound.select()).fetchall(), [])
-
- sess.close()
-
- def test_table_binds(self):
- Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
-
-
- # ensure tables are unbound
- m2 = sa.MetaData()
- users_unbound = users.tometadata(m2)
- addresses_unbound = addresses.tometadata(m2)
-
- mapper(Address, addresses_unbound)
- mapper(User, users_unbound, properties={
- 'addresses': relationship(Address,
- backref=backref("user", cascade="all"),
- cascade="all")})
-
- Session = sessionmaker(binds={users_unbound: self.metadata.bind,
- addresses_unbound: self.metadata.bind})
- sess = Session()
-
- u1 = User(id=1, name='ed')
- sess.add(u1)
- eq_(sess.query(User).filter(User.id == 1).all(),
- [User(id=1, name='ed')])
-
- sess.execute(users_unbound.insert(), params=dict(id=2, name='jack'))
-
- eq_(sess.execute(users_unbound.select(users_unbound.c.id
- == 2)).fetchall(), [(2, 'jack')])
-
- eq_(sess.execute(users_unbound.select(User.id == 2)).fetchall(),
- [(2, 'jack')])
-
- sess.execute(users_unbound.delete())
- eq_(sess.execute(users_unbound.select()).fetchall(), [])
-
- sess.close()
-
- def test_bind_from_metadata(self):
- users, User = self.tables.users, self.classes.User
-
- mapper(User, users)
-
- session = create_session()
- session.execute(users.insert(), dict(name='Johnny'))
-
- assert len(session.query(User).filter_by(name='Johnny').all()) == 1
-
- session.execute(users.delete())
-
- assert len(session.query(User).filter_by(name='Johnny').all()) == 0
- session.close()
-
- def test_bind_arguments(self):
- users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
-
- mapper(User, users)
- mapper(Address, addresses)
-
- e1 = engines.testing_engine()
- e2 = engines.testing_engine()
- e3 = engines.testing_engine()
-
- sess = Session(e3)
- sess.bind_mapper(User, e1)
- sess.bind_mapper(Address, e2)
-
- assert sess.connection().engine is e3
- assert sess.connection(bind=e1).engine is e1
- assert sess.connection(mapper=Address, bind=e1).engine is e1
- assert sess.connection(mapper=Address).engine is e2
- assert sess.connection(clause=addresses.select()).engine is e2
- assert sess.connection(mapper=User,
- clause=addresses.select()).engine is e1
- assert sess.connection(mapper=User,
- clause=addresses.select(),
- bind=e2).engine is e2
-
- sess.close()
-
- @engines.close_open_connections
- def test_bound_connection(self):
- users, User = self.tables.users, self.classes.User
-
- mapper(User, users)
- c = testing.db.connect()
- sess = create_session(bind=c)
- sess.begin()
- transaction = sess.transaction
- u = User(name='u1')
- sess.add(u)
- sess.flush()
- assert transaction._connection_for_bind(testing.db) \
- is transaction._connection_for_bind(c) is c
-
- assert_raises_message(sa.exc.InvalidRequestError,
- 'Session already has a Connection '
- 'associated',
- transaction._connection_for_bind,
- testing.db.connect())
- transaction.rollback()
- assert len(sess.query(User).all()) == 0
- sess.close()
-
- def test_bound_connection_transactional(self):
- User, users = self.classes.User, self.tables.users
-
- mapper(User, users)
- c = testing.db.connect()
-
- sess = create_session(bind=c, autocommit=False)
- u = User(name='u1')
- sess.add(u)
- sess.flush()
- sess.close()
- assert not c.in_transaction()
- assert c.scalar("select count(1) from users") == 0
-
- sess = create_session(bind=c, autocommit=False)
- u = User(name='u2')
- sess.add(u)
- sess.flush()
- sess.commit()
- assert not c.in_transaction()
- assert c.scalar("select count(1) from users") == 1
- c.execute("delete from users")
- assert c.scalar("select count(1) from users") == 0
-
- c = testing.db.connect()
-
- trans = c.begin()
- sess = create_session(bind=c, autocommit=True)
- u = User(name='u3')
- sess.add(u)
- sess.flush()
- assert c.in_transaction()
- trans.commit()
- assert not c.in_transaction()
- assert c.scalar("select count(1) from users") == 1
class ExecutionTest(_fixtures.FixtureTest):
run_inserts = None
@@ -392,6 +204,7 @@ class SessionUtilTest(_fixtures.FixtureTest):
sess.flush()
make_transient(u1)
sess.rollback()
+ assert attributes.instance_state(u1).transient
def test_make_transient_to_detached(self):
users, User = self.tables.users, self.classes.User
@@ -849,7 +662,7 @@ class SessionStateTest(_fixtures.FixtureTest):
go()
eq_(canary, [False])
- def test_deleted_expunged(self):
+ def test_deleted_auto_expunged(self):
users, User = self.tables.users, self.classes.User
mapper(User, users)
@@ -870,6 +683,53 @@ class SessionStateTest(_fixtures.FixtureTest):
assert object_session(u1) is None
+ def test_explicit_expunge_pending(self):
+ users, User = self.tables.users, self.classes.User
+
+ mapper(User, users)
+ sess = Session()
+ u1 = User(name='x')
+ sess.add(u1)
+
+ sess.flush()
+ sess.expunge(u1)
+
+ assert u1 not in sess
+ assert object_session(u1) is None
+
+ sess.rollback()
+
+ assert u1 not in sess
+ assert object_session(u1) is None
+
+ def test_explicit_expunge_deleted(self):
+ users, User = self.tables.users, self.classes.User
+
+ mapper(User, users)
+ sess = Session()
+ sess.add(User(name='x'))
+ sess.commit()
+
+ u1 = sess.query(User).first()
+ sess.delete(u1)
+
+ sess.flush()
+
+ assert was_deleted(u1)
+ assert u1 not in sess
+ assert object_session(u1) is sess
+
+ sess.expunge(u1)
+ assert was_deleted(u1)
+ assert u1 not in sess
+ assert object_session(u1) is None
+
+ sess.rollback()
+ assert was_deleted(u1)
+ assert u1 not in sess
+ assert object_session(u1) is None
+
+
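(Condensed, the state machine these two new tests pin down: flushing a delete marks the object deleted and drops it from the session's contents while the session still owns it; an explicit expunge() then severs ownership, and a later rollback() does not re-associate it. Restated from the assertions above:)

u1 = sess.query(User).first()
sess.delete(u1)
sess.flush()                       # DELETE emitted within the transaction
assert was_deleted(u1)
assert u1 not in sess              # gone from session contents...
assert object_session(u1) is sess  # ...but still owned by the session

sess.expunge(u1)                   # explicit expunge severs ownership
assert object_session(u1) is None
sess.rollback()                    # rollback does not re-associate it
assert object_session(u1) is None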
class SessionStateWFixtureTest(_fixtures.FixtureTest):
__backend__ = True
@@ -1591,14 +1451,19 @@ class SessionInterface(fixtures.TestBase):
eq_(watchdog, instance_methods,
watchdog.symmetric_difference(instance_methods))
- def _test_class_guards(self, user_arg):
+ def _test_class_guards(self, user_arg, is_class=True):
watchdog = set()
def raises_(method, *args, **kw):
watchdog.add(method)
callable_ = getattr(create_session(), method)
- assert_raises(sa.orm.exc.UnmappedClassError,
- callable_, *args, **kw)
+ if is_class:
+ assert_raises(
+ sa.orm.exc.UnmappedClassError,
+ callable_, *args, **kw)
+ else:
+ assert_raises(
+ sa.exc.NoInspectionAvailable, callable_, *args, **kw)
raises_('connection', mapper=user_arg)
@@ -1621,7 +1486,7 @@ class SessionInterface(fixtures.TestBase):
def test_unmapped_primitives(self):
for prim in ('doh', 123, ('t', 'u', 'p', 'l', 'e')):
self._test_instance_guards(prim)
- self._test_class_guards(prim)
+ self._test_class_guards(prim, is_class=False)
def test_unmapped_class_for_instance(self):
class Unmapped(object):
@@ -1645,7 +1510,7 @@ class SessionInterface(fixtures.TestBase):
self._map_it(Mapped)
self._test_instance_guards(early)
- self._test_class_guards(early)
+ self._test_class_guards(early, is_class=False)
class TLTransactionTest(fixtures.MappedTest):
diff --git a/test/orm/test_unitofwork.py b/test/orm/test_unitofwork.py
index a54097b03..ae5a8ef60 100644
--- a/test/orm/test_unitofwork.py
+++ b/test/orm/test_unitofwork.py
@@ -2479,7 +2479,8 @@ class PartialNullPKTest(fixtures.MappedTest):
t1.col2 = 5
assert_raises_message(
orm_exc.FlushError,
- "Can't update table using NULL for primary key value",
+ "Can't update table t1 using NULL for primary "
+ "key value on column t1.col2",
s.commit
)
@@ -2492,7 +2493,8 @@ class PartialNullPKTest(fixtures.MappedTest):
t1.col3 = 'hi'
assert_raises_message(
orm_exc.FlushError,
- "Can't update table using NULL for primary key value",
+ "Can't update table t1 using NULL for primary "
+ "key value on column t1.col2",
s.commit
)
@@ -2505,7 +2507,8 @@ class PartialNullPKTest(fixtures.MappedTest):
s.delete(t1)
assert_raises_message(
orm_exc.FlushError,
- "Can't delete from table using NULL for primary key value",
+ "Can't delete from table t1 using NULL "
+ "for primary key value on column t1.col2",
s.commit
)
diff --git a/test/orm/test_update_delete.py b/test/orm/test_update_delete.py
index 35d527ca8..a3ad37e60 100644
--- a/test/orm/test_update_delete.py
+++ b/test/orm/test_update_delete.py
@@ -1,9 +1,9 @@
from sqlalchemy.testing import eq_, assert_raises, assert_raises_message
from sqlalchemy.testing import fixtures
-from sqlalchemy import Integer, String, ForeignKey, or_, and_, exc, \
- select, func, Boolean, case, text
+from sqlalchemy import Integer, String, ForeignKey, or_, exc, \
+ select, func, Boolean, case, text, column
from sqlalchemy.orm import mapper, relationship, backref, Session, \
- joinedload, aliased
+ joinedload, synonym
from sqlalchemy import testing
from sqlalchemy.testing.schema import Table, Column
@@ -18,7 +18,7 @@ class UpdateDeleteTest(fixtures.MappedTest):
Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('name', String(32)),
- Column('age', Integer))
+ Column('age_int', Integer))
@classmethod
def setup_classes(cls):
@@ -30,10 +30,10 @@ class UpdateDeleteTest(fixtures.MappedTest):
users = cls.tables.users
users.insert().execute([
- dict(id=1, name='john', age=25),
- dict(id=2, name='jack', age=47),
- dict(id=3, name='jill', age=29),
- dict(id=4, name='jane', age=37),
+ dict(id=1, name='john', age_int=25),
+ dict(id=2, name='jack', age_int=47),
+ dict(id=3, name='jill', age_int=29),
+ dict(id=4, name='jane', age_int=37),
])
@classmethod
@@ -41,7 +41,9 @@ class UpdateDeleteTest(fixtures.MappedTest):
User = cls.classes.User
users = cls.tables.users
- mapper(User, users)
+ mapper(User, users, properties={
+ 'age': users.c.age_int
+ })
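(Note the shape of the fixture change: the column is now named age_int at the table level but mapped as the attribute age, which is what lets the tests below tell attribute-name resolution apart from raw column names. In miniature:)

mapper(User, users, properties={
    'age': users.c.age_int   # User.age renders as users.age_int in SQL
})
# the string 'age_int' is a table column name, not a mapped attribute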
def test_illegal_eval(self):
User = self.classes.User
@@ -70,14 +72,118 @@ class UpdateDeleteTest(fixtures.MappedTest):
):
assert_raises_message(
exc.InvalidRequestError,
- r"Can't call Query.update\(\) when %s\(\) has been called" % mname,
+ r"Can't call Query.update\(\) when "
+ "%s\(\) has been called" % mname,
q.update,
{'name': 'ed'})
assert_raises_message(
exc.InvalidRequestError,
- r"Can't call Query.delete\(\) when %s\(\) has been called" % mname,
+ r"Can't call Query.delete\(\) when "
+ "%s\(\) has been called" % mname,
q.delete)
+ def test_evaluate_clauseelement(self):
+ User = self.classes.User
+
+ class Thing(object):
+ def __clause_element__(self):
+ return User.name.__clause_element__()
+
+ s = Session()
+ jill = s.query(User).get(3)
+ s.query(User).update(
+ {Thing(): 'moonbeam'},
+ synchronize_session='evaluate')
+ eq_(jill.name, 'moonbeam')
+
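(test_evaluate_clauseelement relies on the general __clause_element__ protocol: any object exposing it is unwrapped to a SQL expression before the bulk UPDATE is compiled, and the 'evaluate' synchronizer applies the same resolution to in-session objects. A sketch with a hypothetical wrapper class:)

class NameKey(object):
    # anything whose __clause_element__ yields a mapped column
    # is accepted as a key in Query.update()
    def __clause_element__(self):
        return User.name.__clause_element__()

session.query(User).update(
    {NameKey(): 'moonbeam'},
    synchronize_session='evaluate')  # loaded objects see the new value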
+ def test_evaluate_invalid(self):
+ User = self.classes.User
+
+ class Thing(object):
+ def __clause_element__(self):
+ return 5
+
+ s = Session()
+
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Invalid expression type: 5",
+ s.query(User).update, {Thing(): 'moonbeam'},
+ synchronize_session='evaluate'
+ )
+
+ def test_evaluate_unmapped_col(self):
+ User = self.classes.User
+
+ s = Session()
+ jill = s.query(User).get(3)
+ s.query(User).update(
+ {column('name'): 'moonbeam'},
+ synchronize_session='evaluate')
+ eq_(jill.name, 'jill')
+ s.expire(jill)
+ eq_(jill.name, 'moonbeam')
+
+ def test_evaluate_synonym_string(self):
+ class Foo(object):
+ pass
+ mapper(Foo, self.tables.users, properties={
+ 'uname': synonym("name", )
+ })
+
+ s = Session()
+ jill = s.query(Foo).get(3)
+ s.query(Foo).update(
+ {'uname': 'moonbeam'},
+ synchronize_session='evaluate')
+ eq_(jill.uname, 'moonbeam')
+
+ def test_evaluate_synonym_attr(self):
+ class Foo(object):
+ pass
+ mapper(Foo, self.tables.users, properties={
+ 'uname': synonym("name", )
+ })
+
+ s = Session()
+ jill = s.query(Foo).get(3)
+ s.query(Foo).update(
+ {Foo.uname: 'moonbeam'},
+ synchronize_session='evaluate')
+ eq_(jill.uname, 'moonbeam')
+
+ def test_evaluate_double_synonym_attr(self):
+ class Foo(object):
+ pass
+ mapper(Foo, self.tables.users, properties={
+ 'uname': synonym("name"),
+ 'ufoo': synonym('uname')
+ })
+
+ s = Session()
+ jill = s.query(Foo).get(3)
+ s.query(Foo).update(
+ {Foo.ufoo: 'moonbeam'},
+ synchronize_session='evaluate')
+ eq_(jill.ufoo, 'moonbeam')
+
+ def test_evaluate_hybrid_attr(self):
+ from sqlalchemy.ext.hybrid import hybrid_property
+
+ class Foo(object):
+ @hybrid_property
+ def uname(self):
+ return self.name
+
+ mapper(Foo, self.tables.users)
+
+ s = Session()
+ jill = s.query(Foo).get(3)
+ s.query(Foo).update(
+ {Foo.uname: 'moonbeam'},
+ synchronize_session='evaluate')
+ eq_(jill.uname, 'moonbeam')
+
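(Taken together, the preceding tests assert that Query.update() resolves a string key, a synonym, a synonym-of-a-synonym, and a hybrid property down to the same underlying mapped column before compiling the UPDATE; schematically:)

mapper(Foo, users, properties={'uname': synonym('name'),
                               'ufoo': synonym('uname')})

q = session.query(Foo)
q.update({'uname': 'a'}, synchronize_session='evaluate')    # string name
q.update({Foo.uname: 'b'}, synchronize_session='evaluate')  # synonym
q.update({Foo.ufoo: 'c'}, synchronize_session='evaluate')   # chained synonym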
def test_delete(self):
User = self.classes.User
@@ -116,7 +222,8 @@ class UpdateDeleteTest(fixtures.MappedTest):
sess = Session()
john, jack, jill, jane = sess.query(User).order_by(User.id).all()
- sess.query(User).filter(or_(User.name == 'john', User.name == 'jill')).\
+ sess.query(User).filter(
+ or_(User.name == 'john', User.name == 'jill')).\
delete(synchronize_session='evaluate')
assert john not in sess and jill not in sess
sess.rollback()
@@ -127,7 +234,8 @@ class UpdateDeleteTest(fixtures.MappedTest):
sess = Session()
john, jack, jill, jane = sess.query(User).order_by(User.id).all()
- sess.query(User).filter(or_(User.name == 'john', User.name == 'jill')).\
+ sess.query(User).filter(
+ or_(User.name == 'john', User.name == 'jill')).\
delete(synchronize_session='fetch')
assert john not in sess and jill not in sess
sess.rollback()
@@ -139,7 +247,8 @@ class UpdateDeleteTest(fixtures.MappedTest):
sess = Session()
john, jack, jill, jane = sess.query(User).order_by(User.id).all()
- sess.query(User).filter(or_(User.name == 'john', User.name == 'jill')).\
+ sess.query(User).filter(
+ or_(User.name == 'john', User.name == 'jill')).\
delete(synchronize_session=False)
assert john in sess and jill in sess
@@ -152,7 +261,8 @@ class UpdateDeleteTest(fixtures.MappedTest):
sess = Session()
john, jack, jill, jane = sess.query(User).order_by(User.id).all()
- sess.query(User).filter(or_(User.name == 'john', User.name == 'jill')).\
+ sess.query(User).filter(
+ or_(User.name == 'john', User.name == 'jill')).\
delete(synchronize_session='fetch')
assert john not in sess and jill not in sess
@@ -202,7 +312,8 @@ class UpdateDeleteTest(fixtures.MappedTest):
sess.query(User).filter(User.age > 27).\
update(
- {users.c.age: User.age - 10}, synchronize_session='evaluate')
+ {users.c.age_int: User.age - 10},
+ synchronize_session='evaluate')
eq_([john.age, jack.age, jill.age, jane.age], [25, 27, 19, 27])
eq_(sess.query(User.age).order_by(
User.id).all(), list(zip([25, 27, 19, 27])))
@@ -213,12 +324,25 @@ class UpdateDeleteTest(fixtures.MappedTest):
eq_(sess.query(User.age).order_by(
User.id).all(), list(zip([15, 27, 19, 27])))
+ def test_update_against_table_col(self):
+ User, users = self.classes.User, self.tables.users
+
+ sess = Session()
+ john, jack, jill, jane = sess.query(User).order_by(User.id).all()
+ eq_([john.age, jack.age, jill.age, jane.age], [25, 47, 29, 37])
+ sess.query(User).filter(User.age > 27).\
+ update(
+ {users.c.age_int: User.age - 10},
+ synchronize_session='evaluate')
+ eq_([john.age, jack.age, jill.age, jane.age], [25, 37, 19, 27])
+
def test_update_against_metadata(self):
User, users = self.classes.User, self.tables.users
sess = Session()
- sess.query(users).update({users.c.age: 29}, synchronize_session=False)
+ sess.query(users).update(
+ {users.c.age_int: 29}, synchronize_session=False)
eq_(sess.query(User.age).order_by(
User.id).all(), list(zip([29, 29, 29, 29])))
@@ -229,7 +353,7 @@ class UpdateDeleteTest(fixtures.MappedTest):
john, jack, jill, jane = sess.query(User).order_by(User.id).all()
- sess.query(User).filter(text('age > :x')).params(x=29).\
+ sess.query(User).filter(text('age_int > :x')).params(x=29).\
update({'age': User.age - 10}, synchronize_session='fetch')
eq_([john.age, jack.age, jill.age, jane.age], [25, 37, 29, 27])
@@ -393,7 +517,7 @@ class UpdateDeleteTest(fixtures.MappedTest):
sess.query(User).filter_by(name='j2').\
delete(
- synchronize_session='evaluate')
+ synchronize_session='evaluate')
assert john not in sess
def test_autoflush_before_fetch_delete(self):
@@ -405,7 +529,7 @@ class UpdateDeleteTest(fixtures.MappedTest):
sess.query(User).filter_by(name='j2').\
delete(
- synchronize_session='fetch')
+ synchronize_session='fetch')
assert john not in sess
def test_evaluate_before_update(self):
@@ -447,7 +571,7 @@ class UpdateDeleteTest(fixtures.MappedTest):
sess.query(User).filter_by(name='john').\
filter_by(age=25).\
delete(
- synchronize_session='evaluate')
+ synchronize_session='evaluate')
assert john not in sess
def test_fetch_before_delete(self):
@@ -460,7 +584,7 @@ class UpdateDeleteTest(fixtures.MappedTest):
sess.query(User).filter_by(name='john').\
filter_by(age=25).\
delete(
- synchronize_session='fetch')
+ synchronize_session='fetch')
assert john not in sess
@@ -540,7 +664,8 @@ class UpdateDeleteIgnoresLoadersTest(fixtures.MappedTest):
sess = Session()
john, jack, jill, jane = sess.query(User).order_by(User.id).all()
- sess.query(User).options(joinedload(User.documents)).filter(User.age > 29).\
+ sess.query(User).options(
+ joinedload(User.documents)).filter(User.age > 29).\
update({'age': User.age - 10}, synchronize_session='fetch')
eq_([john.age, jack.age, jill.age, jane.age], [25, 37, 29, 27])
@@ -632,8 +757,7 @@ class UpdateDeleteFromTest(fixtures.MappedTest):
set([
(1, True), (2, None),
(3, None), (4, True),
- (5, True), (6, None),
- ])
+ (5, True), (6, None)])
)
def test_no_eval_against_multi_table_criteria(self):
@@ -666,8 +790,7 @@ class UpdateDeleteFromTest(fixtures.MappedTest):
set([
(1, True), (2, None),
(3, None), (4, True),
- (5, True), (6, None),
- ])
+ (5, True), (6, None)])
)
@testing.requires.update_where_target_in_subquery
@@ -690,8 +813,7 @@ class UpdateDeleteFromTest(fixtures.MappedTest):
set([
(1, True), (2, False),
(3, False), (4, True),
- (5, True), (6, False),
- ])
+ (5, True), (6, False)])
)
@testing.only_on('mysql', 'Multi table update')
@@ -706,8 +828,7 @@ class UpdateDeleteFromTest(fixtures.MappedTest):
filter(User.id == 2).update({
Document.samename: 'd_samename',
User.samename: 'u_samename'
- }, synchronize_session=False
- )
+ }, synchronize_session=False)
eq_(
s.query(User.id, Document.samename, User.samename).
filter(User.id == Document.user_id).
diff --git a/test/profiles.txt b/test/profiles.txt
index 12222b637..97ef13873 100644
--- a/test/profiles.txt
+++ b/test/profiles.txt
@@ -13,22 +13,16 @@
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_insert
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqlconnector_cextensions 74
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqlconnector_nocextensions 74
test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqldb_cextensions 74
test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqldb_nocextensions 74
test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_cextensions 74
test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_nocextensions 74
test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_cextensions 74
test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_nocextensions 74
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_mysql_mysqlconnector_cextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_mysql_mysqlconnector_nocextensions 77
test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_postgresql_psycopg2_cextensions 77
test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_postgresql_psycopg2_nocextensions 77
test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_sqlite_pysqlite_cextensions 77
test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_sqlite_pysqlite_nocextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_mysql_mysqlconnector_cextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_mysql_mysqlconnector_nocextensions 77
test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_postgresql_psycopg2_cextensions 77
test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_postgresql_psycopg2_nocextensions 77
test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_sqlite_pysqlite_cextensions 77
@@ -36,22 +30,16 @@ test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_sqlite_pysqlite_noc
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_select
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqlconnector_cextensions 152
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqlconnector_nocextensions 152
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_cextensions 152
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_nocextensions 152
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_cextensions 152
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_nocextensions 152
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_cextensions 152
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_nocextensions 152
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_mysql_mysqlconnector_cextensions 165
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_mysql_mysqlconnector_nocextensions 165
test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_postgresql_psycopg2_cextensions 165
test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_postgresql_psycopg2_nocextensions 165
test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_sqlite_pysqlite_cextensions 165
test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_sqlite_pysqlite_nocextensions 165
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_mysql_mysqlconnector_cextensions 165
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_mysql_mysqlconnector_nocextensions 165
test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_postgresql_psycopg2_cextensions 165
test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_postgresql_psycopg2_nocextensions 165
test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_sqlite_pysqlite_cextensions 165
@@ -59,22 +47,16 @@ test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_sqlite_pysqlite_noc
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_select_labels
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqlconnector_cextensions 186
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqlconnector_nocextensions 186
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqldb_cextensions 186
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqldb_nocextensions 186
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_cextensions 186
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_nocextensions 186
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_cextensions 186
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_nocextensions 186
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_mysql_mysqlconnector_cextensions 199
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_mysql_mysqlconnector_nocextensions 199
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_postgresql_psycopg2_cextensions 199
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_postgresql_psycopg2_nocextensions 199
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_sqlite_pysqlite_cextensions 199
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_sqlite_pysqlite_nocextensions 199
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_mysql_mysqlconnector_cextensions 199
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_mysql_mysqlconnector_nocextensions 199
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_postgresql_psycopg2_cextensions 199
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_postgresql_psycopg2_nocextensions 199
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_sqlite_pysqlite_cextensions 199
@@ -82,22 +64,16 @@ test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_sqlite_pysql
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_update
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqlconnector_cextensions 79
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqlconnector_nocextensions 79
test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqldb_cextensions 79
test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqldb_nocextensions 79
test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_cextensions 77
test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_nocextensions 77
test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_cextensions 77
test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_nocextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_mysql_mysqlconnector_cextensions 80
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_mysql_mysqlconnector_nocextensions 80
test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_postgresql_psycopg2_cextensions 78
test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_postgresql_psycopg2_nocextensions 78
test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_sqlite_pysqlite_cextensions 78
test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_sqlite_pysqlite_nocextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_mysql_mysqlconnector_cextensions 80
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_mysql_mysqlconnector_nocextensions 80
test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_postgresql_psycopg2_cextensions 78
test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_postgresql_psycopg2_nocextensions 78
test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_sqlite_pysqlite_cextensions 78
@@ -105,22 +81,16 @@ test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_sqlite_pysqlite_noc
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqlconnector_cextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqlconnector_nocextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_cextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_nocextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_cextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_nocextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_cextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_nocextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_mysql_mysqlconnector_cextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_mysql_mysqlconnector_nocextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_postgresql_psycopg2_cextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_postgresql_psycopg2_nocextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_sqlite_pysqlite_cextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_sqlite_pysqlite_nocextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_mysql_mysqlconnector_cextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_mysql_mysqlconnector_nocextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_postgresql_psycopg2_cextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_postgresql_psycopg2_nocextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_sqlite_pysqlite_cextensions 148
@@ -134,8 +104,6 @@ test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_postgre
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_postgresql_psycopg2_nocextensions 4265
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_sqlite_pysqlite_cextensions 4265
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_sqlite_pysqlite_nocextensions 4260
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_mysql_mysqlconnector_cextensions 4266
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_mysql_mysqlconnector_nocextensions 4266
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_postgresql_psycopg2_nocextensions 4266
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_sqlite_pysqlite_cextensions 4266
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_sqlite_pysqlite_nocextensions 4266
@@ -150,8 +118,6 @@ test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove
test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_postgresql_psycopg2_nocextensions 6426
test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_sqlite_pysqlite_cextensions 6426
test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_sqlite_pysqlite_nocextensions 6426
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_mysql_mysqlconnector_cextensions 6428
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_mysql_mysqlconnector_nocextensions 6428
test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_postgresql_psycopg2_nocextensions 6428
test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_sqlite_pysqlite_cextensions 6428
test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_sqlite_pysqlite_nocextensions 6428
@@ -166,8 +132,6 @@ test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_postgresql_psycop
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_postgresql_psycopg2_nocextensions 40149
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_sqlite_pysqlite_cextensions 19280
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_sqlite_pysqlite_nocextensions 28297
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_mysql_mysqlconnector_cextensions 107603
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_mysql_mysqlconnector_nocextensions 116606
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_postgresql_psycopg2_nocextensions 29138
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_sqlite_pysqlite_cextensions 32398
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_sqlite_pysqlite_nocextensions 37327
@@ -182,8 +146,6 @@ test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_postgresql
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_postgresql_psycopg2_nocextensions 30054
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_sqlite_pysqlite_cextensions 27144
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_sqlite_pysqlite_nocextensions 30149
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_mysql_mysqlconnector_cextensions 53281
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_mysql_mysqlconnector_nocextensions 56284
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_postgresql_psycopg2_nocextensions 29068
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_sqlite_pysqlite_cextensions 32197
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_sqlite_pysqlite_nocextensions 31179
@@ -198,8 +160,6 @@ test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_postgresql_psycopg2_nocextensions 17988
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_sqlite_pysqlite_cextensions 17988
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_sqlite_pysqlite_nocextensions 17988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_mysql_mysqlconnector_cextensions 18988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_mysql_mysqlconnector_nocextensions 18988
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_postgresql_psycopg2_nocextensions 18988
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_sqlite_pysqlite_cextensions 18988
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_sqlite_pysqlite_nocextensions 18988
@@ -214,8 +174,6 @@ test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_postgresql_psycopg2_nocextensions 122553
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_sqlite_pysqlite_cextensions 162315
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_sqlite_pysqlite_nocextensions 165111
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_mysql_mysqlconnector_cextensions 200102
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_mysql_mysqlconnector_nocextensions 201852
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_postgresql_psycopg2_nocextensions 125352
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_sqlite_pysqlite_cextensions 169566
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_sqlite_pysqlite_nocextensions 171364
@@ -230,8 +188,6 @@ test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_postgresql_psycopg2_nocextensions 19219
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_cextensions 22288
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_nocextensions 22530
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_mysql_mysqlconnector_cextensions 24956
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_mysql_mysqlconnector_nocextensions 24936
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_postgresql_psycopg2_nocextensions 19492
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_sqlite_pysqlite_cextensions 23067
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_sqlite_pysqlite_nocextensions 23271
@@ -246,8 +202,6 @@ test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_postgresql_psycopg2_ce
test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_postgresql_psycopg2_nocextensions 1348
test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_cextensions 1601
test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_nocextensions 1626
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_mysql_mysqlconnector_cextensions 2215
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_mysql_mysqlconnector_nocextensions 2230
test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_postgresql_psycopg2_nocextensions 1355
test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_sqlite_pysqlite_cextensions 1656
test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_sqlite_pysqlite_nocextensions 1671
@@ -262,8 +216,6 @@ test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_postgresql_psycopg2
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_postgresql_psycopg2_nocextensions 117,18
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_sqlite_pysqlite_cextensions 117,18
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_sqlite_pysqlite_nocextensions 117,18
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_mysql_mysqlconnector_cextensions 122,19
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_mysql_mysqlconnector_nocextensions 122,19
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_postgresql_psycopg2_nocextensions 122,19
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_sqlite_pysqlite_cextensions 122,19
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_sqlite_pysqlite_nocextensions 122,19
@@ -278,8 +230,6 @@ test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_postgresql_psy
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_postgresql_psycopg2_nocextensions 91
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_sqlite_pysqlite_cextensions 91
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_sqlite_pysqlite_nocextensions 91
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_mysql_mysqlconnector_cextensions 78
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_mysql_mysqlconnector_nocextensions 78
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_postgresql_psycopg2_nocextensions 78
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_sqlite_pysqlite_cextensions 78
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_sqlite_pysqlite_nocextensions 78
@@ -294,8 +244,6 @@ test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_postgresql_ps
test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_postgresql_psycopg2_nocextensions 31
test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_sqlite_pysqlite_cextensions 31
test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_sqlite_pysqlite_nocextensions 31
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_mysql_mysqlconnector_cextensions 24
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_mysql_mysqlconnector_nocextensions 24
test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_postgresql_psycopg2_nocextensions 24
test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_sqlite_pysqlite_cextensions 24
test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_sqlite_pysqlite_nocextensions 24
@@ -310,8 +258,6 @@ test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_po
test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_postgresql_psycopg2_nocextensions 8
test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_sqlite_pysqlite_cextensions 8
test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_sqlite_pysqlite_nocextensions 8
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_mysql_mysqlconnector_cextensions 9
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_mysql_mysqlconnector_nocextensions 9
test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_postgresql_psycopg2_nocextensions 9
test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_sqlite_pysqlite_cextensions 9
test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_sqlite_pysqlite_nocextensions 9
@@ -320,22 +266,16 @@ test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.4_po
# TEST: test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqlconnector_cextensions 43
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqlconnector_nocextensions 45
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqldb_cextensions 43
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqldb_nocextensions 45
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_postgresql_psycopg2_cextensions 43
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_postgresql_psycopg2_nocextensions 45
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_sqlite_pysqlite_cextensions 43
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_sqlite_pysqlite_nocextensions 45
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_mysql_mysqlconnector_cextensions 43
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_mysql_mysqlconnector_nocextensions 43
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_postgresql_psycopg2_cextensions 43
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_postgresql_psycopg2_nocextensions 43
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_sqlite_pysqlite_cextensions 43
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_sqlite_pysqlite_nocextensions 43
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_mysql_mysqlconnector_cextensions 43
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_mysql_mysqlconnector_nocextensions 43
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_postgresql_psycopg2_cextensions 43
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_postgresql_psycopg2_nocextensions 43
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_sqlite_pysqlite_cextensions 43
@@ -343,22 +283,16 @@ test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute
# TEST: test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqlconnector_cextensions 78
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqlconnector_nocextensions 80
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqldb_cextensions 78
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqldb_nocextensions 80
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_postgresql_psycopg2_cextensions 78
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_postgresql_psycopg2_nocextensions 80
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_sqlite_pysqlite_cextensions 78
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_sqlite_pysqlite_nocextensions 80
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_mysql_mysqlconnector_cextensions 78
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_mysql_mysqlconnector_nocextensions 78
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_postgresql_psycopg2_cextensions 78
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_postgresql_psycopg2_nocextensions 78
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_sqlite_pysqlite_cextensions 78
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_sqlite_pysqlite_nocextensions 78
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_mysql_mysqlconnector_cextensions 78
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_mysql_mysqlconnector_nocextensions 78
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_postgresql_psycopg2_cextensions 78
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_postgresql_psycopg2_nocextensions 78
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_sqlite_pysqlite_cextensions 78
@@ -366,22 +300,16 @@ test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_
# TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqlconnector_cextensions 15
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqlconnector_nocextensions 15
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqldb_cextensions 15
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqldb_nocextensions 15
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_postgresql_psycopg2_cextensions 15
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_postgresql_psycopg2_nocextensions 15
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_sqlite_pysqlite_cextensions 15
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_sqlite_pysqlite_nocextensions 15
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_mysql_mysqlconnector_cextensions 16
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_mysql_mysqlconnector_nocextensions 16
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_postgresql_psycopg2_cextensions 16
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_postgresql_psycopg2_nocextensions 16
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_sqlite_pysqlite_cextensions 16
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_sqlite_pysqlite_nocextensions 16
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_mysql_mysqlconnector_cextensions 16
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_mysql_mysqlconnector_nocextensions 16
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_postgresql_psycopg2_cextensions 16
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_postgresql_psycopg2_nocextensions 16
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_sqlite_pysqlite_cextensions 16
@@ -389,22 +317,16 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4
# TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_string
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqlconnector_cextensions 92959
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqlconnector_nocextensions 107979
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_cextensions 514
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_nocextensions 15534
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_cextensions 20501
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_nocextensions 35521
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_cextensions 457
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_nocextensions 15477
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_mysql_mysqlconnector_cextensions 109136
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_mysql_mysqlconnector_nocextensions 123136
test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_postgresql_psycopg2_cextensions 489
test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_postgresql_psycopg2_nocextensions 14489
test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_sqlite_pysqlite_cextensions 462
test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_sqlite_pysqlite_nocextensions 14462
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_mysql_mysqlconnector_cextensions 79876
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_mysql_mysqlconnector_nocextensions 93876
test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_postgresql_psycopg2_cextensions 489
test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_postgresql_psycopg2_nocextensions 14489
test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_sqlite_pysqlite_cextensions 462
@@ -412,22 +334,16 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_sqlite_pysqlite_
# TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_unicode
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqlconnector_cextensions 92959
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqlconnector_nocextensions 107979
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_cextensions 514
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_nocextensions 45534
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_cextensions 20501
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_nocextensions 35521
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_cextensions 457
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_nocextensions 15477
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_mysql_mysqlconnector_cextensions 109136
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_mysql_mysqlconnector_nocextensions 123136
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_postgresql_psycopg2_cextensions 489
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_postgresql_psycopg2_nocextensions 14489
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_sqlite_pysqlite_cextensions 462
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_sqlite_pysqlite_nocextensions 14462
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_mysql_mysqlconnector_cextensions 79876
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_mysql_mysqlconnector_nocextensions 93876
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_postgresql_psycopg2_cextensions 489
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_postgresql_psycopg2_nocextensions 14489
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_sqlite_pysqlite_cextensions 462
@@ -437,16 +353,16 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_sqlite_pysqlite
test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_cextensions 5562,277,3697,11893,1106,1968,2433
test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_nocextensions 5606,277,3929,13595,1223,2011,2692
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_cextensions 5238,259,3577,11529,1077,1886,2439
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_nocextensions 5260,259,3673,12701,1171,1893,2631
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_cextensions 5221,259,3577,11529,1077,1883,2439
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_nocextensions 5243,259,3673,12701,1171,1890,2631
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_cextensions 5238,273,3577,11529,1077,1886,2439
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_nocextensions 5260,273,3673,12701,1171,1893,2631
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_cextensions 5221,273,3577,11529,1077,1883,2439
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_nocextensions 5243,273,3697,12796,1187,1923,2653
# TEST: test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_cextensions 6098,399,6666,18183,1118,2606
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_nocextensions 6169,404,6898,19614,1226,2671
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_cextensions 6008,386,6716,18339,1091,2630
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_nocextensions 6093,391,6820,19366,1177,2659
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_cextensions 6007,386,6716,18339,1091,2630
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_nocextensions 6087,391,6820,19366,1177,2659
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_cextensions 6389,407,6826,18499,1134,2661
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_nocextensions 6480,412,7058,19930,1242,2726
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_cextensions 6268,394,6860,18613,1107,2679
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_nocextensions 6361,399,6964,19640,1193,2708
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_cextensions 6275,394,6860,18613,1107,2679
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_nocextensions 6360,399,6964,19640,1193,2708
diff --git a/test/requirements.py b/test/requirements.py
index 80bd135e9..05ca8d717 100644
--- a/test/requirements.py
+++ b/test/requirements.py
@@ -421,6 +421,12 @@ class DefaultRequirements(SuiteRequirements):
no_support('sybase', 'FIXME: guessing, needs confirmation'),
no_support('mssql+pymssql', 'no FreeTDS support'),
LambdaPredicate(
+ lambda config: against(config, "mysql+mysqlconnector") and
+ config.db.dialect._mysqlconnector_version_info > (2, 0) and
+ util.py2k,
+ "bug in mysqlconnector 2.0"
+ ),
+ LambdaPredicate(
lambda config: against(config, 'mssql+pyodbc') and
config.db.dialect.freetds and
config.db.dialect.freetds_driver_version < "0.91",
@@ -443,7 +449,7 @@ class DefaultRequirements(SuiteRequirements):
after an insert() construct executes.
"""
return fails_on_everything_except('mysql',
- 'sqlite+pysqlite',
+ 'sqlite+pysqlite', 'sqlite+pysqlcipher',
'sybase', 'mssql')
@property
@@ -460,7 +466,7 @@ class DefaultRequirements(SuiteRequirements):
"""
return skip_if('mssql+pymssql', 'crashes on pymssql') + \
fails_on_everything_except('mysql',
- 'sqlite+pysqlite')
+ 'sqlite+pysqlite', 'sqlite+pysqlcipher')
@property
def sane_multi_rowcount(self):
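The new exclusion above keys off the dialect's detected mysqlconnector version together with the Python runtime; LambdaPredicate wraps an arbitrary callable over the test config. A minimal sketch of that pattern in isolation, using a hypothetical predicate and reason (the real rule is the mysqlconnector-2.0-on-Python-2 check shown in the hunk):

    from sqlalchemy.testing import exclusions

    # hypothetical rule: skip whenever the configured backend is MySQL
    rule = exclusions.skip_if(
        exclusions.LambdaPredicate(
            lambda config: config.db.name == "mysql",
            "illustrative reason only"
        )
    )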
diff --git a/test/sql/test_compiler.py b/test/sql/test_compiler.py
index 3e6b87351..9e99a947b 100644
--- a/test/sql/test_compiler.py
+++ b/test/sql/test_compiler.py
@@ -238,6 +238,22 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
checkparams=params
)
+ def test_limit_offset_select_literal_binds(self):
+ stmt = select([1]).limit(5).offset(6)
+ self.assert_compile(
+ stmt,
+ "SELECT 1 LIMIT 5 OFFSET 6",
+ literal_binds=True
+ )
+
+ def test_limit_offset_compound_select_literal_binds(self):
+ stmt = select([1]).union(select([2])).limit(5).offset(6)
+ self.assert_compile(
+ stmt,
+ "SELECT 1 UNION SELECT 2 LIMIT 5 OFFSET 6",
+ literal_binds=True
+ )
+
def test_select_precol_compile_ordering(self):
s1 = select([column('x')]).select_from(text('a')).limit(5).as_scalar()
s2 = select([s1]).limit(10)
@@ -419,6 +435,19 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
dialect=default.DefaultDialect(paramstyle='pyformat')
)
+ def test_anon_param_name_on_keys(self):
+ self.assert_compile(
+ keyed.insert(),
+ "INSERT INTO keyed (x, y, z) VALUES (%(colx)s, %(coly)s, %(z)s)",
+ dialect=default.DefaultDialect(paramstyle='pyformat')
+ )
+ self.assert_compile(
+ keyed.c.coly == 5,
+ "keyed.y = %(coly_1)s",
+ checkparams={'coly_1': 5},
+ dialect=default.DefaultDialect(paramstyle='pyformat')
+ )
+
def test_dupe_columns(self):
"""test that deduping is performed against clause
element identity, not rendered result."""
@@ -3408,3 +3437,32 @@ class ResultMapTest(fixtures.TestBase):
is_(
comp.result_map['t1_a'][1][2], t1.c.a
)
+
+ def test_insert_with_select_values(self):
+ astring = Column('a', String)
+ aint = Column('a', Integer)
+ m = MetaData()
+ Table('t1', m, astring)
+ t2 = Table('t2', m, aint)
+
+ stmt = t2.insert().values(a=select([astring])).returning(aint)
+ comp = stmt.compile(dialect=postgresql.dialect())
+ eq_(
+ comp.result_map,
+ {'a': ('a', (aint, 'a', 'a'), aint.type)}
+ )
+
+ def test_insert_from_select(self):
+ astring = Column('a', String)
+ aint = Column('a', Integer)
+ m = MetaData()
+ Table('t1', m, astring)
+ t2 = Table('t2', m, aint)
+
+ stmt = t2.insert().from_select(['a'], select([astring])).\
+ returning(aint)
+ comp = stmt.compile(dialect=postgresql.dialect())
+ eq_(
+ comp.result_map,
+ {'a': ('a', (aint, 'a', 'a'), aint.type)}
+ )
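The two literal_binds tests added above pin down that LIMIT/OFFSET integers render inline when literal parameters are requested. A minimal standalone sketch of the same compile call (default dialect assumed, as in the test fixture):

    from sqlalchemy import select

    stmt = select([1]).limit(5).offset(6)
    print(stmt.compile(compile_kwargs={"literal_binds": True}))
    # per the assertion above: SELECT 1 LIMIT 5 OFFSET 6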
diff --git a/test/sql/test_defaults.py b/test/sql/test_defaults.py
index abce600df..10e557b76 100644
--- a/test/sql/test_defaults.py
+++ b/test/sql/test_defaults.py
@@ -14,6 +14,7 @@ from sqlalchemy.dialects import sqlite
from sqlalchemy.testing import fixtures
from sqlalchemy.util import u, b
from sqlalchemy import util
+import itertools
t = f = f2 = ts = currenttime = metadata = default_generator = None
@@ -1278,3 +1279,67 @@ class UnicodeDefaultsTest(fixtures.TestBase):
"foobar", Unicode(32),
default=default
)
+
+
+class InsertFromSelectTest(fixtures.TestBase):
+ __backend__ = True
+
+ def _fixture(self):
+ data = Table(
+ 'data', self.metadata,
+ Column('x', Integer),
+ Column('y', Integer)
+ )
+ data.create()
+ testing.db.execute(data.insert(), {'x': 2, 'y': 5}, {'x': 7, 'y': 12})
+ return data
+
+ @testing.provide_metadata
+ def test_insert_from_select_override_defaults(self):
+ data = self._fixture()
+
+ table = Table('sometable', self.metadata,
+ Column('x', Integer),
+ Column('foo', Integer, default=12),
+ Column('y', Integer))
+
+ table.create()
+
+ sel = select([data.c.x, data.c.y])
+
+ ins = table.insert().\
+ from_select(["x", "y"], sel)
+ testing.db.execute(ins)
+
+ eq_(
+ testing.db.execute(table.select().order_by(table.c.x)).fetchall(),
+ [(2, 12, 5), (7, 12, 12)]
+ )
+
+ @testing.provide_metadata
+ def test_insert_from_select_fn_defaults(self):
+ data = self._fixture()
+
+ counter = itertools.count(1)
+
+ def foo(ctx):
+ return next(counter)
+
+ table = Table('sometable', self.metadata,
+ Column('x', Integer),
+ Column('foo', Integer, default=foo),
+ Column('y', Integer))
+
+ table.create()
+
+ sel = select([data.c.x, data.c.y])
+
+ ins = table.insert().\
+ from_select(["x", "y"], sel)
+ testing.db.execute(ins)
+
+ # counter is only called once!
+ eq_(
+ testing.db.execute(table.select().order_by(table.c.x)).fetchall(),
+ [(2, 1, 5), (7, 1, 12)]
+ )
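These round-trip tests confirm the new Insert.from_select() default handling against a live backend: a scalar default renders once as a bound parameter applied to every selected row, and a Python-callable default is likewise invoked exactly once per statement, not once per row. A minimal sketch of the construct under test (table names mirror the fixture; the rendered SQL is approximate):

    from sqlalchemy import MetaData, Table, Column, Integer, select

    m = MetaData()
    data = Table('data', m, Column('x', Integer), Column('y', Integer))
    sometable = Table('sometable', m,
                      Column('x', Integer),
                      Column('foo', Integer, default=12),
                      Column('y', Integer))

    ins = sometable.insert().from_select(
        ["x", "y"], select([data.c.x, data.c.y]))
    print(ins)
    # roughly: INSERT INTO sometable (x, y, foo)
    #          SELECT data.x, data.y, :foo AS anon_1 FROM data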
diff --git a/test/sql/test_functions.py b/test/sql/test_functions.py
index 9b7649e63..ec8d9b5c0 100644
--- a/test/sql/test_functions.py
+++ b/test/sql/test_functions.py
@@ -1,7 +1,8 @@
from sqlalchemy.testing import eq_
import datetime
from sqlalchemy import func, select, Integer, literal, DateTime, Table, \
- Column, Sequence, MetaData, extract, Date, String, bindparam
+ Column, Sequence, MetaData, extract, Date, String, bindparam, \
+ literal_column
from sqlalchemy.sql import table, column
from sqlalchemy import sql, util
from sqlalchemy.sql.compiler import BIND_TEMPLATES
@@ -15,6 +16,13 @@ from sqlalchemy.testing import fixtures, AssertsCompiledSQL, engines
from sqlalchemy.dialects import sqlite, postgresql, mysql, oracle
+table1 = table('mytable',
+ column('myid', Integer),
+ column('name', String),
+ column('description', String),
+ )
+
+
class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = 'default'
@@ -367,6 +375,108 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
expr = func.rows("foo").alias('bar')
assert len(expr.c)
+ def test_funcfilter_empty(self):
+ self.assert_compile(
+ func.count(1).filter(),
+ "count(:param_1)"
+ )
+
+ def test_funcfilter_criterion(self):
+ self.assert_compile(
+ func.count(1).filter(
+ table1.c.name != None
+ ),
+ "count(:param_1) FILTER (WHERE mytable.name IS NOT NULL)"
+ )
+
+ def test_funcfilter_compound_criterion(self):
+ self.assert_compile(
+ func.count(1).filter(
+ table1.c.name == None,
+ table1.c.myid > 0
+ ),
+ "count(:param_1) FILTER (WHERE mytable.name IS NULL AND "
+ "mytable.myid > :myid_1)"
+ )
+
+ def test_funcfilter_label(self):
+ self.assert_compile(
+ select([func.count(1).filter(
+ table1.c.description != None
+ ).label('foo')]),
+ "SELECT count(:param_1) FILTER (WHERE mytable.description "
+ "IS NOT NULL) AS foo FROM mytable"
+ )
+
+ def test_funcfilter_fromobj_fromfunc(self):
+ # test from_obj generation.
+ # from func:
+ self.assert_compile(
+ select([
+ func.max(table1.c.name).filter(
+ literal_column('description') != None
+ )
+ ]),
+ "SELECT max(mytable.name) FILTER (WHERE description "
+ "IS NOT NULL) AS anon_1 FROM mytable"
+ )
+
+ def test_funcfilter_fromobj_fromcriterion(self):
+ # from criterion:
+ self.assert_compile(
+ select([
+ func.count(1).filter(
+ table1.c.name == 'name'
+ )
+ ]),
+ "SELECT count(:param_1) FILTER (WHERE mytable.name = :name_1) "
+ "AS anon_1 FROM mytable"
+ )
+
+ def test_funcfilter_chaining(self):
+ # test chaining:
+ self.assert_compile(
+ select([
+ func.count(1).filter(
+ table1.c.name == 'name'
+ ).filter(
+ table1.c.description == 'description'
+ )
+ ]),
+ "SELECT count(:param_1) FILTER (WHERE "
+ "mytable.name = :name_1 AND mytable.description = :description_1) "
+ "AS anon_1 FROM mytable"
+ )
+
+ def test_funcfilter_windowing_orderby(self):
+ # test filtered windowing:
+ self.assert_compile(
+ select([
+ func.rank().filter(
+ table1.c.name > 'foo'
+ ).over(
+ order_by=table1.c.name
+ )
+ ]),
+ "SELECT rank() FILTER (WHERE mytable.name > :name_1) "
+ "OVER (ORDER BY mytable.name) AS anon_1 FROM mytable"
+ )
+
+ def test_funcfilter_windowing_orderby_partitionby(self):
+ self.assert_compile(
+ select([
+ func.rank().filter(
+ table1.c.name > 'foo'
+ ).over(
+ order_by=table1.c.name,
+ partition_by=['description']
+ )
+ ]),
+ "SELECT rank() FILTER (WHERE mytable.name > :name_1) "
+ "OVER (PARTITION BY mytable.description ORDER BY mytable.name) "
+ "AS anon_1 FROM mytable"
+ )
+
class ExecuteTest(fixtures.TestBase):
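The funcfilter tests above exercise the new FunctionElement.filter() method, which emits the SQL-standard FILTER (WHERE ...) aggregate qualifier and composes with chained criteria and window functions. A minimal sketch, again on the default dialect:

    from sqlalchemy import func, select, String
    from sqlalchemy.sql import table, column

    mytable = table('mytable', column('name', String))
    stmt = select([func.count(1).filter(mytable.c.name != None)])
    print(stmt)
    # per the assertions above:
    # SELECT count(:param_1) FILTER (WHERE mytable.name IS NOT NULL)
    # AS anon_1 FROM mytable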
diff --git a/test/sql/test_generative.py b/test/sql/test_generative.py
index 013ba8082..6b86614e6 100644
--- a/test/sql/test_generative.py
+++ b/test/sql/test_generative.py
@@ -132,6 +132,19 @@ class TraversalTest(fixtures.TestBase, AssertsExecutionResults):
assert struct == s2
assert struct.is_other(s2)
+ def test_clone_anon_label(self):
+ from sqlalchemy.sql.elements import Grouping
+ c1 = Grouping(literal_column('q'))
+ s1 = select([c1])
+
+ class Vis(CloningVisitor):
+ def visit_grouping(self, elem):
+ pass
+
+ vis = Vis()
+ s2 = vis.traverse(s1)
+ eq_(list(s2.inner_columns)[0].anon_label, c1.anon_label)
+
def test_change_in_place(self):
struct = B(A("expr1"), A("expr2"), B(A("expr1b"),
A("expr2b")), A("expr3"))
@@ -539,6 +552,11 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL):
expr2 = CloningVisitor().traverse(expr)
assert str(expr) == str(expr2)
+ def test_funcfilter(self):
+ expr = func.count(1).filter(t1.c.col1 > 1)
+ expr2 = CloningVisitor().traverse(expr)
+ assert str(expr) == str(expr2)
+
def test_adapt_union(self):
u = union(
t1.select().where(t1.c.col1 == 4),
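Both additions above guard the cloning machinery: a CloningVisitor traversal must yield an expression that renders identically, and a cloned Grouping must keep the anon_label of the original so result-map keys stay stable across adaption. A minimal sketch of the traversal invariant:

    from sqlalchemy import func, select
    from sqlalchemy.sql import table, column
    from sqlalchemy.sql.visitors import CloningVisitor

    t1 = table('t1', column('col1'))
    stmt = select([func.count(1).filter(t1.c.col1 > 1)])
    clone = CloningVisitor().traverse(stmt)
    assert str(stmt) == str(clone)  # the copy compiles identically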
diff --git a/test/sql/test_insert.py b/test/sql/test_insert.py
index 232c5758b..bd4eaa3e2 100644
--- a/test/sql/test_insert.py
+++ b/test/sql/test_insert.py
@@ -183,7 +183,7 @@ class InsertTest(_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL):
checkparams={"name_1": "foo"}
)
- def test_insert_from_select_select_no_defaults(self):
+ def test_insert_from_select_no_defaults(self):
metadata = MetaData()
table = Table('sometable', metadata,
Column('id', Integer, primary_key=True),
@@ -191,7 +191,7 @@ class InsertTest(_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL):
table1 = self.tables.mytable
sel = select([table1.c.myid]).where(table1.c.name == 'foo')
ins = table.insert().\
- from_select(["id"], sel)
+ from_select(["id"], sel, include_defaults=False)
self.assert_compile(
ins,
"INSERT INTO sometable (id) SELECT mytable.myid "
@@ -199,6 +199,84 @@ class InsertTest(_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL):
checkparams={"name_1": "foo"}
)
+ def test_insert_from_select_with_sql_defaults(self):
+ metadata = MetaData()
+ table = Table('sometable', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('foo', Integer, default=func.foobar()))
+ table1 = self.tables.mytable
+ sel = select([table1.c.myid]).where(table1.c.name == 'foo')
+ ins = table.insert().\
+ from_select(["id"], sel)
+ self.assert_compile(
+ ins,
+ "INSERT INTO sometable (id, foo) SELECT "
+ "mytable.myid, foobar() AS foobar_1 "
+ "FROM mytable WHERE mytable.name = :name_1",
+ checkparams={"name_1": "foo"}
+ )
+
+ def test_insert_from_select_with_python_defaults(self):
+ metadata = MetaData()
+ table = Table('sometable', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('foo', Integer, default=12))
+ table1 = self.tables.mytable
+ sel = select([table1.c.myid]).where(table1.c.name == 'foo')
+ ins = table.insert().\
+ from_select(["id"], sel)
+ self.assert_compile(
+ ins,
+ "INSERT INTO sometable (id, foo) SELECT "
+ "mytable.myid, :foo AS anon_1 "
+ "FROM mytable WHERE mytable.name = :name_1",
+ # value filled in at execution time
+ checkparams={"name_1": "foo", "foo": None}
+ )
+
+ def test_insert_from_select_override_defaults(self):
+ metadata = MetaData()
+ table = Table('sometable', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('foo', Integer, default=12))
+ table1 = self.tables.mytable
+ sel = select(
+ [table1.c.myid, table1.c.myid.label('q')]).where(
+ table1.c.name == 'foo')
+ ins = table.insert().\
+ from_select(["id", "foo"], sel)
+ self.assert_compile(
+ ins,
+ "INSERT INTO sometable (id, foo) SELECT "
+ "mytable.myid, mytable.myid AS q "
+ "FROM mytable WHERE mytable.name = :name_1",
+ checkparams={"name_1": "foo"}
+ )
+
+ def test_insert_from_select_fn_defaults(self):
+ metadata = MetaData()
+
+ def foo(ctx):
+ return 12
+
+ table = Table('sometable', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('foo', Integer, default=foo))
+ table1 = self.tables.mytable
+ sel = select(
+ [table1.c.myid]).where(
+ table1.c.name == 'foo')
+ ins = table.insert().\
+ from_select(["id"], sel)
+ self.assert_compile(
+ ins,
+ "INSERT INTO sometable (id, foo) SELECT "
+ "mytable.myid, :foo AS anon_1 "
+ "FROM mytable WHERE mytable.name = :name_1",
+ # value filled in at execution time
+ checkparams={"name_1": "foo", "foo": None}
+ )
+
def test_insert_mix_select_values_exception(self):
table1 = self.tables.mytable
sel = select([table1.c.myid, table1.c.name]).where(
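As the rename above signals, from_select() now folds column defaults into the compiled statement unless include_defaults=False is passed, which restores the previous column-for-column behavior. A minimal sketch of opting out, using a hypothetical two-table setup:

    from sqlalchemy import MetaData, Table, Column, Integer, select

    m = MetaData()
    src = Table('src', m, Column('id', Integer))
    dest = Table('dest', m,
                 Column('id', Integer, primary_key=True),
                 Column('foo', Integer, default=12))

    ins = dest.insert().from_select(
        ["id"], select([src.c.id]), include_defaults=False)
    print(ins)
    # roughly: INSERT INTO dest (id) SELECT src.id FROM src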
diff --git a/test/sql/test_join_rewriting.py b/test/sql/test_join_rewriting.py
index c8b24e2f2..ced65d7f1 100644
--- a/test/sql/test_join_rewriting.py
+++ b/test/sql/test_join_rewriting.py
@@ -251,6 +251,16 @@ class _JoinRewriteTestBase(AssertsCompiledSQL):
self._f_b1a_where_in_b2a
)
+ def test_anon_scalar_subqueries(self):
+ s1 = select([1]).as_scalar()
+ s2 = select([2]).as_scalar()
+
+ s = select([s1, s2]).apply_labels()
+ self._test(
+ s,
+ self._anon_scalar_subqueries
+ )
+
class JoinRewriteTest(_JoinRewriteTestBase, fixtures.TestBase):
@@ -389,6 +399,10 @@ class JoinRewriteTest(_JoinRewriteTestBase, fixtures.TestBase):
"FROM a JOIN b2 ON a.id = b2.a_id)"
)
+ _anon_scalar_subqueries = (
+ "SELECT (SELECT 1) AS anon_1, (SELECT 2) AS anon_2"
+ )
+
class JoinPlainTest(_JoinRewriteTestBase, fixtures.TestBase):
@@ -497,6 +511,10 @@ class JoinPlainTest(_JoinRewriteTestBase, fixtures.TestBase):
"FROM a JOIN b2 ON a.id = b2.a_id)"
)
+ _anon_scalar_subqueries = (
+ "SELECT (SELECT 1) AS anon_1, (SELECT 2) AS anon_2"
+ )
+
class JoinNoUseLabelsTest(_JoinRewriteTestBase, fixtures.TestBase):
@@ -605,6 +623,10 @@ class JoinNoUseLabelsTest(_JoinRewriteTestBase, fixtures.TestBase):
"FROM a JOIN b2 ON a.id = b2.a_id)"
)
+ _anon_scalar_subqueries = (
+ "SELECT (SELECT 1) AS anon_1, (SELECT 2) AS anon_2"
+ )
+
class JoinExecTest(_JoinRewriteTestBase, fixtures.TestBase):
@@ -615,7 +637,8 @@ class JoinExecTest(_JoinRewriteTestBase, fixtures.TestBase):
_a_bc = _a_bc_comma_a1_selbc = _a__b_dc = _a_bkeyassoc = \
_a_bkeyassoc_aliased = _a_atobalias_balias_c_w_exists = \
_a_atobalias_balias = _b_ab1_union_c_ab2 = \
- _b_a_id_double_overlap_annotated = _f_b1a_where_in_b2a = None
+ _b_a_id_double_overlap_annotated = _f_b1a_where_in_b2a = \
+ _anon_scalar_subqueries = None
@classmethod
def setup_class(cls):
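The new assertion verifies that anonymous scalar subqueries receive distinct anon_N labels under apply_labels() in each rewriting mode. Reproducing the expected string directly:

    from sqlalchemy import select

    s1 = select([1]).as_scalar()
    s2 = select([2]).as_scalar()
    print(select([s1, s2]).apply_labels())
    # SELECT (SELECT 1) AS anon_1, (SELECT 2) AS anon_2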
diff --git a/test/sql/test_metadata.py b/test/sql/test_metadata.py
index 6b8e1bb40..3f24fd07d 100644
--- a/test/sql/test_metadata.py
+++ b/test/sql/test_metadata.py
@@ -227,6 +227,50 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
fk1 = ForeignKeyConstraint(('foo', ), ('bar', ), table=t1)
assert fk1 in t1.constraints
+ def test_fk_constraint_col_collection_w_table(self):
+ c1 = Column('foo', Integer)
+ c2 = Column('bar', Integer)
+ m = MetaData()
+ t1 = Table('t', m, c1, c2)
+ fk1 = ForeignKeyConstraint(('foo', ), ('bar', ), table=t1)
+ eq_(dict(fk1.columns), {"foo": c1})
+
+ def test_fk_constraint_col_collection_no_table(self):
+ fk1 = ForeignKeyConstraint(('foo', 'bat'), ('bar', 'hoho'))
+ eq_(dict(fk1.columns), {})
+ eq_(fk1.column_keys, ['foo', 'bat'])
+ eq_(fk1._col_description, 'foo, bat')
+ eq_(fk1._elements, {"foo": fk1.elements[0], "bat": fk1.elements[1]})
+
+ def test_fk_constraint_col_collection_no_table_real_cols(self):
+ c1 = Column('foo', Integer)
+ c2 = Column('bar', Integer)
+ fk1 = ForeignKeyConstraint((c1, ), (c2, ))
+ eq_(dict(fk1.columns), {})
+ eq_(fk1.column_keys, ['foo'])
+ eq_(fk1._col_description, 'foo')
+ eq_(fk1._elements, {"foo": fk1.elements[0]})
+
+ def test_fk_constraint_col_collection_added_to_table(self):
+ c1 = Column('foo', Integer)
+ m = MetaData()
+ fk1 = ForeignKeyConstraint(('foo', ), ('bar', ))
+ Table('t', m, c1, fk1)
+ eq_(dict(fk1.columns), {"foo": c1})
+ eq_(fk1._elements, {"foo": fk1.elements[0]})
+
+ def test_fk_constraint_col_collection_via_fk(self):
+ fk = ForeignKey('bar')
+ c1 = Column('foo', Integer, fk)
+ m = MetaData()
+ t1 = Table('t', m, c1)
+ fk1 = fk.constraint
+ eq_(fk1.column_keys, ['foo'])
+ assert fk1 in t1.constraints
+ eq_(fk1.column_keys, ['foo'])
+ eq_(dict(fk1.columns), {"foo": c1})
+ eq_(fk1._elements, {"foo": fk})
+
def test_fk_no_such_parent_col_error(self):
meta = MetaData()
a = Table('a', meta, Column('a', Integer))
@@ -678,6 +722,86 @@ class ToMetaDataTest(fixtures.TestBase, ComparesTables):
eq_(str(table_c.join(table2_c).onclause),
'myschema.mytable.myid = myschema.othertable.myid')
+ def test_change_name_retain_metadata(self):
+ meta = MetaData()
+
+ table = Table('mytable', meta,
+ Column('myid', Integer, primary_key=True),
+ Column('name', String(40), nullable=True),
+ Column('description', String(30),
+ CheckConstraint("description='hi'")),
+ UniqueConstraint('name'),
+ schema='myschema',
+ )
+
+ table2 = table.tometadata(table.metadata, name='newtable')
+ table3 = table.tometadata(table.metadata, schema='newschema',
+ name='newtable')
+
+ assert table.metadata is table2.metadata
+ assert table.metadata is table3.metadata
+ eq_((table.name, table2.name, table3.name),
+ ('mytable', 'newtable', 'newtable'))
+ eq_((table.key, table2.key, table3.key),
+ ('myschema.mytable', 'myschema.newtable', 'newschema.newtable'))
+
+ def test_change_name_change_metadata(self):
+ meta = MetaData()
+ meta2 = MetaData()
+
+ table = Table('mytable', meta,
+ Column('myid', Integer, primary_key=True),
+ Column('name', String(40), nullable=True),
+ Column('description', String(30),
+ CheckConstraint("description='hi'")),
+ UniqueConstraint('name'),
+ schema='myschema',
+ )
+
+ table2 = table.tometadata(meta2, name='newtable')
+
+ assert table.metadata is not table2.metadata
+ eq_((table.name, table2.name),
+ ('mytable', 'newtable'))
+ eq_((table.key, table2.key),
+ ('myschema.mytable', 'myschema.newtable'))
+
+ def test_change_name_selfref_fk_moves(self):
+ meta = MetaData()
+
+ referenced = Table('ref', meta,
+ Column('id', Integer, primary_key=True),
+ )
+ table = Table('mytable', meta,
+ Column('id', Integer, primary_key=True),
+ Column('parent_id', ForeignKey('mytable.id')),
+ Column('ref_id', ForeignKey('ref.id'))
+ )
+
+ table2 = table.tometadata(table.metadata, name='newtable')
+ assert table.metadata is table2.metadata
+ assert table2.c.ref_id.references(referenced.c.id)
+ assert table2.c.parent_id.references(table2.c.id)
+
+ def test_change_name_selfref_fk_moves_w_schema(self):
+ meta = MetaData()
+
+ referenced = Table('ref', meta,
+ Column('id', Integer, primary_key=True),
+ )
+ table = Table('mytable', meta,
+ Column('id', Integer, primary_key=True),
+ Column('parent_id', ForeignKey('mytable.id')),
+ Column('ref_id', ForeignKey('ref.id'))
+ )
+
+ table2 = table.tometadata(
+ table.metadata, name='newtable', schema='newschema')
+ ref2 = referenced.tometadata(table.metadata, schema='newschema')
+ assert table.metadata is table2.metadata
+ assert table2.c.ref_id.references(ref2.c.id)
+ assert table2.c.parent_id.references(table2.c.id)
+
def _assert_fk(self, t2, schema, expected, referred_schema_fn=None):
m2 = MetaData()
existing_schema = t2.schema
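The tometadata() tests above cover the new name parameter: a table can be copied under a new name into the same or a different MetaData, with the key tracking schema plus new name, and self-referential foreign keys retargeted to the copy. A condensed sketch:

    from sqlalchemy import MetaData, Table, Column, Integer

    meta = MetaData()
    t = Table('mytable', meta,
              Column('id', Integer, primary_key=True),
              schema='myschema')

    t2 = t.tometadata(t.metadata, name='newtable')
    assert t2.metadata is meta
    assert (t2.name, t2.key) == ('newtable', 'myschema.newtable')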
diff --git a/test/sql/test_operators.py b/test/sql/test_operators.py
index 5c401845b..e8ad88511 100644
--- a/test/sql/test_operators.py
+++ b/test/sql/test_operators.py
@@ -1,4 +1,4 @@
-from sqlalchemy.testing import fixtures, eq_, is_
+from sqlalchemy.testing import fixtures, eq_, is_, is_not_
from sqlalchemy import testing
from sqlalchemy.testing import assert_raises_message
from sqlalchemy.sql import column, desc, asc, literal, collate, null, true, false
@@ -778,6 +778,25 @@ class ConjunctionTest(fixtures.TestBase, testing.AssertsCompiledSQL):
"SELECT x WHERE NOT NULL"
)
+ def test_constant_non_singleton(self):
+ is_not_(null(), null())
+ is_not_(false(), false())
+ is_not_(true(), true())
+
+ def test_constant_render_distinct(self):
+ self.assert_compile(
+ select([null(), null()]),
+ "SELECT NULL AS anon_1, NULL AS anon_2"
+ )
+ self.assert_compile(
+ select([true(), true()]),
+ "SELECT true AS anon_1, true AS anon_2"
+ )
+ self.assert_compile(
+ select([false(), false()]),
+ "SELECT false AS anon_1, false AS anon_2"
+ )
+
class OperatorPrecedenceTest(fixtures.TestBase, testing.AssertsCompiledSQL):
__dialect__ = 'default'
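These tests pin down that null(), true() and false() are no longer module-level singletons and that repeated occurrences in a SELECT get distinct anonymous labels:

    from sqlalchemy import select, null

    assert null() is not null()   # a fresh object per call
    print(select([null(), null()]))
    # SELECT NULL AS anon_1, NULL AS anon_2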
diff --git a/test/sql/test_query.py b/test/sql/test_query.py
index 430c3fe7c..2f13486eb 100644
--- a/test/sql/test_query.py
+++ b/test/sql/test_query.py
@@ -81,11 +81,10 @@ class QueryTest(fixtures.TestBase):
assert_raises_message(
exc.StatementError,
- r"A value is required for bind parameter 'user_name', in "
+ r"\(sqlalchemy.exc.InvalidRequestError\) A value is required for "
+ "bind parameter 'user_name', in "
"parameter group 2 "
- "\(original cause: (sqlalchemy.exc.)?InvalidRequestError: A "
- "value is required for bind parameter 'user_name', in "
- "parameter group 2\) u?'INSERT INTO query_users",
+ r"\[SQL: u?'INSERT INTO query_users",
users.insert().execute,
{'user_id': 7, 'user_name': 'jack'},
{'user_id': 8, 'user_name': 'ed'},
@@ -295,9 +294,6 @@ class QueryTest(fixtures.TestBase):
l.append(row)
self.assert_(len(l) == 3)
- @testing.fails_if(
- lambda: util.py3k and testing.against('mysql+mysqlconnector'),
- "bug in mysqlconnector")
@testing.requires.subqueries
def test_anonymous_rows(self):
users.insert().execute(
@@ -509,9 +505,6 @@ class QueryTest(fixtures.TestBase):
lambda: row[accessor]
)
- @testing.fails_if(
- lambda: util.py3k and testing.against('mysql+mysqlconnector'),
- "bug in mysqlconnector")
@testing.requires.boolean_col_expressions
def test_or_and_as_columns(self):
true, false = literal(True), literal(False)
@@ -570,9 +563,6 @@ class QueryTest(fixtures.TestBase):
):
eq_(expr.execute().fetchall(), result)
- @testing.fails_if(
- lambda: util.py3k and testing.against('mysql+mysqlconnector'),
- "bug in mysqlconnector")
@testing.requires.mod_operator_as_percent_sign
@testing.emits_warning('.*now automatically escapes.*')
def test_percents_in_text(self):
@@ -623,9 +613,6 @@ class QueryTest(fixtures.TestBase):
c = testing.db.connect()
assert c.execute(s, id=7).fetchall()[0]['user_id'] == 7
- @testing.fails_if(
- lambda: util.py3k and testing.against('mysql+mysqlconnector'),
- "bug in mysqlconnector")
def test_repeated_bindparams(self):
"""Tests that a BindParam can be used more than once.
@@ -1319,9 +1306,6 @@ class QueryTest(fixtures.TestBase):
# Null values are not outside any set
assert len(r) == 0
- @testing.fails_if(
- lambda: util.py3k and testing.against('mysql+mysqlconnector'),
- "bug in mysqlconnector")
@testing.emits_warning('.*empty sequence.*')
@testing.fails_on('firebird', "uses sql-92 rules")
@testing.fails_on('sybase', "uses sql-92 rules")
@@ -1348,9 +1332,6 @@ class QueryTest(fixtures.TestBase):
r = s.execute(search_key=None).fetchall()
assert len(r) == 0
- @testing.fails_if(
- lambda: util.py3k and testing.against('mysql+mysqlconnector'),
- "bug in mysqlconnector")
@testing.emits_warning('.*empty sequence.*')
def test_literal_in(self):
"""similar to test_bind_in but use a bind with a value."""
@@ -2510,9 +2491,6 @@ class OperatorTest(fixtures.TestBase):
metadata.drop_all()
# TODO: seems like more tests warranted for this setup.
- @testing.fails_if(
- lambda: util.py3k and testing.against('mysql+mysqlconnector'),
- "bug in mysqlconnector")
def test_modulo(self):
eq_(
select([flds.c.intcol % 3],
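The regex rewrite above tracks the 1.0-series StatementError formatting, in which the wrapped exception's qualified class name prefixes the message and the offending statement follows in a bracketed SQL section rather than an "original cause" clause; the mysqlconnector fails_if markers disappear because the equivalent exclusion now lives in test/requirements.py. Per the pattern, the message now reads roughly:

    (sqlalchemy.exc.InvalidRequestError) A value is required for bind
    parameter 'user_name', in parameter group 2
    [SQL: u'INSERT INTO query_users ...']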
diff --git a/test/sql/test_returning.py b/test/sql/test_returning.py
index 79a0b38a5..cd9f632b9 100644
--- a/test/sql/test_returning.py
+++ b/test/sql/test_returning.py
@@ -160,6 +160,39 @@ class ReturningTest(fixtures.TestBase, AssertsExecutionResults):
eq_(result2.fetchall(), [(2, False), ])
+class CompositeStatementTest(fixtures.TestBase):
+ __requires__ = 'returning',
+ __backend__ = True
+
+ @testing.provide_metadata
+ def test_select_doesnt_pollute_result(self):
+ class MyType(TypeDecorator):
+ impl = Integer
+
+ def process_result_value(self, value, dialect):
+ raise Exception("I have not been selected")
+
+ t1 = Table(
+ 't1', self.metadata,
+ Column('x', MyType())
+ )
+
+ t2 = Table(
+ 't2', self.metadata,
+ Column('x', Integer)
+ )
+
+ self.metadata.create_all(testing.db)
+ with testing.db.connect() as conn:
+ conn.execute(t1.insert().values(x=5))
+
+            stmt = t2.insert().values(
+                x=select([t1.c.x]).as_scalar()).returning(t2.c.x)
+
+            result = conn.execute(stmt)
+            eq_(result.scalar(), 5)
+
+
class SequenceReturningTest(fixtures.TestBase):
__requires__ = 'returning', 'sequences'
__backend__ = True
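CompositeStatementTest above ensures that a scalar SELECT embedded inside INSERT ... RETURNING does not contribute its column type to the outermost result: MyType's process_result_value raises if it is ever consulted, so result.scalar() returning a plain 5 proves only the Integer handling of t2.c.x ran. A condensed sketch of the statement shape, with plain Integer standing in for MyType:

    from sqlalchemy import MetaData, Table, Column, Integer, select

    m = MetaData()
    t1 = Table('t1', m, Column('x', Integer))   # MyType() in the real test
    t2 = Table('t2', m, Column('x', Integer))

    stmt = t2.insert().values(
        x=select([t1.c.x]).as_scalar()).returning(t2.c.x)
    # executing this returns t2.x processed by its own type only;
    # the embedded SELECT's type is never applied to the result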
diff --git a/test/sql/test_selectable.py b/test/sql/test_selectable.py
index a3b2b0e93..99d0cbe76 100644
--- a/test/sql/test_selectable.py
+++ b/test/sql/test_selectable.py
@@ -5,6 +5,7 @@ from sqlalchemy.testing import eq_, assert_raises, \
from sqlalchemy import *
from sqlalchemy.testing import fixtures, AssertsCompiledSQL, \
AssertsExecutionResults
+from sqlalchemy.sql import elements
from sqlalchemy import testing
from sqlalchemy.sql import util as sql_util, visitors, expression
from sqlalchemy import exc
@@ -1934,6 +1935,29 @@ class AnnotationsTest(fixtures.TestBase):
assert (c2 == 5).left._annotations == {"foo": "bar", "bat": "hoho"}
+class ReprTest(fixtures.TestBase):
+ def test_ensure_repr_elements(self):
+ for obj in [
+ elements.Cast(1, 2),
+ elements.TypeClause(String()),
+ elements.ColumnClause('x'),
+ elements.BindParameter('q'),
+ elements.Null(),
+ elements.True_(),
+ elements.False_(),
+ elements.ClauseList(),
+ elements.BooleanClauseList.and_(),
+ elements.Tuple(),
+ elements.Case([]),
+ elements.Extract('foo', column('x')),
+ elements.UnaryExpression(column('x')),
+ elements.Grouping(column('x')),
+ elements.Over(func.foo()),
+ elements.Label('q', column('x')),
+ ]:
+ repr(obj)
+
+
class WithLabelsTest(fixtures.TestBase):
def _assert_labels_warning(self, s):