-rw-r--r--  .coveragerc  5
-rw-r--r--  .gitignore  2
-rw-r--r--  doc/build/changelog/changelog_08.rst  4
-rw-r--r--  doc/build/changelog/changelog_09.rst  188
-rw-r--r--  doc/build/changelog/changelog_10.rst  349
-rw-r--r--  doc/build/changelog/migration_08.rst  24
-rw-r--r--  doc/build/changelog/migration_10.rst  1030
-rw-r--r--  doc/build/core/defaults.rst  4
-rw-r--r--  doc/build/core/engines.rst  7
-rw-r--r--  doc/build/core/sqlelement.rst  5
-rw-r--r--  doc/build/dialects/index.rst  1
-rw-r--r--  doc/build/dialects/sqlite.rst  7
-rw-r--r--  doc/build/orm/extensions/declarative.rst  1
-rw-r--r--  doc/build/orm/inheritance.rst  6
-rw-r--r--  doc/build/orm/relationships.rst  135
-rw-r--r--  lib/sqlalchemy/__init__.py  1
-rw-r--r--  lib/sqlalchemy/dialects/mssql/base.py  2
-rw-r--r--  lib/sqlalchemy/dialects/mssql/pymssql.py  2
-rw-r--r--  lib/sqlalchemy/dialects/mysql/base.py  30
-rw-r--r--  lib/sqlalchemy/dialects/mysql/mysqlconnector.py  48
-rw-r--r--  lib/sqlalchemy/dialects/oracle/base.py  49
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/base.py  100
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/psycopg2.py  80
-rw-r--r--  lib/sqlalchemy/dialects/sqlite/__init__.py  2
-rw-r--r--  lib/sqlalchemy/dialects/sqlite/base.py  85
-rw-r--r--  lib/sqlalchemy/dialects/sqlite/pysqlcipher.py  116
-rw-r--r--  lib/sqlalchemy/engine/base.py  159
-rw-r--r--  lib/sqlalchemy/engine/interfaces.py  18
-rw-r--r--  lib/sqlalchemy/engine/reflection.py  184
-rw-r--r--  lib/sqlalchemy/engine/strategies.py  1
-rw-r--r--  lib/sqlalchemy/event/attr.py  6
-rw-r--r--  lib/sqlalchemy/event/registry.py  40
-rw-r--r--  lib/sqlalchemy/events.py  5
-rw-r--r--  lib/sqlalchemy/exc.py  27
-rw-r--r--  lib/sqlalchemy/ext/automap.py  46
-rw-r--r--  lib/sqlalchemy/ext/declarative/__init__.py  117
-rw-r--r--  lib/sqlalchemy/ext/declarative/api.py  185
-rw-r--r--  lib/sqlalchemy/ext/declarative/base.py  697
-rw-r--r--  lib/sqlalchemy/ext/declarative/clsregistry.py  7
-rw-r--r--  lib/sqlalchemy/ext/orderinglist.py  7
-rw-r--r--  lib/sqlalchemy/orm/collections.py  58
-rw-r--r--  lib/sqlalchemy/orm/events.py  12
-rw-r--r--  lib/sqlalchemy/orm/mapper.py  21
-rw-r--r--  lib/sqlalchemy/orm/persistence.py  43
-rw-r--r--  lib/sqlalchemy/orm/query.py  81
-rw-r--r--  lib/sqlalchemy/orm/relationships.py  64
-rw-r--r--  lib/sqlalchemy/orm/session.py  108
-rw-r--r--  lib/sqlalchemy/orm/state.py  15
-rw-r--r--  lib/sqlalchemy/orm/util.py  29
-rw-r--r--  lib/sqlalchemy/pool.py  20
-rw-r--r--  lib/sqlalchemy/sql/__init__.py  1
-rw-r--r--  lib/sqlalchemy/sql/compiler.py  660
-rw-r--r--  lib/sqlalchemy/sql/crud.py  530
-rw-r--r--  lib/sqlalchemy/sql/dml.py  26
-rw-r--r--  lib/sqlalchemy/sql/elements.py  181
-rw-r--r--  lib/sqlalchemy/sql/expression.py  10
-rw-r--r--  lib/sqlalchemy/sql/functions.py  31
-rw-r--r--  lib/sqlalchemy/sql/schema.py  49
-rw-r--r--  lib/sqlalchemy/sql/selectable.py  322
-rw-r--r--  lib/sqlalchemy/sql/util.py  4
-rw-r--r--  lib/sqlalchemy/testing/engines.py  4
-rw-r--r--  lib/sqlalchemy/testing/exclusions.py  5
-rw-r--r--  lib/sqlalchemy/testing/plugin/bootstrap.py  44
-rw-r--r--  lib/sqlalchemy/testing/plugin/noseplugin.py  34
-rw-r--r--  lib/sqlalchemy/testing/plugin/plugin_base.py  56
-rw-r--r--  lib/sqlalchemy/testing/plugin/pytestplugin.py  14
-rw-r--r--  lib/sqlalchemy/testing/provision.py (renamed from lib/sqlalchemy/testing/plugin/provision.py)  16
-rw-r--r--  lib/sqlalchemy/testing/requirements.py  14
-rw-r--r--  lib/sqlalchemy/testing/runner.py  2
-rw-r--r--  lib/sqlalchemy/testing/suite/test_insert.py  37
-rw-r--r--  lib/sqlalchemy/testing/suite/test_reflection.py  95
-rw-r--r--  lib/sqlalchemy/util/__init__.py  3
-rw-r--r--  lib/sqlalchemy/util/langhelpers.py  25
-rw-r--r--  setup.cfg  7
-rwxr-xr-x  sqla_nose.py  29
-rw-r--r--  test/base/test_events.py  165
-rw-r--r--  test/base/test_except.py  123
-rw-r--r--  test/base/test_utils.py  39
-rwxr-xr-x  test/conftest.py  20
-rw-r--r--  test/dialect/mssql/test_engine.py  20
-rw-r--r--  test/dialect/mysql/test_reflection.py  32
-rw-r--r--  test/dialect/mysql/test_types.py  6
-rw-r--r--  test/dialect/postgresql/test_reflection.py  180
-rw-r--r--  test/dialect/test_oracle.py  39
-rw-r--r--  test/dialect/test_sqlite.py  173
-rw-r--r--  test/engine/test_execute.py  94
-rw-r--r--  test/engine/test_logging.py  8
-rw-r--r--  test/engine/test_reconnect.py  50
-rw-r--r--  test/engine/test_transaction.py  218
-rw-r--r--  test/ext/declarative/test_basic.py  408
-rw-r--r--  test/ext/declarative/test_clsregistry.py  5
-rw-r--r--  test/ext/declarative/test_inheritance.py  394
-rw-r--r--  test/ext/declarative/test_mixin.py  285
-rw-r--r--  test/ext/declarative/test_reflection.py  193
-rw-r--r--  test/ext/test_automap.py  167
-rw-r--r--  test/ext/test_orderinglist.py  22
-rw-r--r--  test/orm/inheritance/test_single.py  206
-rw-r--r--  test/orm/test_assorted_eager.py  4
-rw-r--r--  test/orm/test_attributes.py  50
-rw-r--r--  test/orm/test_bind.py  413
-rw-r--r--  test/orm/test_cascade.py  8
-rw-r--r--  test/orm/test_collection.py  17
-rw-r--r--  test/orm/test_eager_relations.py  5
-rw-r--r--  test/orm/test_events.py  18
-rw-r--r--  test/orm/test_joins.py  39
-rw-r--r--  test/orm/test_query.py  44
-rw-r--r--  test/orm/test_rel_fn.py  20
-rw-r--r--  test/orm/test_relationships.py  2094
-rw-r--r--  test/orm/test_session.py  253
-rw-r--r--  test/orm/test_update_delete.py  183
-rw-r--r--  test/profiles.txt  146
-rw-r--r--  test/requirements.py  29
-rw-r--r--  test/sql/test_compiler.py  54
-rw-r--r--  test/sql/test_defaults.py  65
-rw-r--r--  test/sql/test_functions.py  112
-rw-r--r--  test/sql/test_generative.py  5
-rw-r--r--  test/sql/test_insert.py  82
-rw-r--r--  test/sql/test_metadata.py  86
-rw-r--r--  test/sql/test_operators.py  21
-rw-r--r--  test/sql/test_query.py  28
-rw-r--r--  test/sql/test_selectable.py  24
-rw-r--r--  tox.ini  24
122 files changed, 9393 insertions, 3710 deletions
diff --git a/.coveragerc b/.coveragerc
new file mode 100644
index 000000000..5d6c2bdc4
--- /dev/null
+++ b/.coveragerc
@@ -0,0 +1,5 @@
+[run]
+include=lib/sqlalchemy/*
+
+[report]
+omit=lib/sqlalchemy/testing/*
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index c22e53c39..55066f843 100644
--- a/.gitignore
+++ b/.gitignore
@@ -6,10 +6,12 @@
/doc/build/output/
/dogpile_data/
*.orig
+*,cover
/.tox
.venv
*.egg-info
.coverage
+coverage.xml
.*,cover
*.class
*.so
diff --git a/doc/build/changelog/changelog_08.rst b/doc/build/changelog/changelog_08.rst
index 002eaf704..6515f731d 100644
--- a/doc/build/changelog/changelog_08.rst
+++ b/doc/build/changelog/changelog_08.rst
@@ -2156,6 +2156,10 @@
Supported by Postgresql, SQLite, and MySQL.
Big thanks to Idan Kamara for doing the legwork on this one.
+ .. seealso::
+
+ :ref:`feature_2623`
+
.. change::
:tags: oracle, bug
:tickets: 2620
diff --git a/doc/build/changelog/changelog_09.rst b/doc/build/changelog/changelog_09.rst
index 44a2add71..6909da357 100644
--- a/doc/build/changelog/changelog_09.rst
+++ b/doc/build/changelog/changelog_09.rst
@@ -11,7 +11,195 @@
:start-line: 5
.. changelog::
+ :version: 0.9.9
+
+ .. change::
+ :tags: feature, sqlite
+ :versions: 1.0.0
+
+ Added a new SQLite backend for the SQLCipher backend. This backend
+ provides for encrypted SQLite databases using the pysqlcipher Python
+ driver, which is very similar to the pysqlite driver.
+
+ .. seealso::
+
+ :mod:`~sqlalchemy.dialects.sqlite.pysqlcipher`
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 3232
+ :versions: 1.0.0
+
+ Fixed bug where the ON clause for :meth:`.Query.join`,
+ and :meth:`.Query.outerjoin` to a single-inheritance subclass
+ using ``of_type()`` would not render the "single table criteria" in
+ the ON clause if the ``from_joinpoint=True`` flag were set.
+
+.. changelog::
:version: 0.9.8
+ :released: October 13, 2014
+
+ .. change::
+ :tags: bug, mysql, mysqlconnector
+ :versions: 1.0.0
+
+      As of version 2.0, probably as a side effect of the Python 3
+      merge, MySQLconnector no longer expects percent signs (e.g.
+      as used by the modulus operator and others) to be doubled,
+      even when using the "pyformat" bound parameter format (this
+      change is not documented by MySQLconnector). The dialect now
+      checks for Py2K and for MySQLconnector less than version 2.0
+      when detecting if the modulus operator should be rendered as
+      ``%%`` or ``%``.
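+
+      A minimal sketch of the resulting difference, assuming the
+      ``mysqlconnector`` dialect (the bound parameter names shown in
+      the comments are illustrative)::
+
+          from sqlalchemy import column
+
+          expr = column('a') % 5
+          # MySQLconnector < 2.0 / Py2K renders: a %% %(a_1)s
+          # MySQLconnector >= 2.0 renders:      a % %(a_1)s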
+
+ .. change::
+ :tags: bug, mysql, mysqlconnector
+ :versions: 1.0.0
+
+ Unicode SQL is now passed for MySQLconnector version 2.0 and above;
+      for Py2k and MySQLconnector < 2.0, strings are encoded.
+
+
+ .. change::
+ :tags: bug, oracle
+ :versions: 1.0.0
+ :tickets: 2138
+
+ Fixed long-standing bug in Oracle dialect where bound parameter
+ names that started with numbers would not be quoted, as Oracle
+ doesn't like numerics in bound parameter names.
+
+ .. change::
+ :tags: bug, sql
+ :versions: 1.0.0
+ :tickets: 3195
+
+      Fixed bug where a fair number of SQL elements within
+      the sql package would fail to ``__repr__()`` successfully,
+      due to a missing ``description`` attribute; the internal
+      AttributeError raised as a result would then re-invoke
+      ``__repr__()``, leading to a recursion overflow.
+
+ .. change::
+ :tags: bug, declarative, orm
+ :versions: 1.0.0
+ :tickets: 3185
+
+ Fixed "'NoneType' object has no attribute 'concrete'" error
+ when using :class:`.AbstractConcreteBase` in conjunction with
+ a subclass that declares ``__abstract__``.
+
+ .. change::
+ :tags: bug, engine
+ :versions: 1.0.0
+ :tickets: 3200
+
+ The execution options passed to an :class:`.Engine` either via
+ :paramref:`.create_engine.execution_options` or
+ :meth:`.Engine.update_execution_options` are not passed to the
+ special :class:`.Connection` used to initialize the dialect
+ within the "first connect" event; dialects will usually
+ perform their own queries in this phase, and none of the
+ current available options should be applied here. In
+ particular, the "autocommit" option was causing an attempt to
+ autocommit within this initial connect which would fail with
+ an AttributeError due to the non-standard state of the
+ :class:`.Connection`.
+
+ .. change::
+ :tags: bug, sqlite
+ :versions: 1.0.0
+ :tickets: 3211
+
+ When selecting from a UNION using an attached database file,
+ the pysqlite driver reports column names in cursor.description
+ as 'dbname.tablename.colname', instead of 'tablename.colname' as
+ it normally does for a UNION (note that it's supposed to just be
+ 'colname' for both, but we work around it). The column translation
+ logic here has been adjusted to retrieve the rightmost token, rather
+ than the second token, so it works in both cases. Workaround
+ courtesy Tony Roberts.
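+
+      A rough sketch of the adjusted translation (a hypothetical
+      illustration, not the actual internal code)::
+
+          # 'dbname.tablename.colname' and 'tablename.colname'
+          # both yield 'colname' via the rightmost token
+          colname = description_name.split('.')[-1]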
+
+ .. change::
+ :tags: bug, postgresql
+ :versions: 1.0.0
+ :tickets: 3021
+
+      A revisit to this issue, first patched in 0.9.5; apparently,
+      psycopg2's ``.closed`` accessor is not as reliable as we assumed,
+      so we have added an explicit check for the exception messages
+      "SSL SYSCALL error: Bad file descriptor" and
+      "SSL SYSCALL error: EOF detected" when detecting an
+      is-disconnect scenario. We will continue to consult psycopg2's
+      ``connection.closed`` as a first check.
+
+ .. change::
+ :tags: bug, orm, engine
+ :versions: 1.0.0
+ :tickets: 3197
+
+ Fixed bug that affected generally the same classes of event
+ as that of :ticket:`3199`, when the ``named=True`` parameter
+ would be used. Some events would fail to register, and others
+ would not invoke the event arguments correctly, generally in the
+      case where an event was "wrapped" for adaptation in some other way.
+ The "named" mechanics have been rearranged to not interfere with
+ the argument signature expected by internal wrapper functions.
+
+ .. change::
+ :tags: bug, declarative
+ :versions: 1.0.0
+ :tickets: 3208
+
+ Fixed an unlikely race condition observed in some exotic end-user
+ setups, where the attempt to check for "duplicate class name" in
+ declarative would hit upon a not-totally-cleaned-up weak reference
+ related to some other class being removed; the check here now ensures
+ the weakref still references an object before calling upon it further.
+
+ .. change::
+ :tags: bug, orm
+ :versions: 1.0.0
+ :tickets: 3199
+
+ Fixed bug that affected many classes of event, particularly
+ ORM events but also engine events, where the usual logic of
+      "de-duplicating" a redundant call to :func:`.event.listen`
+ with the same arguments would fail, for those events where the
+ listener function is wrapped. An assertion would be hit within
+ registry.py. This assertion has now been integrated into the
+ deduplication check, with the added bonus of a simpler means
+ of checking deduplication across the board.
+
+ .. change::
+ :tags: bug, mssql
+ :versions: 1.0.0
+ :tickets: 3151
+
+ Fixed the version string detection in the pymssql dialect to
+ work with Microsoft SQL Azure, which changes the word "SQL Server"
+ to "SQL Azure".
+
+ .. change::
+ :tags: bug, orm
+ :versions: 1.0.0
+ :tickets: 3194
+
+      Fixed a warning that would emit when a complex self-referential
+      primaryjoin contained functions, while at the same time remote_side
+      was specified; the warning would suggest setting "remote side".
+      It now emits only if remote_side isn't present.
+
+ .. change::
+ :tags: bug, ext
+ :versions: 1.0.0
+ :tickets: 3191
+
+ Fixed bug in ordering list where the order of items would be
+ thrown off during a collection replace event, if the
+ reorder_on_append flag were set to True. The fix ensures that the
+ ordering list only impacts the list that is explicitly associated
+ with the object.
.. change::
:tags: bug, sql
diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst
index 9c7f207cc..e63e023d9 100644
--- a/doc/build/changelog/changelog_10.rst
+++ b/doc/build/changelog/changelog_10.rst
@@ -22,6 +22,350 @@
on compatibility concerns, see :doc:`/changelog/migration_10`.
.. change::
+ :tags: bug, sql
+ :pullreq: github:146
+
+ Fixed the name of the :paramref:`.PoolEvents.reset.dbapi_connection`
+ parameter as passed to this event; in particular this affects
+ usage of the "named" argument style for this event. Pull request
+ courtesy Jason Goldberger.
+
+ .. change::
+ :tags: feature, sql
+ :pullreq: github:139
+
+ Added a new parameter :paramref:`.Table.tometadata.name` to
+ the :meth:`.Table.tometadata` method. Similar to
+ :paramref:`.Table.tometadata.schema`, this argument causes the newly
+ copied :class:`.Table` to take on the new name instead of
+ the existing one. An interesting capability this adds is that of
+ copying a :class:`.Table` object to the *same* :class:`.MetaData`
+ target with a new name. Pull request courtesy n.d. parker.
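+
+      A quick sketch of copying a table to the same :class:`.MetaData`
+      under a new name (names here are illustrative)::
+
+          from sqlalchemy import MetaData, Table, Column, Integer
+
+          m = MetaData()
+          t1 = Table('t1', m, Column('x', Integer))
+          t2 = t1.tometadata(m, name='t2')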
+
+ .. change::
+ :tags: bug, orm
+ :pullreq: github:137
+
+ Repaired support of the ``copy.deepcopy()`` call when used by the
+ :class:`.orm.util.CascadeOptions` argument, which occurs
+ if ``copy.deepcopy()`` is being used with :func:`.relationship`
+ (not an officially supported use case). Pull request courtesy
+ duesenfranz.
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 3170
+
+ Reversing a change that was made in 0.9, the "singleton" nature
+ of the "constants" :func:`.null`, :func:`.true`, and :func:`.false`
+ has been reverted. These functions returning a "singleton" object
+ had the effect that different instances would be treated as the
+ same regardless of lexical use, which in particular would impact
+ the rendering of the columns clause of a SELECT statement.
+
+ .. seealso::
+
+ :ref:`bug_3170`
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 3139
+
+ Fixed bug where :meth:`.Session.expunge` would not fully detach
+ the given object if the object had been subject to a delete
+ operation that was flushed, but not committed. This would also
+ affect related operations like :func:`.make_transient`.
+
+ .. seealso::
+
+ :ref:`bug_3139`
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 3230
+
+ A warning is emitted in the case of multiple relationships that
+ ultimately will populate a foreign key column in conflict with
+ another, where the relationships are attempting to copy values
+ from different source columns. This occurs in the case where
+ composite foreign keys with overlapping columns are mapped to
+ relationships that each refer to a different referenced column.
+ A new documentation section illustrates the example as well as how
+ to overcome the issue by specifying "foreign" columns specifically
+ on a per-relationship basis.
+
+ .. seealso::
+
+ :ref:`relationship_overlapping_foreignkeys`
+
+ .. change::
+ :tags: feature, sql
+ :tickets: 3172
+
+ Exception messages have been spiffed up a bit. The SQL statement
+ and parameters are not displayed if None, reducing confusion for
+ error messages that weren't related to a statement. The full
+ module and classname for the DBAPI-level exception is displayed,
+ making it clear that this is a wrapped DBAPI exception. The
+      statement and parameters themselves are bounded within bracketed
+ sections to better isolate them from the error message and from
+ each other.
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 3228
+
+ The :meth:`.Query.update` method will now convert string key
+ names in the given dictionary of values into mapped attribute names
+ against the mapped class being updated. Previously, string names
+ were taken in directly and passed to the core update statement without
+      any means to resolve against the mapped entity. Synonyms and
+      hybrid attributes are also supported as the subject attributes
+      of :meth:`.Query.update`.
+
+ .. seealso::
+
+ :ref:`bug_3228`
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 3035
+
+ Improvements to the mechanism used by :class:`.Session` to locate
+      "binds" (e.g. engines to use); such engines can now be associated with
+ mixin classes, concrete subclasses, as well as a wider variety
+ of table metadata such as joined inheritance tables.
+
+ .. seealso::
+
+ :ref:`bug_3035`
+
+ .. change::
+ :tags: bug, general
+ :tickets: 3218
+
+ The ``__module__`` attribute is now set for all those SQL and
+ ORM functions that are derived as "public factory" symbols, which
+ should assist with documentation tools being able to report on the
+ target module.
+
+ .. change::
+ :tags: feature, sql
+
+ :meth:`.Insert.from_select` now includes Python and SQL-expression
+ defaults if otherwise unspecified; the limitation where non-
+ server column defaults aren't included in an INSERT FROM
+ SELECT is now lifted and these expressions are rendered as
+ constants into the SELECT statement.
+
+ .. seealso::
+
+ :ref:`feature_insert_from_select_defaults`
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 3233
+
+ Fixed bug in single table inheritance where a chain of joins
+ that included the same single inh entity more than once
+ (normally this should raise an error) could, in some cases
+ depending on what was being joined "from", implicitly alias the
+ second case of the single inh entity, producing
+ a query that "worked". But as this implicit aliasing is not
+ intended in the case of single table inheritance, it didn't
+ really "work" fully and was very misleading, since it wouldn't
+ always appear.
+
+ .. seealso::
+
+ :ref:`bug_3233`
+
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 3222
+
+ The ON clause rendered when using :meth:`.Query.join`,
+ :meth:`.Query.outerjoin`, or the standalone :func:`.orm.join` /
+ :func:`.orm.outerjoin` functions to a single-inheritance subclass will
+ now include the "single table criteria" in the ON clause even
+ if the ON clause is otherwise hand-rolled; it is now added to the
+ criteria using AND, the same way as if joining to a single-table
+ target using relationship or similar.
+
+      This is somewhere in between a feature and a bug.
+
+ .. seealso::
+
+ :ref:`migration_3222`
+
+ .. change::
+ :tags: feature, sql
+ :tickets: 3184
+ :pullreq: bitbucket:30
+
+ The :class:`.UniqueConstraint` construct is now included when
+ reflecting a :class:`.Table` object, for databases where this
+ is applicable. In order to achieve this
+ with sufficient accuracy, MySQL and Postgresql now contain features
+ that correct for the duplication of indexes and unique constraints
+ when reflecting tables, indexes, and constraints.
+ In the case of MySQL, there is not actually a "unique constraint"
+ concept independent of a "unique index", so for this backend
+      :class:`.UniqueConstraint` remains absent from a
+ reflected :class:`.Table`. For Postgresql, the query used to
+ detect indexes against ``pg_index`` has been improved to check for
+ the same construct in ``pg_constraint``, and the implicitly
+ constructed unique index is not included with a
+ reflected :class:`.Table`.
+
+ In both cases, the :meth:`.Inspector.get_indexes` and the
+ :meth:`.Inspector.get_unique_constraints` methods return both
+ constructs individually, but include a new token
+ ``duplicates_constraint`` in the case of Postgresql or
+ ``duplicates_index`` in the case
+ of MySQL to indicate when this condition is detected.
+ Pull request courtesy Johannes Erdfelt.
+
+ .. seealso::
+
+ :ref:`feature_3184`
+
+ .. change::
+ :tags: feature, postgresql
+ :pullreq: github:134
+
+ Added support for the FILTER keyword as applied to aggregate
+ functions, supported by Postgresql 9.4. Pull request
+ courtesy Ilja Everilä.
+
+ .. seealso::
+
+ :ref:`feature_gh134`
+
+ .. change::
+ :tags: bug, sql, engine
+ :tickets: 3215
+
+ Fixed bug where a "branched" connection, that is the kind you get
+ when you call :meth:`.Connection.connect`, would not share invalidation
+ status with the parent. The architecture of branching has been tweaked
+ a bit so that the branched connection defers to the parent for
+ all invalidation status and operations.
+
+ .. change::
+ :tags: bug, sql, engine
+ :tickets: 3190
+
+ Fixed bug where a "branched" connection, that is the kind you get
+ when you call :meth:`.Connection.connect`, would not share transaction
+ status with the parent. The architecture of branching has been tweaked
+ a bit so that the branched connection defers to the parent for
+ all transactional status and operations.
+
+ .. change::
+ :tags: bug, declarative
+ :tickets: 2670
+
+ A relationship set up with :class:`.declared_attr` on
+      an :class:`.AbstractConcreteBase` base class will now be configured
+ on the abstract base mapping automatically, in addition to being
+ set up on descendant concrete classes as usual.
+
+ .. seealso::
+
+ :ref:`feature_3150`
+
+ .. change::
+ :tags: feature, declarative
+ :tickets: 3150
+
+ The :class:`.declared_attr` construct has newly improved
+ behaviors and features in conjunction with declarative. The
+ decorated function will now have access to the final column
+ copies present on the local mixin when invoked, and will also
+ be invoked exactly once for each mapped class, the returned result
+ being memoized. A new modifier :attr:`.declared_attr.cascading`
+ is added as well.
+
+ .. seealso::
+
+ :ref:`feature_3150`
+
+ .. change::
+ :tags: feature, ext
+ :tickets: 3210
+
+ The :mod:`sqlalchemy.ext.automap` extension will now set
+ ``cascade="all, delete-orphan"`` automatically on a one-to-many
+ relationship/backref where the foreign key is detected as containing
+ one or more non-nullable columns. This argument is present in the
+ keywords passed to :func:`.automap.generate_relationship` in this
+ case and can still be overridden. Additionally, if the
+ :class:`.ForeignKeyConstraint` specifies ``ondelete="CASCADE"``
+ for a non-nullable or ``ondelete="SET NULL"`` for a nullable set
+ of columns, the argument ``passive_deletes=True`` is also added to the
+ relationship. Note that not all backends support reflection of
+ ondelete, but backends that do include Postgresql and MySQL.
+
+ .. change::
+ :tags: feature, sql
+ :tickets: 3206
+
+ Added new method :meth:`.Select.with_statement_hint` and ORM
+ method :meth:`.Query.with_statement_hint` to support statement-level
+ hints that are not specific to a table.
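+
+      A short usage sketch; the hint text itself is backend-specific
+      and purely illustrative here::
+
+          stmt = select([some_table]).with_statement_hint(
+              'OPTION (SOME HINT)', dialect_name='mssql')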
+
+ .. change::
+ :tags: bug, sqlite
+ :tickets: 3203
+ :pullreq: bitbucket:31
+
+ SQLite now supports reflection of unique constraints from
+ temp tables; previously, this would fail with a TypeError.
+ Pull request courtesy Johannes Erdfelt.
+
+ .. seealso::
+
+ :ref:`change_3204` - changes regarding SQLite temporary
+ table and view reflection.
+
+ .. change::
+ :tags: bug, sqlite
+ :tickets: 3204
+
+ Added :meth:`.Inspector.get_temp_table_names` and
+ :meth:`.Inspector.get_temp_view_names`; currently, only the
+ SQLite and Oracle dialects support these methods. The return of
+ temporary table and view names has been **removed** from SQLite and
+ Oracle's version of :meth:`.Inspector.get_table_names` and
+ :meth:`.Inspector.get_view_names`; other database backends cannot
+ support this information (such as MySQL), and the scope of operation
+ is different in that the tables can be local to a session and
+ typically aren't supported in remote schemas.
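+
+      A minimal usage sketch::
+
+          from sqlalchemy import create_engine, inspect
+
+          insp = inspect(create_engine('sqlite://'))
+          print(insp.get_temp_table_names())
+          print(insp.get_temp_view_names())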
+
+ .. seealso::
+
+ :ref:`change_3204`
+
+ .. change::
+ :tags: feature, postgresql
+ :tickets: 2891
+ :pullreq: github:128
+
+ Support has been added for reflection of materialized views
+ and foreign tables, as well as support for materialized views
+ within :meth:`.Inspector.get_view_names`, and a new method
+ :meth:`.PGInspector.get_foreign_table_names` available on the
+ Postgresql version of :class:`.Inspector`. Pull request courtesy
+ Rodrigo Menezes.
+
+ .. seealso::
+
+ :ref:`feature_2891`
+
+
+ .. change::
:tags: feature, orm
Added new event handlers :meth:`.AttributeEvents.init_collection`
@@ -268,6 +612,11 @@
default, or a server-side default "eagerly" fetched via RETURNING.
.. change::
+ :tags: feature, oracle
+
+ Added support for the Oracle table option ON COMMIT.
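+
+      A short sketch, assuming the ``oracle_on_commit`` keyword
+      argument added for this feature::
+
+          t = Table('foo', metadata, Column('id', Integer),
+                    prefixes=['GLOBAL TEMPORARY'],
+                    oracle_on_commit='PRESERVE ROWS')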
+
+ .. change::
:tags: feature, postgresql
:tickets: 2051
diff --git a/doc/build/changelog/migration_08.rst b/doc/build/changelog/migration_08.rst
index 717a24c73..fd153a925 100644
--- a/doc/build/changelog/migration_08.rst
+++ b/doc/build/changelog/migration_08.rst
@@ -683,6 +683,30 @@ as more string, integer and date operators.
:ticket:`2547`
+.. _feature_2623:
+
+Multiple-VALUES support for Insert
+-----------------------------------
+
+The :meth:`.Insert.values` method now supports a list of dictionaries,
+which will render a multi-VALUES statement such as
+``VALUES (<row1>), (<row2>), ...``. This is only relevant to backends which
+support this syntax, including Postgresql, SQLite, and MySQL. It is
+not the same thing as the usual ``executemany()`` style of INSERT which
+remains unchanged::
+
+ users.insert().values([
+ {"name": "some name"},
+ {"name": "some other name"},
+ {"name": "yet another name"},
+ ])
+
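+Against a backend using named bound parameters, this renders roughly as
+follows (the parameter names are illustrative)::
+
+    INSERT INTO users (name) VALUES (:name_1), (:name_2), (:name_3)
+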
+.. seealso::
+
+ :meth:`.Insert.values`
+
+:ticket:`2623`
+
Type Expressions
-----------------
diff --git a/doc/build/changelog/migration_10.rst b/doc/build/changelog/migration_10.rst
index 6a48b31fa..bc7fa139f 100644
--- a/doc/build/changelog/migration_10.rst
+++ b/doc/build/changelog/migration_10.rst
@@ -8,7 +8,7 @@ What's New in SQLAlchemy 1.0?
undergoing maintenance releases as of May, 2014,
and SQLAlchemy version 1.0, as of yet unreleased.
- Document last updated: September 7, 2014
+ Document last updated: October 23, 2014
Introduction
============
@@ -25,6 +25,141 @@ potentially backwards-incompatible changes.
New Features
============
+.. _feature_3150:
+
+Improvements to declarative mixins, ``@declared_attr`` and related features
+----------------------------------------------------------------------------
+
+The declarative system in conjunction with :class:`.declared_attr` has been
+overhauled to support new capabilities.
+
+A function decorated with :class:`.declared_attr` is now called only **after**
+any mixin-based column copies are generated. This means the function can
+call upon mixin-established columns and will receive a reference to the correct
+:class:`.Column` object::
+
+ class HasFooBar(object):
+ foobar = Column(Integer)
+
+ @declared_attr
+ def foobar_prop(cls):
+ return column_property('foobar: ' + cls.foobar)
+
+ class SomeClass(HasFooBar, Base):
+ __tablename__ = 'some_table'
+ id = Column(Integer, primary_key=True)
+
+Above, ``SomeClass.foobar_prop`` will be invoked against ``SomeClass``,
+and ``SomeClass.foobar`` will be the final :class:`.Column` object that is
+to be mapped to ``SomeClass``, as opposed to the non-copied object present
+directly on ``HasFooBar``, even though the columns aren't mapped yet.
+
+The :class:`.declared_attr` function now **memoizes** the value
+that's returned on a per-class basis, so that repeated calls to the same
+attribute will return the same value. We can alter the example to illustrate
+this::
+
+ class HasFooBar(object):
+ @declared_attr
+ def foobar(cls):
+ return Column(Integer)
+
+ @declared_attr
+ def foobar_prop(cls):
+ return column_property('foobar: ' + cls.foobar)
+
+ class SomeClass(HasFooBar, Base):
+ __tablename__ = 'some_table'
+ id = Column(Integer, primary_key=True)
+
+Previously, ``SomeClass`` would be mapped with one particular copy of
+the ``foobar`` column, but ``foobar_prop``, by calling upon ``foobar``
+a second time, would produce a different column.
+``SomeClass.foobar`` is now memoized during declarative setup time, so that
+even before the attribute is mapped by the mapper, the interim column
+value will remain consistent no matter how many times the
+:class:`.declared_attr` is called upon.
+
+The two behaviors above should help considerably with declarative definition
+of many types of mapper properties that derive from other attributes, where
+the :class:`.declared_attr` function is called upon from other
+:class:`.declared_attr` functions locally present before the class is
+actually mapped.
+
+For a pretty slim edge case where one wishes to build a declarative mixin
+that establishes distinct columns per subclass, a new modifier
+:attr:`.declared_attr.cascading` is added. With this modifier, the
+decorated function will be invoked individually for each class in the
+mapped inheritance hierarchy. While this is already the behavior for
+special attributes such as ``__table_args__`` and ``__mapper_args__``,
+for columns and other properties the behavior by default assumes that the
+attribute is affixed to the base class only and merely inherited by subclasses.
+With :attr:`.declared_attr.cascading`, individual behaviors can be
+applied::
+
+ class HasSomeAttribute(object):
+ @declared_attr.cascading
+ def some_id(cls):
+ if has_inherited_table(cls):
+ return Column(ForeignKey('myclass.id'), primary_key=True)
+ else:
+ return Column(Integer, primary_key=True)
+
+ class MyClass(HasSomeAttribute, Base):
+ ""
+ # ...
+
+ class MySubClass(MyClass):
+ ""
+ # ...
+
+.. seealso::
+
+ :ref:`mixin_inheritance_columns`
+
+Finally, the :class:`.AbstractConcreteBase` class has been reworked
+so that a relationship or other mapper property can be set up inline
+on the abstract base::
+
+ from sqlalchemy import Column, Integer, ForeignKey
+ from sqlalchemy.orm import relationship
+ from sqlalchemy.ext.declarative import (declarative_base, declared_attr,
+ AbstractConcreteBase)
+
+ Base = declarative_base()
+
+ class Something(Base):
+ __tablename__ = u'something'
+ id = Column(Integer, primary_key=True)
+
+
+ class Abstract(AbstractConcreteBase, Base):
+ id = Column(Integer, primary_key=True)
+
+ @declared_attr
+ def something_id(cls):
+ return Column(ForeignKey(Something.id))
+
+ @declared_attr
+ def something(cls):
+ return relationship(Something)
+
+
+ class Concrete(Abstract):
+ __tablename__ = u'cca'
+ __mapper_args__ = {'polymorphic_identity': 'cca', 'concrete': True}
+
+
+The above mapping will set up a table ``cca`` with both an ``id`` and
+a ``something_id`` column, and ``Concrete`` will also have a relationship
+``something``. The new feature is that ``Abstract`` will also have an
+independently configured relationship ``something`` that builds against
+the polymorphic union of the base.
+
+:ticket:`3150` :ticket:`2670` :ticket:`3149` :ticket:`2952` :ticket:`3050`
+
.. _feature_3034:
Select/Query LIMIT / OFFSET may be specified as an arbitrary SQL expression
@@ -37,7 +172,7 @@ any SQL expression, in addition to integer values, as arguments. The ORM
this is used to allow a bound parameter to be passed, which can be substituted
with a value later::
- sel = select([table]).limit(bindparam('mylimit')).offset(bindparam('myoffset'))
+ sel = select([table]).limit(bindparam('mylimit')).offset(bindparam('myoffset'))
Dialects which don't support non-integer LIMIT or OFFSET expressions may continue
to not support this behavior; third party dialects may also need modification
@@ -50,6 +185,170 @@ wishes to support the new feature should now call upon the ``._limit_clause``
and ``._offset_clause`` attributes to receive the full SQL expression, rather
than the integer value.
+.. _change_2051:
+
+.. _feature_insert_from_select_defaults:
+
+INSERT FROM SELECT now includes Python and SQL-expression defaults
+-------------------------------------------------------------------
+
+:meth:`.Insert.from_select` now includes Python and SQL-expression defaults if
+otherwise unspecified; the limitation where non-server column defaults
+aren't included in an INSERT FROM SELECT is now lifted and these
+expressions are rendered as constants into the SELECT statement::
+
+ from sqlalchemy import Table, Column, MetaData, Integer, select, func
+
+ m = MetaData()
+
+ t = Table(
+ 't', m,
+ Column('x', Integer),
+ Column('y', Integer, default=func.somefunction()))
+
+ stmt = select([t.c.x])
+ print t.insert().from_select(['x'], stmt)
+
+Will render::
+
+ INSERT INTO t (x, y) SELECT t.x, somefunction() AS somefunction_1
+ FROM t
+
+The feature can be disabled using
+:paramref:`.Insert.from_select.include_defaults`.
+
+New Postgresql Table options
+-----------------------------
+
+Added support for PG table options TABLESPACE, ON COMMIT,
+WITH(OUT) OIDS, and INHERITS, when rendering DDL via
+the :class:`.Table` construct.
+
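+A brief sketch using the corresponding ``postgresql_*`` keyword arguments
+(ON COMMIT applies to temporary tables; names are illustrative)::
+
+    Table(
+        'some_table', metadata,
+        Column('id', Integer),
+        postgresql_tablespace='my_tablespace',
+        postgresql_with_oids=True,
+        postgresql_inherits='some_supertable')
+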
+.. seealso::
+
+ :ref:`postgresql_table_options`
+
+:ticket:`2051`
+
+.. _feature_get_enums:
+
+New get_enums() method with Postgresql Dialect
+----------------------------------------------
+
+The :func:`.inspect` method returns a :class:`.PGInspector` object in the
+case of Postgresql, which includes a new :meth:`.PGInspector.get_enums`
+method that returns information on all available ``ENUM`` types::
+
+ from sqlalchemy import inspect, create_engine
+
+ engine = create_engine("postgresql+psycopg2://host/dbname")
+ insp = inspect(engine)
+ print(insp.get_enums())
+
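+The return format is a list of dictionaries, one per ENUM type, roughly
+of this shape (values illustrative)::
+
+    [{'name': 'mood', 'schema': 'public',
+      'visible': True, 'labels': ['sad', 'ok', 'happy']}]
+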
+.. seealso::
+
+ :meth:`.PGInspector.get_enums`
+
+.. _feature_2891:
+
+Postgresql Dialect reflects Materialized Views, Foreign Tables
+--------------------------------------------------------------
+
+Changes are as follows:
+
+* the :class:`Table` construct with ``autoload=True`` will now match a name
+  that exists in the database as a materialized view or foreign table.
+
+* :meth:`.Inspector.get_view_names` will return plain and materialized view
+ names.
+
+* :meth:`.Inspector.get_table_names` does **not** change for Postgresql, it
+ continues to return only the names of plain tables.
+
+* A new method :meth:`.PGInspector.get_foreign_table_names` is added which
+ will return the names of tables that are specifically marked as "foreign"
+ in the Postgresql schema tables.
+
+The change to reflection involves adding ``'m'`` and ``'f'`` to the list
+of qualifiers we use when querying ``pg_class.relkind``, but this change
+is new in 1.0.0 to avoid any backwards-incompatible surprises for those
+running 0.9 in production.
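+
+A brief usage sketch (names illustrative)::
+
+    from sqlalchemy import MetaData, Table, create_engine, inspect
+
+    engine = create_engine("postgresql+psycopg2://host/dbname")
+    insp = inspect(engine)
+
+    print(insp.get_view_names())           # plain and materialized views
+    print(insp.get_foreign_table_names())  # foreign tables only
+
+    mat_view = Table('my_mat_view', MetaData(),
+                     autoload=True, autoload_with=engine)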
+
+:ticket:`2891`
+
+.. _feature_gh134:
+
+Postgresql FILTER keyword
+-------------------------
+
+The SQL standard FILTER keyword for aggregate functions is now supported
+by Postgresql as of 9.4. SQLAlchemy allows this using
+:meth:`.FunctionElement.filter`::
+
+ func.count(1).filter(True)
+
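+A slightly fuller sketch against a hypothetical table ``things``, with
+the approximate rendering shown in the comment::
+
+    from sqlalchemy import func, select
+
+    stmt = select([func.count(things.c.id).filter(things.c.x > 5)])
+    # SELECT count(things.id) FILTER (WHERE things.x > :x_1) AS anon_1
+    # FROM things
+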
+.. seealso::
+
+ :meth:`.FunctionElement.filter`
+
+ :class:`.FunctionFilter`
+
+.. _feature_3184:
+
+UniqueConstraint is now part of the Table reflection process
+------------------------------------------------------------
+
+A :class:`.Table` object populated using ``autoload=True`` will now
+include :class:`.UniqueConstraint` constructs as well as
+:class:`.Index` constructs. This logic has a few caveats for
+Postgresql and MySQL:
+
+Postgresql
+^^^^^^^^^^
+
+Postgresql has the behavior such that when a UNIQUE constraint is
+created, it implicitly creates a UNIQUE INDEX corresponding to that
+constraint as well. The :meth:`.Inspector.get_indexes` and the
+:meth:`.Inspector.get_unique_constraints` methods will continue to
+**both** return these entries distinctly, where
+:meth:`.Inspector.get_indexes` now features a token
+``duplicates_constraint`` within the index entry indicating the
+corresponding constraint when detected. However, when performing
+full table reflection using ``Table(..., autoload=True)``, the
+:class:`.Index` construct is detected as being linked to the
+:class:`.UniqueConstraint`, and is **not** present within the
+:attr:`.Table.indexes` collection; only the :class:`.UniqueConstraint`
+will be present in the :attr:`.Table.constraints` collection. This
+deduplication logic works by joining to the ``pg_constraint`` table
+when querying ``pg_index`` to see if the two constructs are linked.
+
+MySQL
+^^^^^
+
+MySQL does not have separate concepts for a UNIQUE INDEX and a UNIQUE
+constraint. While it supports both syntaxes when creating tables and indexes,
+it does not store them any differently. The
+:meth:`.Inspector.get_indexes`
+and the :meth:`.Inspector.get_unique_constraints` methods will continue to
+**both** return an entry for a UNIQUE index in MySQL,
+where :meth:`.Inspector.get_unique_constraints` features a new token
+``duplicates_index`` within the constraint entry indicating that this is a
+dupe entry corresponding to that index. However, when performing
+full table reflection using ``Table(..., autoload=True)``,
+the :class:`.UniqueConstraint` construct is
+**not** part of the fully reflected :class:`.Table` construct under any
+circumstances; this construct is always represented by a :class:`.Index`
+with the ``unique=True`` setting present in the :attr:`.Table.indexes`
+collection.
+
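+For both backends, a sketch of spotting the de-duplication tokens via
+the :class:`.Inspector` methods (table name illustrative)::
+
+    insp = inspect(engine)
+
+    for idx in insp.get_indexes('t'):
+        print(idx.get('duplicates_constraint'))   # Postgresql
+
+    for uc in insp.get_unique_constraints('t'):
+        print(uc.get('duplicates_index'))         # MySQL
+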
+.. seealso::
+
+ :ref:`postgresql_index_reflection`
+
+ :ref:`mysql_unique_constraints`
+
+:ticket:`3184`
+
Behavioral Improvements
=======================
@@ -82,35 +381,35 @@ that a raw load of rows now populates ORM-based objects around 25% faster.
Assuming a 1M row table, a script like the following illustrates the type
of load that's improved the most::
- import time
- from sqlalchemy import Integer, Column, create_engine, Table
- from sqlalchemy.orm import Session
- from sqlalchemy.ext.declarative import declarative_base
+ import time
+ from sqlalchemy import Integer, Column, create_engine, Table
+ from sqlalchemy.orm import Session
+ from sqlalchemy.ext.declarative import declarative_base
- Base = declarative_base()
+ Base = declarative_base()
- class Foo(Base):
- __table__ = Table(
- 'foo', Base.metadata,
- Column('id', Integer, primary_key=True),
- Column('a', Integer(), nullable=False),
- Column('b', Integer(), nullable=False),
- Column('c', Integer(), nullable=False),
- )
+ class Foo(Base):
+ __table__ = Table(
+ 'foo', Base.metadata,
+ Column('id', Integer, primary_key=True),
+ Column('a', Integer(), nullable=False),
+ Column('b', Integer(), nullable=False),
+ Column('c', Integer(), nullable=False),
+ )
- engine = create_engine(
- 'mysql+mysqldb://scott:tiger@localhost/test', echo=True)
+ engine = create_engine(
+ 'mysql+mysqldb://scott:tiger@localhost/test', echo=True)
- sess = Session(engine)
+ sess = Session(engine)
- now = time.time()
+ now = time.time()
- # avoid using all() so that we don't have the overhead of building
- # a large list of full objects in memory
- for obj in sess.query(Foo).yield_per(100).limit(1000000):
- pass
+ # avoid using all() so that we don't have the overhead of building
+ # a large list of full objects in memory
+ for obj in sess.query(Foo).yield_per(100).limit(1000000):
+ pass
- print("Total time: %d" % (time.time() - now))
+ print("Total time: %d" % (time.time() - now))
Local MacBookPro results bench from 19 seconds for 0.9 down to 14 seconds for
1.0. The :meth:`.Query.yield_per` call is always a good idea when batching
@@ -121,7 +420,6 @@ MacBookPro is 31 seconds on 0.9 and 26 seconds on 1.0, the extra time spent
setting up very large memory buffers.
-
.. _feature_3176:
New KeyedTuple implementation dramatically faster
@@ -130,7 +428,7 @@ New KeyedTuple implementation dramatically faster
We took a look into the :class:`.KeyedTuple` implementation in the hopes
of improving queries like this::
- rows = sess.query(Foo.a, Foo.b, Foo.c).all()
+ rows = sess.query(Foo.a, Foo.b, Foo.c).all()
The :class:`.KeyedTuple` class is used rather than Python's
``collections.namedtuple()``, because the latter has a very complex
@@ -146,30 +444,73 @@ which scenario. In the "sweet spot", where we are both creating a good number
of new types as well as fetching a good number of rows, the lightweight
object totally smokes both namedtuple and KeyedTuple::
- -----------------
- size=10 num=10000 # few rows, lots of queries
- namedtuple: 3.60302400589 # namedtuple falls over
- keyedtuple: 0.255059957504 # KeyedTuple very fast
- lw keyed tuple: 0.582715034485 # lw keyed trails right on KeyedTuple
- -----------------
- size=100 num=1000 # <--- sweet spot
- namedtuple: 0.365247011185
- keyedtuple: 0.24896979332
- lw keyed tuple: 0.0889317989349 # lw keyed blows both away!
- -----------------
- size=10000 num=100
- namedtuple: 0.572599887848
- keyedtuple: 2.54251694679
- lw keyed tuple: 0.613876104355
- -----------------
- size=1000000 num=10 # few queries, lots of rows
- namedtuple: 5.79669594765 # namedtuple very fast
- keyedtuple: 28.856498003 # KeyedTuple falls over
- lw keyed tuple: 6.74346804619 # lw keyed trails right on namedtuple
+ -----------------
+ size=10 num=10000 # few rows, lots of queries
+ namedtuple: 3.60302400589 # namedtuple falls over
+ keyedtuple: 0.255059957504 # KeyedTuple very fast
+ lw keyed tuple: 0.582715034485 # lw keyed trails right on KeyedTuple
+ -----------------
+ size=100 num=1000 # <--- sweet spot
+ namedtuple: 0.365247011185
+ keyedtuple: 0.24896979332
+ lw keyed tuple: 0.0889317989349 # lw keyed blows both away!
+ -----------------
+ size=10000 num=100
+ namedtuple: 0.572599887848
+ keyedtuple: 2.54251694679
+ lw keyed tuple: 0.613876104355
+ -----------------
+ size=1000000 num=10 # few queries, lots of rows
+ namedtuple: 5.79669594765 # namedtuple very fast
+ keyedtuple: 28.856498003 # KeyedTuple falls over
+ lw keyed tuple: 6.74346804619 # lw keyed trails right on namedtuple
:ticket:`3176`
+.. _bug_3035:
+
+Session.get_bind() handles a wider variety of inheritance scenarios
+-------------------------------------------------------------------
+
+The :meth:`.Session.get_bind` method is invoked whenever a query or unit
+of work flush process seeks to locate the database engine that corresponds
+to a particular class. The method has been improved to handle a variety
+of inheritance-oriented scenarios, including:
+
+* Binding to a Mixin or Abstract Class::
+
+ class MyClass(SomeMixin, Base):
+ __tablename__ = 'my_table'
+ # ...
+
+ session = Session(binds={SomeMixin: some_engine})
+
+
+* Binding to inherited concrete subclasses individually based on table::
+
+ class BaseClass(Base):
+ __tablename__ = 'base'
+
+ # ...
+
+ class ConcreteSubClass(BaseClass):
+ __tablename__ = 'concrete'
+
+ # ...
+
+ __mapper_args__ = {'concrete': True}
+
+
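+      # assumption: base_table / concrete_table refer to the Table
+      # objects, e.g. BaseClass.__table__ / ConcreteSubClass.__table__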
+ session = Session(binds={
+ base_table: some_engine,
+ concrete_table: some_other_engine
+ })
+
+
+:ticket:`3035`
+
+
.. _feature_3178:
New systems to safely emit parameterized warnings
@@ -195,27 +536,27 @@ them as duplicates.
To illustrate, the following test script will show only ten warnings being
emitted for ten of the parameter sets, out of a total of 1000::
- from sqlalchemy import create_engine, Unicode, select, cast
- import random
- import warnings
+ from sqlalchemy import create_engine, Unicode, select, cast
+ import random
+ import warnings
- e = create_engine("sqlite://")
+ e = create_engine("sqlite://")
- # Use the "once" filter (which is also the default for Python
- # warnings). Exactly ten of these warnings will
- # be emitted; beyond that, the Python warnings registry will accumulate
- # new values as dupes of one of the ten existing.
- warnings.filterwarnings("once")
+ # Use the "once" filter (which is also the default for Python
+ # warnings). Exactly ten of these warnings will
+ # be emitted; beyond that, the Python warnings registry will accumulate
+ # new values as dupes of one of the ten existing.
+ warnings.filterwarnings("once")
- for i in range(1000):
- e.execute(select([cast(
- ('foo_%d' % random.randint(0, 1000000)).encode('ascii'), Unicode)]))
+ for i in range(1000):
+ e.execute(select([cast(
+ ('foo_%d' % random.randint(0, 1000000)).encode('ascii'), Unicode)]))
The format of the warning here is::
- /path/lib/sqlalchemy/sql/sqltypes.py:186: SAWarning: Unicode type received
- non-unicode bind param value 'foo_4852'. (this warning may be
- suppressed after 10 occurrences)
+ /path/lib/sqlalchemy/sql/sqltypes.py:186: SAWarning: Unicode type received
+ non-unicode bind param value 'foo_4852'. (this warning may be
+ suppressed after 10 occurrences)
:ticket:`3178`
@@ -233,15 +574,15 @@ However, as these objects are class-bound descriptors, they must be accessed
at the attribute. Below this is illustrated using the
:attr:`.Mapper.all_orm_descriptors` namespace::
- class SomeObject(Base):
- # ...
+ class SomeObject(Base):
+ # ...
- @hybrid_property
- def some_prop(self):
- return self.value + 5
+ @hybrid_property
+ def some_prop(self):
+ return self.value + 5
- inspect(SomeObject).all_orm_descriptors.some_prop.info['foo'] = 'bar'
+ inspect(SomeObject).all_orm_descriptors.some_prop.info['foo'] = 'bar'
It is also available as a constructor argument for all :class:`.SchemaItem`
objects (e.g. :class:`.ForeignKey`, :class:`.UniqueConstraint` etc.) as well
@@ -258,26 +599,26 @@ Change to single-table-inheritance criteria when using from_self(), count()
Given a single-table inheritance mapping, such as::
- class Widget(Base):
- __table__ = 'widget_table'
+ class Widget(Base):
+        __tablename__ = 'widget_table'
- class FooWidget(Widget):
- pass
+ class FooWidget(Widget):
+ pass
Using :meth:`.Query.from_self` or :meth:`.Query.count` against a subclass
would produce a subquery, but then add the "WHERE" criteria for subtypes
to the outside::
- sess.query(FooWidget).from_self().all()
+ sess.query(FooWidget).from_self().all()
rendering::
- SELECT
- anon_1.widgets_id AS anon_1_widgets_id,
- anon_1.widgets_type AS anon_1_widgets_type
- FROM (SELECT widgets.id AS widgets_id, widgets.type AS widgets_type,
- FROM widgets) AS anon_1
- WHERE anon_1.widgets_type IN (?)
+ SELECT
+ anon_1.widgets_id AS anon_1_widgets_id,
+ anon_1.widgets_type AS anon_1_widgets_type
+    FROM (SELECT widgets.id AS widgets_id, widgets.type AS widgets_type
+ FROM widgets) AS anon_1
+ WHERE anon_1.widgets_type IN (?)
The issue with this is that if the inner query does not specify all
columns, then we can't add the WHERE clause on the outside (it actually tries,
@@ -286,28 +627,93 @@ apparently goes way back to 0.6.5 with the note "may need to make more
adjustments to this". Well, those adjustments have arrived! So now the
above query will render::
- SELECT
- anon_1.widgets_id AS anon_1_widgets_id,
- anon_1.widgets_type AS anon_1_widgets_type
- FROM (SELECT widgets.id AS widgets_id, widgets.type AS widgets_type,
- FROM widgets
- WHERE widgets.type IN (?)) AS anon_1
+ SELECT
+ anon_1.widgets_id AS anon_1_widgets_id,
+ anon_1.widgets_type AS anon_1_widgets_type
+    FROM (SELECT widgets.id AS widgets_id, widgets.type AS widgets_type
+ FROM widgets
+ WHERE widgets.type IN (?)) AS anon_1
So that queries that don't include "type" will still work!::
- sess.query(FooWidget.id).count()
+ sess.query(FooWidget.id).count()
Renders::
- SELECT count(*) AS count_1
- FROM (SELECT widgets.id AS widgets_id
- FROM widgets
- WHERE widgets.type IN (?)) AS anon_1
+ SELECT count(*) AS count_1
+ FROM (SELECT widgets.id AS widgets_id
+ FROM widgets
+ WHERE widgets.type IN (?)) AS anon_1
:ticket:`3177`
+.. _migration_3222:
+
+
+single-table-inheritance criteria added to all ON clauses unconditionally
+-------------------------------------------------------------------------
+
+When joining to a single-table inheritance subclass target, the ORM always adds
+the "single table criteria" when joining on a relationship. Given a
+mapping as::
+
+ class Widget(Base):
+ __tablename__ = 'widget'
+ id = Column(Integer, primary_key=True)
+ type = Column(String)
+ related_id = Column(ForeignKey('related.id'))
+ related = relationship("Related", backref="widget")
+ __mapper_args__ = {'polymorphic_on': type}
+
+
+ class FooWidget(Widget):
+ __mapper_args__ = {'polymorphic_identity': 'foo'}
+
+
+ class Related(Base):
+ __tablename__ = 'related'
+ id = Column(Integer, primary_key=True)
+
+It's been the behavior for quite some time that a JOIN on the relationship
+will render a "single inheritance" clause for the type::
+
+ s.query(Related).join(FooWidget, Related.widget).all()
+
+SQL output::
+
+ SELECT related.id AS related_id
+ FROM related JOIN widget ON related.id = widget.related_id AND widget.type IN (:type_1)
+
+Above, because we joined to a subclass ``FooWidget``, :meth:`.Query.join`
+knew to add the ``AND widget.type IN ('foo')`` criteria to the ON clause.
+
+The change here is that the ``AND widget.type IN()`` criteria is now appended
+to *any* ON clause, not just those generated from a relationship,
+including one that is explicitly stated::
+
+ # ON clause will now render as
+ # related.id = widget.related_id AND widget.type IN (:type_1)
+ s.query(Related).join(FooWidget, FooWidget.related_id == Related.id).all()
+
+As well as the "implicit" join when no ON clause of any kind is stated::
+
+ # ON clause will now render as
+ # related.id = widget.related_id AND widget.type IN (:type_1)
+ s.query(Related).join(FooWidget).all()
+
+Previously, the ON clause for these would not include the single-inheritance
+criteria. Applications that are already adding this criteria to work around
+this will want to remove its explicit use, though it should continue to work
+fine if the criteria happens to be rendered twice in the meantime.
+
+.. seealso::
+
+ :ref:`bug_3233`
+
+:ticket:`3222`
+
.. _bug_3188:
ColumnProperty constructs work a lot better with aliases, order_by
@@ -319,67 +725,67 @@ as the "order by label" logic introduced in 0.9 (see :ref:`migration_1068`).
Given a mapping like the following::
- class A(Base):
- __tablename__ = 'a'
+ class A(Base):
+ __tablename__ = 'a'
- id = Column(Integer, primary_key=True)
+ id = Column(Integer, primary_key=True)
- class B(Base):
- __tablename__ = 'b'
+ class B(Base):
+ __tablename__ = 'b'
- id = Column(Integer, primary_key=True)
- a_id = Column(ForeignKey('a.id'))
+ id = Column(Integer, primary_key=True)
+ a_id = Column(ForeignKey('a.id'))
- A.b = column_property(
- select([func.max(B.id)]).where(B.a_id == A.id).correlate(A)
- )
+ A.b = column_property(
+ select([func.max(B.id)]).where(B.a_id == A.id).correlate(A)
+ )
A simple scenario that included "A.b" twice would fail to render
correctly::
- print sess.query(A, a1).order_by(a1.b)
+ print sess.query(A, a1).order_by(a1.b)
This would order by the wrong column::
- SELECT a.id AS a_id, (SELECT max(b.id) AS max_1 FROM b
- WHERE b.a_id = a.id) AS anon_1, a_1.id AS a_1_id,
- (SELECT max(b.id) AS max_2
- FROM b WHERE b.a_id = a_1.id) AS anon_2
- FROM a, a AS a_1 ORDER BY anon_1
+ SELECT a.id AS a_id, (SELECT max(b.id) AS max_1 FROM b
+ WHERE b.a_id = a.id) AS anon_1, a_1.id AS a_1_id,
+ (SELECT max(b.id) AS max_2
+ FROM b WHERE b.a_id = a_1.id) AS anon_2
+ FROM a, a AS a_1 ORDER BY anon_1
New output::
- SELECT a.id AS a_id, (SELECT max(b.id) AS max_1
- FROM b WHERE b.a_id = a.id) AS anon_1, a_1.id AS a_1_id,
- (SELECT max(b.id) AS max_2
- FROM b WHERE b.a_id = a_1.id) AS anon_2
- FROM a, a AS a_1 ORDER BY anon_2
+ SELECT a.id AS a_id, (SELECT max(b.id) AS max_1
+ FROM b WHERE b.a_id = a.id) AS anon_1, a_1.id AS a_1_id,
+ (SELECT max(b.id) AS max_2
+ FROM b WHERE b.a_id = a_1.id) AS anon_2
+ FROM a, a AS a_1 ORDER BY anon_2
There were also many scenarios where the "order by" logic would fail
to order by label, for example if the mapping were "polymorphic"::
- class A(Base):
- __tablename__ = 'a'
+ class A(Base):
+ __tablename__ = 'a'
- id = Column(Integer, primary_key=True)
- type = Column(String)
+ id = Column(Integer, primary_key=True)
+ type = Column(String)
- __mapper_args__ = {'polymorphic_on': type, 'with_polymorphic': '*'}
+ __mapper_args__ = {'polymorphic_on': type, 'with_polymorphic': '*'}
The order_by would fail to use the label, as it would be anonymized due
to the polymorphic loading::
- SELECT a.id AS a_id, a.type AS a_type, (SELECT max(b.id) AS max_1
- FROM b WHERE b.a_id = a.id) AS anon_1
- FROM a ORDER BY (SELECT max(b.id) AS max_2
- FROM b WHERE b.a_id = a.id)
+ SELECT a.id AS a_id, a.type AS a_type, (SELECT max(b.id) AS max_1
+ FROM b WHERE b.a_id = a.id) AS anon_1
+ FROM a ORDER BY (SELECT max(b.id) AS max_2
+ FROM b WHERE b.a_id = a.id)
Now that the order by label tracks the anonymized label, this now works::
- SELECT a.id AS a_id, a.type AS a_type, (SELECT max(b.id) AS max_1
- FROM b WHERE b.a_id = a.id) AS anon_1
- FROM a ORDER BY anon_1
+ SELECT a.id AS a_id, a.type AS a_type, (SELECT max(b.id) AS max_1
+ FROM b WHERE b.a_id = a.id) AS anon_1
+ FROM a ORDER BY anon_1
Included in these fixes are a variety of heisenbugs that could corrupt
the state of an ``aliased()`` construct such that the labeling logic
@@ -387,11 +793,88 @@ would again fail; these have also been fixed.
:ticket:`3148` :ticket:`3188`
+.. _bug_3170:
+
+null(), false() and true() constants are no longer singletons
+-------------------------------------------------------------
+
+These three constants were changed to return a "singleton" value
+in 0.9; unfortunately, that would cause a query like the following
+not to render as expected::
+
+ select([null(), null()])
+
+rendering only ``SELECT NULL AS anon_1``, because the two :func:`.null`
+constructs would come out as the same ``NULL`` object, and
+SQLAlchemy's Core model is based on object identity in order to
+determine lexical significance. The change in 0.9 had no
+importance other than the desire to save on object overhead; in general,
+an unnamed construct needs to stay lexically unique so that it gets
+labeled uniquely.
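+
+With 1.0, each :func:`.null` is again a distinct object, so the above
+statement once more renders both columns, along the lines of
+``SELECT NULL AS anon_1, NULL AS anon_2`` (label names illustrative).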
+
+:ticket:`3170`
+
.. _behavioral_changes_orm_10:
Behavioral Changes - ORM
========================
+.. _bug_3228:
+
+query.update() now resolves string names into mapped attribute names
+--------------------------------------------------------------------
+
+The documentation for :meth:`.Query.update` states that the given
+``values`` dictionary is "a dictionary with attributes names as keys",
+implying that these are mapped attribute names. Unfortunately, the function
+was designed more in mind to receive attributes and SQL expressions and
+not as much strings; when strings
+were passed, these strings would be passed through straight to the core
+update statement without any resolution as far as how these names are
+represented on the mapped class, meaning the name would have to match that
+of a table column exactly, not how an attribute of that name was mapped
+onto the class.
+
+The string names are now resolved as attribute names in earnest::
+
+ class User(Base):
+ __tablename__ = 'user'
+
+ id = Column(Integer, primary_key=True)
+ name = Column('user_name', String(50))
+
+Above, the column ``user_name`` is mapped as ``name``. Previously,
+a call to :meth:`.Query.update` that was passed strings would have to
+have been called as follows::
+
+ session.query(User).update({'user_name': 'moonbeam'})
+
+The given string is now resolved against the entity::
+
+ session.query(User).update({'name': 'moonbeam'})
+
+It is typically preferable to use the attribute directly, to avoid any
+ambiguity::
+
+ session.query(User).update({User.name: 'moonbeam'})
+
+The change also means that synonyms and hybrid attributes can be referred
+to by string name as well::
+
+ class User(Base):
+ __tablename__ = 'user'
+
+ id = Column(Integer, primary_key=True)
+ name = Column('user_name', String(50))
+
+ @hybrid_property
+ def fullname(self):
+ return self.name
+
+ session.query(User).update({'fullname': 'moonbeam'})
+
+:ticket:`3228`
+
.. _migration_3061:
Changes to attribute events and other operations regarding attributes that have no pre-existing value
@@ -406,13 +889,13 @@ for :func:`.attributes.get_history` and related functions.
Given an object with no state::
- >>> obj = Foo()
+ >>> obj = Foo()
It has always been SQLAlchemy's behavior such that if we access a scalar
or many-to-one attribute that was never set, it is returned as ``None``::
- >>> obj.someattr
- None
+ >>> obj.someattr
+ None
This value of ``None`` is in fact now part of the state of ``obj``, and is
not unlike as though we had set the attribute explicitly, e.g.
@@ -420,31 +903,31 @@ not unlike as though we had set the attribute explicitly, e.g.
differently as far as history and events. It would not emit any attribute
event, and additionally if we view history, we see this::
- >>> inspect(obj).attrs.someattr.history
- History(added=(), unchanged=[None], deleted=()) # 0.9 and below
+ >>> inspect(obj).attrs.someattr.history
+ History(added=(), unchanged=[None], deleted=()) # 0.9 and below
That is, it's as though the attribute were always ``None`` and were
never changed. This is explicitly different from if we had set the
attribute first instead::
- >>> obj = Foo()
- >>> obj.someattr = None
- >>> inspect(obj).attrs.someattr.history
- History(added=[None], unchanged=(), deleted=()) # all versions
+ >>> obj = Foo()
+ >>> obj.someattr = None
+ >>> inspect(obj).attrs.someattr.history
+ History(added=[None], unchanged=(), deleted=()) # all versions
The above means that the behavior of our "set" operation can be corrupted
by the fact that the value was accessed via "get" earlier. In 1.0, this
inconsistency has been resolved by no longer actually setting anything
when the default "getter" is used.
- >>> obj = Foo()
- >>> obj.someattr
- None
- >>> inspect(obj).attrs.someattr.history
- History(added=(), unchanged=(), deleted=()) # 1.0
- >>> obj.someattr = None
- >>> inspect(obj).attrs.someattr.history
- History(added=[None], unchanged=(), deleted=())
+ >>> obj = Foo()
+ >>> obj.someattr
+ None
+ >>> inspect(obj).attrs.someattr.history
+ History(added=(), unchanged=(), deleted=()) # 1.0
+ >>> obj.someattr = None
+ >>> inspect(obj).attrs.someattr.history
+ History(added=[None], unchanged=(), deleted=())
The reason the above behavior hasn't had much impact is that the
INSERT statement in relational databases considers a missing value to be
@@ -469,6 +952,39 @@ symbol, and no change to the object's state occurs.
:ticket:`3061`
+.. _bug_3139:
+
+session.expunge() will fully detach an object that's been deleted
+-----------------------------------------------------------------
+
+The :meth:`.Session.expunge` method had a bug that caused
+inconsistent behavior regarding deleted objects. The
+:func:`.object_session` function as well as the :attr:`.InstanceState.session`
+attribute would still report the object as belonging to the :class:`.Session`
+subsequent to the expunge::
+
+ u1 = sess.query(User).first()
+ sess.delete(u1)
+
+ sess.flush()
+
+ assert u1 not in sess
+ assert inspect(u1).session is sess # this is normal before commit
+
+ sess.expunge(u1)
+
+ assert u1 not in sess
+ assert inspect(u1).session is None # would fail
+
+Note that it is normal for ``u1 not in sess`` to be True while
+``inspect(u1).session`` still refers to the session, as long as the
+transaction is ongoing subsequent to the delete operation and
+:meth:`.Session.expunge` has not been called; the full detachment
+normally completes once the transaction is committed. This issue would
+also impact functions that rely on :meth:`.Session.expunge`, such as
+:func:`.make_transient`.
+
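+As a sketch building on the example above, :func:`.make_transient`,
+which makes use of :meth:`.Session.expunge` internally, now also leaves
+the deleted object fully detached::
+
+    from sqlalchemy.orm import make_transient
+
+    u1 = sess.query(User).first()
+    sess.delete(u1)
+    sess.flush()
+
+    make_transient(u1)
+    assert inspect(u1).session is None  # passes in 1.0
+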
+:ticket:`3139`
+
.. _migration_yield_per_eager_loading:
Joined/Subquery eager loading explicitly disallowed with yield_per
@@ -482,17 +998,102 @@ with yield-per (subquery loading could be in theory, however).
When this error is raised, the :func:`.lazyload` option can be sent with
an asterisk::
- q = sess.query(Object).options(lazyload('*')).yield_per(100)
+ q = sess.query(Object).options(lazyload('*')).yield_per(100)
or use :meth:`.Query.enable_eagerloads`::
- q = sess.query(Object).enable_eagerloads(False).yield_per(100)
+ q = sess.query(Object).enable_eagerloads(False).yield_per(100)
The :func:`.lazyload` option has the advantage that additional many-to-one
joined loader options can still be used::
- q = sess.query(Object).options(
- lazyload('*'), joinedload("some_manytoone")).yield_per(100)
+ q = sess.query(Object).options(
+ lazyload('*'), joinedload("some_manytoone")).yield_per(100)
+
+.. _bug_3233:
+
+Single inheritance join targets will no longer sometimes implicitly alias themselves
+------------------------------------------------------------------------------------
+
+This is a bug where unexpected and inconsistent behavior would occur
+in some scenarios when joining to a single-table-inheritance entity. The
+difficulty is that the query is supposed to raise an error, as the SQL
+is invalid; however, the bug would cause an alias to be added which
+makes the query "work". The issue is confusing because this aliasing
+is not applied consistently and could change based on the nature of the
+query preceding the join.
+
+A simple example is::
+
+ from sqlalchemy import Integer, Column, String, ForeignKey
+ from sqlalchemy.orm import Session, relationship
+ from sqlalchemy.ext.declarative import declarative_base
+
+ Base = declarative_base()
+
+ class A(Base):
+ __tablename__ = "a"
+
+ id = Column(Integer, primary_key=True)
+ type = Column(String)
+
+ __mapper_args__ = {'polymorphic_on': type, 'polymorphic_identity': 'a'}
+
+
+ class ASub1(A):
+ __mapper_args__ = {'polymorphic_identity': 'asub1'}
+
+
+ class ASub2(A):
+ __mapper_args__ = {'polymorphic_identity': 'asub2'}
+
+
+ class B(Base):
+ __tablename__ = 'b'
+
+ id = Column(Integer, primary_key=True)
+
+ a_id = Column(Integer, ForeignKey("a.id"))
+
+ a = relationship("A", primaryjoin="B.a_id == A.id", backref='b')
+
+ s = Session()
+
+    print(s.query(ASub1).join(B, ASub1.b).join(ASub2, B.a))
+
+    print(s.query(ASub1).join(B, ASub1.b).join(ASub2, ASub2.id == B.a_id))
+
+The two queries at the bottom are equivalent, and should both render
+identical SQL::
+
+ SELECT a.id AS a_id, a.type AS a_type
+ FROM a JOIN b ON b.a_id = a.id JOIN a ON b.a_id = a.id AND a.type IN (:type_1)
+ WHERE a.type IN (:type_2)
+
+The above SQL is invalid, as it renders "a" within the FROM list twice.
+However, the bug would occur with the second query only, rendering this instead::
+
+ SELECT a.id AS a_id, a.type AS a_type
+ FROM a JOIN b ON b.a_id = a.id JOIN a AS a_1
+ ON a_1.id = b.a_id AND a_1.type IN (:type_1)
+ WHERE a_1.type IN (:type_2)
+
+Above, the second join to "a" is aliased. While this seems convenient,
+it's not how single-inheritance queries work in general, and is misleading
+and inconsistent.
+
+The net effect is that applications which were relying on this bug will now
+have an error raised by the database. The solution is to use the expected
+form. When referring to multiple subclasses of a single-inheritance
+entity in a query, you must manually use aliases to disambiguate the table,
+as all the subclasses normally refer to the same table::
+
+    from sqlalchemy.orm import aliased
+
+    asub2_alias = aliased(ASub2)
+
+    print(s.query(ASub1).join(B, ASub1.b).join(asub2_alias, B.a.of_type(asub2_alias)))
+
+:ticket:`3233`
+
.. _migration_migration_deprecated_orm_events:
@@ -546,7 +1147,7 @@ The unused ``result`` member is now removed::
.. seealso::
- :ref:`bundles`
+ :ref:`bundles`
.. _migration_3008:
@@ -565,12 +1166,12 @@ As introduced in :ref:`feature_2976` from version 0.9, the behavior of
join eager load will use a right-nested join. ``"nested"`` is now implied
when using ``innerjoin=True``::
- query(User).options(
- joinedload("orders", innerjoin=False).joinedload("items", innerjoin=True))
+ query(User).options(
+ joinedload("orders", innerjoin=False).joinedload("items", innerjoin=True))
With the new default, this will render the FROM clause in the form::
- FROM users LEFT OUTER JOIN (orders JOIN items ON <onclause>) ON <onclause>
+ FROM users LEFT OUTER JOIN (orders JOIN items ON <onclause>) ON <onclause>
That is, using a right-nested join for the INNER join so that the full
result of ``users`` can be returned. The use of an INNER join is more efficient
@@ -579,13 +1180,13 @@ optimization parameter to take effect in all cases.
To get the older behavior, use ``innerjoin="unnested"``::
- query(User).options(
- joinedload("orders", innerjoin=False).joinedload("items", innerjoin="unnested"))
+ query(User).options(
+ joinedload("orders", innerjoin=False).joinedload("items", innerjoin="unnested"))
This will avoid right-nested joins and chain the joins together using all
OUTER joins despite the innerjoin directive::
- FROM users LEFT OUTER JOIN orders ON <onclause> LEFT OUTER JOIN items ON <onclause>
+ FROM users LEFT OUTER JOIN orders ON <onclause> LEFT OUTER JOIN items ON <onclause>
As noted in the 0.9 notes, the only database backend that has difficulty
with right-nested joins is SQLite; SQLAlchemy as of 0.9 converts a right-nested
@@ -593,7 +1194,7 @@ join into a subquery as a join target on SQLite.
.. seealso::
- :ref:`feature_2976` - description of the feature as introduced in 0.9.4.
+ :ref:`feature_2976` - description of the feature as introduced in 0.9.4.
:ticket:`3008`
@@ -638,15 +1239,15 @@ with SQL expressions into many functions, such as :meth:`.Select.where`,
Note that by "SQL expressions" we mean a **full fragment of a SQL string**,
such as::
- # the argument sent to where() is a full SQL expression
- stmt = select([sometable]).where("somecolumn = 'value'")
+ # the argument sent to where() is a full SQL expression
+ stmt = select([sometable]).where("somecolumn = 'value'")
and we are **not talking about string arguments**, that is, the normal
behavior of passing string values that become parameterized::
- # This is a normal Core expression with a string argument -
- # we aren't talking about this!!
- stmt = select([sometable]).where(sometable.c.somecolumn == 'value')
+ # This is a normal Core expression with a string argument -
+ # we aren't talking about this!!
+ stmt = select([sometable]).where(sometable.c.somecolumn == 'value')
The Core tutorial has long featured an example of the use of this technique,
using a :func:`.select` construct where virtually all components of it
@@ -660,25 +1261,25 @@ So the change here is to encourage the user to qualify textual strings when
composing SQL that is partially or fully composed from textual fragments.
When composing a select as below::
- stmt = select(["a", "b"]).where("a = b").select_from("sometable")
+ stmt = select(["a", "b"]).where("a = b").select_from("sometable")
The statement is built up normally, with all the same coercions as before.
However, one will see the following warnings emitted::
- SAWarning: Textual column expression 'a' should be explicitly declared
- with text('a'), or use column('a') for more specificity
- (this warning may be suppressed after 10 occurrences)
+ SAWarning: Textual column expression 'a' should be explicitly declared
+ with text('a'), or use column('a') for more specificity
+ (this warning may be suppressed after 10 occurrences)
- SAWarning: Textual column expression 'b' should be explicitly declared
- with text('b'), or use column('b') for more specificity
- (this warning may be suppressed after 10 occurrences)
+ SAWarning: Textual column expression 'b' should be explicitly declared
+ with text('b'), or use column('b') for more specificity
+ (this warning may be suppressed after 10 occurrences)
- SAWarning: Textual SQL expression 'a = b' should be explicitly declared
- as text('a = b') (this warning may be suppressed after 10 occurrences)
+ SAWarning: Textual SQL expression 'a = b' should be explicitly declared
+ as text('a = b') (this warning may be suppressed after 10 occurrences)
- SAWarning: Textual SQL FROM expression 'sometable' should be explicitly
- declared as text('sometable'), or use table('sometable') for more
- specificity (this warning may be suppressed after 10 occurrences)
+ SAWarning: Textual SQL FROM expression 'sometable' should be explicitly
+ declared as text('sometable'), or use table('sometable') for more
+ specificity (this warning may be suppressed after 10 occurrences)
These warnings attempt to show exactly where the issue is by displaying
the parameters as well as where the string was received.
@@ -688,14 +1289,14 @@ one wishes the warnings to be exceptions, the
`Python Warnings Filter <https://docs.python.org/2/library/warnings.html>`_
should be used::
- import warnings
- warnings.simplefilter("error") # all warnings raise an exception
+ import warnings
+ warnings.simplefilter("error") # all warnings raise an exception
Given the above warnings, our statement works just fine, but
to get rid of the warnings we would rewrite our statement as follows::
- from sqlalchemy import select, text
- stmt = select([
+ from sqlalchemy import select, text
+ stmt = select([
text("a"),
text("b")
]).where(text("a = b")).select_from(text("sometable"))
@@ -703,10 +1304,10 @@ to get rid of the warnings we would rewrite our statement as follows::
and as the warnings suggest, we can give our statement more specificity
about the text if we use :func:`.column` and :func:`.table`::
- from sqlalchemy import select, text, column, table
+ from sqlalchemy import select, text, column, table
- stmt = select([column("a"), column("b")]).\
- where(text("a = b")).select_from(table("sometable"))
+ stmt = select([column("a"), column("b")]).\
+ where(text("a = b")).select_from(table("sometable"))
Note also that :func:`.table` and :func:`.column` can now
be imported from "sqlalchemy" without the "sql" part.
@@ -723,10 +1324,10 @@ of this change we have enhanced its functionality. When we have a
:func:`.select` or :class:`.Query` that refers to some column name or named
label, we might want to GROUP BY and/or ORDER BY known columns or labels::
- stmt = select([
- user.c.name,
- func.count(user.c.id).label("id_count")
- ]).group_by("name").order_by("id_count")
+ stmt = select([
+ user.c.name,
+ func.count(user.c.id).label("id_count")
+ ]).group_by("name").order_by("id_count")
In the above statement we expect to see "ORDER BY id_count", as opposed to a
re-statement of the function. The string argument given is actively
@@ -734,24 +1335,24 @@ matched to an entry in the columns clause during compilation, so the above
statement would produce what we expect, without warnings (though note that
the ``"name"`` expression has been resolved to ``users.name``!)::
- SELECT users.name, count(users.id) AS id_count
- FROM users GROUP BY users.name ORDER BY id_count
+ SELECT users.name, count(users.id) AS id_count
+ FROM users GROUP BY users.name ORDER BY id_count
However, if we refer to a name that cannot be located, then we get
the warning again, as below::
- stmt = select([
+ stmt = select([
user.c.name,
func.count(user.c.id).label("id_count")
]).order_by("some_label")
The output does what we say, but again it warns us::
- SAWarning: Can't resolve label reference 'some_label'; converting to
- text() (this warning may be suppressed after 10 occurrences)
+ SAWarning: Can't resolve label reference 'some_label'; converting to
+ text() (this warning may be suppressed after 10 occurrences)
- SELECT users.name, count(users.id) AS id_count
- FROM users ORDER BY some_label
+ SELECT users.name, count(users.id) AS id_count
+ FROM users ORDER BY some_label
The above behavior applies to all those places where we might want to refer
to a so-called "label reference"; ORDER BY and GROUP BY, but also within an
@@ -761,7 +1362,7 @@ Postgresql syntax).
We can still specify any arbitrary expression for ORDER BY or others using
:func:`.text`::
- stmt = select([users]).order_by(text("some special expression"))
+ stmt = select([users]).order_by(text("some special expression"))
The upshot of the whole change is that SQLAlchemy now would like us
to tell it when a string is sent that this string is explicitly
@@ -822,7 +1423,7 @@ data is needed.
A :class:`.Table` can be set up for reflection by passing
:paramref:`.Table.autoload_with` alone::
- my_table = Table('my_table', metadata, autoload_with=some_engine)
+ my_table = Table('my_table', metadata, autoload_with=some_engine)
:ticket:`3027`
@@ -831,39 +1432,6 @@ A :class:`.Table` can be set up for reflection by passing
Dialect Changes
===============
-.. _change_2051:
-
-New Postgresql Table options
------------------------------
-
-Added support for PG table options TABLESPACE, ON COMMIT,
-WITH(OUT) OIDS, and INHERITS, when rendering DDL via
-the :class:`.Table` construct.
-
-.. seealso::
-
- :ref:`postgresql_table_options`
-
-:ticket:`2051`
-
-.. _feature_get_enums:
-
-New get_enums() method with Postgresql Dialect
-----------------------------------------------
-
-The :func:`.inspect` method returns a :class:`.PGInspector` object in the
-case of Postgresql, which includes a new :meth:`.PGInspector.get_enums`
-method that returns information on all available ``ENUM`` types::
-
- from sqlalchemy import inspect, create_engine
-
- engine = create_engine("postgresql+psycopg2://host/dbname")
- insp = inspect(engine)
- print(insp.get_enums())
-
-.. seealso::
-
- :meth:`.PGInspector.get_enums`
MySQL internal "no such table" exceptions not passed to event handlers
----------------------------------------------------------------------
@@ -925,6 +1493,26 @@ when using ODBC to avoid this issue entirely.
:ticket:`3182`
+.. _change_3204:
+
+SQLite/Oracle have distinct methods for temporary table/view name reporting
+---------------------------------------------------------------------------
+
+The :meth:`.Inspector.get_table_names` and :meth:`.Inspector.get_view_names`
+methods in the case of SQLite/Oracle would also return the names of temporary
+tables and views, behavior which no other dialect provides (in the case
+of MySQL at least, it is not even possible). This logic has been moved
+out to two new methods, :meth:`.Inspector.get_temp_table_names` and
+:meth:`.Inspector.get_temp_view_names`.
+
+Note that reflection of a specific named temporary table or temporary view,
+either by ``Table('name', autoload=True)`` or via methods like
+:meth:`.Inspector.get_columns` continues to function for most if not all
+dialects. For SQLite specifically, there is a bug fix for UNIQUE constraint
+reflection from temp tables as well, which is :ticket:`3203`.
+
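+A brief usage sketch with an in-memory SQLite database (the temporary
+table name here is hypothetical)::
+
+    from sqlalchemy import create_engine, inspect
+
+    engine = create_engine("sqlite://")
+    engine.execute("CREATE TEMPORARY TABLE user_tmp (id INTEGER)")
+
+    insp = inspect(engine)
+    print(insp.get_temp_table_names())  # ['user_tmp']
+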
+:ticket:`3204`
+
.. _change_2984:
Drizzle Dialect is now an External Dialect
diff --git a/doc/build/core/defaults.rst b/doc/build/core/defaults.rst
index 166273c18..1d55cd6c6 100644
--- a/doc/build/core/defaults.rst
+++ b/doc/build/core/defaults.rst
@@ -1,6 +1,8 @@
+.. module:: sqlalchemy.schema
+
.. _metadata_defaults_toplevel:
+
.. _metadata_defaults:
-.. module:: sqlalchemy.schema
Column Insert/Update Defaults
==============================
diff --git a/doc/build/core/engines.rst b/doc/build/core/engines.rst
index fb0320474..17ec9416c 100644
--- a/doc/build/core/engines.rst
+++ b/doc/build/core/engines.rst
@@ -151,9 +151,14 @@ For a relative file path, this requires three slashes::
# where <path> is relative:
engine = create_engine('sqlite:///foo.db')
-And for an absolute file path, *four* slashes are used::
+And for an absolute file path, the three slashes are followed by the absolute path::
+ # Unix/Mac - 4 initial slashes in total
 engine = create_engine('sqlite:////absolute/path/to/foo.db')
+ # Windows
+ engine = create_engine('sqlite:///C:\\path\\to\\foo.db')
+ # Windows alternative using raw string
+ engine = create_engine(r'sqlite:///C:\path\to\foo.db')
To use a SQLite ``:memory:`` database, specify an empty URL::
diff --git a/doc/build/core/sqlelement.rst b/doc/build/core/sqlelement.rst
index 61600e927..44a969dbb 100644
--- a/doc/build/core/sqlelement.rst
+++ b/doc/build/core/sqlelement.rst
@@ -35,6 +35,8 @@ used to construct any kind of typed SQL expression.
.. autodata:: func
+.. autofunction:: funcfilter
+
.. autofunction:: label
.. autofunction:: literal
@@ -109,6 +111,9 @@ used to construct any kind of typed SQL expression.
.. autoclass:: sqlalchemy.sql.elements.False_
:members:
+.. autoclass:: FunctionFilter
+ :members:
+
.. autoclass:: Label
:members:
diff --git a/doc/build/dialects/index.rst b/doc/build/dialects/index.rst
index da2699b2b..463f00612 100644
--- a/doc/build/dialects/index.rst
+++ b/doc/build/dialects/index.rst
@@ -50,6 +50,7 @@ Production Ready
developed jointly by IBM and SQLAlchemy developers.
* `redshift-sqlalchemy <https://pypi.python.org/pypi/redshift-sqlalchemy>`_ - driver for Amazon Redshift, adapts
the existing Postgresql/psycopg2 driver.
+* `sqlalchemy_exasol <https://github.com/blue-yonder/sqlalchemy_exasol>`_ - driver for EXASolution.
* `sqlalchemy-sqlany <https://github.com/sqlanywhere/sqlalchemy-sqlany>`_ - driver for SAP Sybase SQL
Anywhere, developed by SAP.
* `sqlalchemy-monetdb <https://github.com/gijzelaerr/sqlalchemy-monetdb>`_ - driver for MonetDB.
diff --git a/doc/build/dialects/sqlite.rst b/doc/build/dialects/sqlite.rst
index 21fd4e3aa..a18b0ba7b 100644
--- a/doc/build/dialects/sqlite.rst
+++ b/doc/build/dialects/sqlite.rst
@@ -28,4 +28,9 @@ they originate from :mod:`sqlalchemy.types` or from the local dialect::
Pysqlite
--------
-.. automodule:: sqlalchemy.dialects.sqlite.pysqlite \ No newline at end of file
+.. automodule:: sqlalchemy.dialects.sqlite.pysqlite
+
+Pysqlcipher
+-----------
+
+.. automodule:: sqlalchemy.dialects.sqlite.pysqlcipher \ No newline at end of file
diff --git a/doc/build/orm/extensions/declarative.rst b/doc/build/orm/extensions/declarative.rst
index 636bb451b..7d9e634b5 100644
--- a/doc/build/orm/extensions/declarative.rst
+++ b/doc/build/orm/extensions/declarative.rst
@@ -13,6 +13,7 @@ API Reference
.. autofunction:: as_declarative
.. autoclass:: declared_attr
+ :members:
.. autofunction:: sqlalchemy.ext.declarative.api._declarative_constructor
diff --git a/doc/build/orm/inheritance.rst b/doc/build/orm/inheritance.rst
index 642f3420c..9f01a3e24 100644
--- a/doc/build/orm/inheritance.rst
+++ b/doc/build/orm/inheritance.rst
@@ -45,6 +45,12 @@ this column is to act as the **discriminator**, and stores a value
which indicates the type of object represented within the row. The column may
be of any datatype, though string and integer are the most common.
+.. warning::
+
+ Currently, **only one discriminator column may be set**, typically
+ on the base-most class in the hierarchy. "Cascading" polymorphic columns
+ are not yet supported.
+
The discriminator column is only needed if polymorphic loading is
desired, as is usually the case. It is not strictly necessary that
it be present directly on the base mapped table, and can instead be defined on a
diff --git a/doc/build/orm/relationships.rst b/doc/build/orm/relationships.rst
index c65f06cbc..f512251a7 100644
--- a/doc/build/orm/relationships.rst
+++ b/doc/build/orm/relationships.rst
@@ -1079,12 +1079,15 @@ The above relationship will produce a join like::
ON host_entry_1.ip_address = CAST(host_entry.content AS INET)
An alternative syntax to the above is to use the :func:`.foreign` and
-:func:`.remote` :term:`annotations`, inline within the :paramref:`~.relationship.primaryjoin` expression.
+:func:`.remote` :term:`annotations`,
+inline within the :paramref:`~.relationship.primaryjoin` expression.
This syntax represents the annotations that :func:`.relationship` normally
applies by itself to the join condition given the :paramref:`~.relationship.foreign_keys` and
-:paramref:`~.relationship.remote_side` arguments; the functions are provided in the API in the
-rare case that :func:`.relationship` can't determine the exact location
-of these features on its own::
+:paramref:`~.relationship.remote_side` arguments. These functions may
+be more succinct when an explicit join condition is present, and additionally
+serve to mark exactly the column that is "foreign" or "remote" independent
+of whether that column is stated multiple times or within complex
+SQL expressions::
from sqlalchemy.orm import foreign, remote
@@ -1157,6 +1160,130 @@ Will render as::
flag to assist in the creation of :func:`.relationship` constructs using
custom operators.
+.. _relationship_overlapping_foreignkeys:
+
+Overlapping Foreign Keys
+~~~~~~~~~~~~~~~~~~~~~~~~
+
+A rare scenario can arise when composite foreign keys are used, such that
+a single column is the subject of more than one foreign key constraint,
+each referring to a different target column.
+
+Consider an (admittedly complex) mapping such as the ``Magazine`` object,
+referred to both by the ``Writer`` object and the ``Article`` object
+using a composite primary key scheme that includes ``magazine_id``
+for both; then to make ``Article`` refer to ``Writer`` as well,
+``Article.magazine_id`` is involved in two separate relationships;
+``Article.magazine`` and ``Article.writer``::
+
+ class Magazine(Base):
+ __tablename__ = 'magazine'
+
+ id = Column(Integer, primary_key=True)
+
+
+ class Article(Base):
+ __tablename__ = 'article'
+
+ article_id = Column(Integer)
+ magazine_id = Column(ForeignKey('magazine.id'))
+ writer_id = Column()
+
+ magazine = relationship("Magazine")
+ writer = relationship("Writer")
+
+ __table_args__ = (
+ PrimaryKeyConstraint('article_id', 'magazine_id'),
+ ForeignKeyConstraint(
+ ['writer_id', 'magazine_id'],
+ ['writer.id', 'writer.magazine_id']
+ ),
+ )
+
+
+ class Writer(Base):
+ __tablename__ = 'writer'
+
+ id = Column(Integer, primary_key=True)
+ magazine_id = Column(ForeignKey('magazine.id'), primary_key=True)
+ magazine = relationship("Magazine")
+
+When the above mapping is configured, we will see this warning emitted::
+
+ SAWarning: relationship 'Article.writer' will copy column
+ writer.magazine_id to column article.magazine_id,
+ which conflicts with relationship(s): 'Article.magazine'
+ (copies magazine.id to article.magazine_id). Consider applying
+ viewonly=True to read-only relationships, or provide a primaryjoin
+ condition marking writable columns with the foreign() annotation.
+
+What this refers to originates from the fact that ``Article.magazine_id`` is
+the subject of two different foreign key constraints; it refers to
+``Magazine.id`` directly as a source column, but also refers to
+``Writer.magazine_id`` as a source column in the context of the
+composite key to ``Writer``. If we associate an ``Article`` with a
+particular ``Magazine``, but then associate the ``Article`` with a
+``Writer`` that's associated with a *different* ``Magazine``, the ORM
+will overwrite ``Article.magazine_id`` non-deterministically, silently
+changing which magazine we refer to; it may
+also attempt to place NULL into this column if we de-associate a
+``Writer`` from an ``Article``. The warning lets us know this is the case.
+
+To solve this, we need to break out the behavior of ``Article`` to include
+all three of the following features:
+
+1. ``Article`` first and foremost writes to
+ ``Article.magazine_id`` based on data persisted in the ``Article.magazine``
+   relationship only, that is, a value copied from ``Magazine.id``.
+
+2. ``Article`` can write to ``Article.writer_id`` on behalf of data
+ persisted in the ``Article.writer`` relationship, but only the
+ ``Writer.id`` column; the ``Writer.magazine_id`` column should not
+ be written into ``Article.magazine_id`` as it ultimately is sourced
+ from ``Magazine.id``.
+
+3. ``Article`` takes ``Article.magazine_id`` into account when loading
+ ``Article.writer``, even though it *doesn't* write to it on behalf
+ of this relationship.
+
+To get just #1 and #2, we could specify only ``Article.writer_id`` as the
+"foreign keys" for ``Article.writer``::
+
+ class Article(Base):
+ # ...
+
+ writer = relationship("Writer", foreign_keys='Article.writer_id')
+
+However, this has the effect of ``Article.writer`` not taking
+``Article.magazine_id`` into account when querying against ``Writer``:
+
+.. sourcecode:: sql
+
+ SELECT article.article_id AS article_article_id,
+ article.magazine_id AS article_magazine_id,
+ article.writer_id AS article_writer_id
+ FROM article
+ JOIN writer ON writer.id = article.writer_id
+
+Therefore, to get at all of #1, #2, and #3, we express the join condition
+as well as which columns are to be written by combining
+:paramref:`~.relationship.primaryjoin` fully, along with either the
+:paramref:`~.relationship.foreign_keys` argument, or more succinctly by
+annotating with :func:`~.orm.foreign`::
+
+ class Article(Base):
+ # ...
+
+ writer = relationship(
+ "Writer",
+ primaryjoin="and_(Writer.id == foreign(Article.writer_id), "
+ "Writer.magazine_id == Article.magazine_id)")
+
+.. versionchanged:: 1.0.0 the ORM will attempt to warn when a column is used
+ as the synchronization target from more than one relationship
+ simultaneously.
+
+
Non-relational Comparisons / Materialized Path
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py
index 853566172..d184e1fbf 100644
--- a/lib/sqlalchemy/__init__.py
+++ b/lib/sqlalchemy/__init__.py
@@ -25,6 +25,7 @@ from .sql import (
extract,
false,
func,
+ funcfilter,
insert,
intersect,
intersect_all,
diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py
index ba3050ae5..dad02ee0f 100644
--- a/lib/sqlalchemy/dialects/mssql/base.py
+++ b/lib/sqlalchemy/dialects/mssql/base.py
@@ -846,7 +846,7 @@ class MSExecutionContext(default.DefaultExecutionContext):
"SET IDENTITY_INSERT %s OFF" %
self.dialect.identifier_preparer. format_table(
self.compiled.statement.table)))
- except:
+ except Exception:
pass
def get_result_proxy(self):
diff --git a/lib/sqlalchemy/dialects/mssql/pymssql.py b/lib/sqlalchemy/dialects/mssql/pymssql.py
index 8f76336ae..b5a1bc566 100644
--- a/lib/sqlalchemy/dialects/mssql/pymssql.py
+++ b/lib/sqlalchemy/dialects/mssql/pymssql.py
@@ -63,7 +63,7 @@ class MSDialect_pymssql(MSDialect):
def _get_server_version_info(self, connection):
vers = connection.scalar("select @@version")
m = re.match(
- r"Microsoft SQL Server.*? - (\d+).(\d+).(\d+).(\d+)", vers)
+ r"Microsoft .*? - (\d+).(\d+).(\d+).(\d+)", vers)
if m:
return tuple(int(x) for x in m.group(1, 2, 3, 4))
else:
diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py
index 7ccd59abb..2fb054d0c 100644
--- a/lib/sqlalchemy/dialects/mysql/base.py
+++ b/lib/sqlalchemy/dialects/mysql/base.py
@@ -341,6 +341,29 @@ reflection will not include foreign keys. For these tables, you may supply a
:ref:`mysql_storage_engines`
+.. _mysql_unique_constraints:
+
+MySQL Unique Constraints and Reflection
+---------------------------------------
+
+SQLAlchemy supports both the :class:`.Index` construct with the
+flag ``unique=True``, indicating a UNIQUE index, as well as the
+:class:`.UniqueConstraint` construct, representing a UNIQUE constraint.
+Both objects/syntaxes are supported by MySQL when emitting DDL to create
+these constraints. However, MySQL does not have a unique constraint
+construct that is separate from a unique index; that is, the "UNIQUE"
+constraint on MySQL is equivalent to creating a "UNIQUE INDEX".
+
+When reflecting these constructs, the :meth:`.Inspector.get_indexes`
+and the :meth:`.Inspector.get_unique_constraints` methods will **both**
+return an entry for a UNIQUE index in MySQL. However, when performing
+full table reflection using ``Table(..., autoload=True)``,
+the :class:`.UniqueConstraint` construct is
+**not** part of the fully reflected :class:`.Table` construct under any
+circumstances; this construct is always represented by a :class:`.Index`
+with the ``unique=True`` setting present in the :attr:`.Table.indexes`
+collection.
+
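+For example, an inspection sketch against a hypothetical table that has
+a UNIQUE index; both accessors report on the same underlying construct::
+
+    from sqlalchemy import create_engine, inspect
+
+    engine = create_engine("mysql://user:pass@host/dbname")
+    insp = inspect(engine)
+
+    # both of these calls will include an entry
+    # for the same UNIQUE index
+    print(insp.get_indexes("some_table"))
+    print(insp.get_unique_constraints("some_table"))
+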
.. _mysql_timestamp_null:
@@ -2317,7 +2340,7 @@ class MySQLDialect(default.DefaultDialect):
# basic operations via autocommit fail.
try:
dbapi_connection.commit()
- except:
+ except Exception:
if self.server_version_info < (3, 23, 15):
args = sys.exc_info()[1].args
if args and args[0] == 1064:
@@ -2329,7 +2352,7 @@ class MySQLDialect(default.DefaultDialect):
try:
dbapi_connection.rollback()
- except:
+ except Exception:
if self.server_version_info < (3, 23, 15):
args = sys.exc_info()[1].args
if args and args[0] == 1064:
@@ -2590,7 +2613,8 @@ class MySQLDialect(default.DefaultDialect):
return [
{
'name': key['name'],
- 'column_names': [col[0] for col in key['columns']]
+ 'column_names': [col[0] for col in key['columns']],
+ 'duplicates_index': key['name'],
}
for key in parsed_state.keys
if key['type'] == 'UNIQUE'
diff --git a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py
index e51e80005..417e1ad6f 100644
--- a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py
+++ b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py
@@ -21,6 +21,7 @@ from .base import (MySQLDialect, MySQLExecutionContext,
BIT)
from ... import util
+import re
class MySQLExecutionContext_mysqlconnector(MySQLExecutionContext):
@@ -31,18 +32,34 @@ class MySQLExecutionContext_mysqlconnector(MySQLExecutionContext):
class MySQLCompiler_mysqlconnector(MySQLCompiler):
def visit_mod_binary(self, binary, operator, **kw):
- return self.process(binary.left, **kw) + " %% " + \
- self.process(binary.right, **kw)
+ if self.dialect._mysqlconnector_double_percents:
+ return self.process(binary.left, **kw) + " %% " + \
+ self.process(binary.right, **kw)
+ else:
+ return self.process(binary.left, **kw) + " % " + \
+ self.process(binary.right, **kw)
def post_process_text(self, text):
- return text.replace('%', '%%')
+ if self.dialect._mysqlconnector_double_percents:
+ return text.replace('%', '%%')
+ else:
+ return text
+
+ def escape_literal_column(self, text):
+ if self.dialect._mysqlconnector_double_percents:
+ return text.replace('%', '%%')
+ else:
+ return text
class MySQLIdentifierPreparer_mysqlconnector(MySQLIdentifierPreparer):
def _escape_identifier(self, value):
value = value.replace(self.escape_quote, self.escape_to_quote)
- return value.replace("%", "%%")
+ if self.dialect._mysqlconnector_double_percents:
+ return value.replace("%", "%%")
+ else:
+ return value
class _myconnpyBIT(BIT):
@@ -55,8 +72,6 @@ class _myconnpyBIT(BIT):
class MySQLDialect_mysqlconnector(MySQLDialect):
driver = 'mysqlconnector'
- if util.py2k:
- supports_unicode_statements = False
supports_unicode_binds = True
supports_sane_rowcount = True
@@ -77,6 +92,10 @@ class MySQLDialect_mysqlconnector(MySQLDialect):
}
)
+ @util.memoized_property
+ def supports_unicode_statements(self):
+ return util.py3k or self._mysqlconnector_version_info > (2, 0)
+
@classmethod
def dbapi(cls):
from mysql import connector
@@ -103,10 +122,25 @@ class MySQLDialect_mysqlconnector(MySQLDialect):
'client_flags', ClientFlag.get_default())
client_flags |= ClientFlag.FOUND_ROWS
opts['client_flags'] = client_flags
- except:
+ except Exception:
pass
return [[], opts]
+ @util.memoized_property
+ def _mysqlconnector_version_info(self):
+ if self.dbapi and hasattr(self.dbapi, '__version__'):
+ m = re.match(r'(\d+)\.(\d+)(?:\.(\d+))?',
+ self.dbapi.__version__)
+ if m:
+ return tuple(
+ int(x)
+ for x in m.group(1, 2, 3)
+ if x is not None)
+
+ @util.memoized_property
+ def _mysqlconnector_double_percents(self):
+ return not util.py3k and self._mysqlconnector_version_info < (2, 0)
+
def _get_server_version_info(self, connection):
dbapi_con = connection.connection
version = dbapi_con.get_server_version()
diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py
index 81a9f1a95..6df38e57e 100644
--- a/lib/sqlalchemy/dialects/oracle/base.py
+++ b/lib/sqlalchemy/dialects/oracle/base.py
@@ -213,6 +213,21 @@ is reflected and the type is reported as ``DATE``, the time-supporting
examining the type of column for use in special Python translations or
for migrating schemas to other database backends.
+Oracle Table Options
+-------------------------
+
+The CREATE TABLE phrase supports the following options with Oracle
+in conjunction with the :class:`.Table` construct:
+
+
+* ``ON COMMIT``::
+
+ Table(
+ "some_table", metadata, ...,
+ prefixes=['GLOBAL TEMPORARY'], oracle_on_commit='PRESERVE ROWS')
+
+.. versionadded:: 1.0.0
+
"""
import re
@@ -784,11 +799,22 @@ class OracleDDLCompiler(compiler.DDLCompiler):
return super(OracleDDLCompiler, self).\
visit_create_index(create, include_schema=True)
+ def post_create_table(self, table):
+ table_opts = []
+ opts = table.dialect_options['oracle']
+
+ if opts['on_commit']:
+ on_commit_options = opts['on_commit'].replace("_", " ").upper()
+ table_opts.append('\n ON COMMIT %s' % on_commit_options)
+
+ return ''.join(table_opts)
+
class OracleIdentifierPreparer(compiler.IdentifierPreparer):
reserved_words = set([x.lower() for x in RESERVED_WORDS])
- illegal_initial_characters = set(range(0, 10)).union(["_", "$"])
+ illegal_initial_characters = set(
+ (str(dig) for dig in range(0, 10))).union(["_", "$"])
def _bindparam_requires_quotes(self, value):
"""Return True if the given identifier requires quoting."""
@@ -842,7 +868,10 @@ class OracleDialect(default.DefaultDialect):
reflection_options = ('oracle_resolve_synonyms', )
construct_arguments = [
- (sa_schema.Table, {"resolve_synonyms": False})
+ (sa_schema.Table, {
+ "resolve_synonyms": False,
+ "on_commit": None
+ })
]
def __init__(self,
@@ -1029,7 +1058,21 @@ class OracleDialect(default.DefaultDialect):
"WHERE nvl(tablespace_name, 'no tablespace') NOT IN "
"('SYSTEM', 'SYSAUX') "
"AND OWNER = :owner "
- "AND IOT_NAME IS NULL")
+ "AND IOT_NAME IS NULL "
+ "AND DURATION IS NULL")
+ cursor = connection.execute(s, owner=schema)
+ return [self.normalize_name(row[0]) for row in cursor]
+
+ @reflection.cache
+ def get_temp_table_names(self, connection, **kw):
+ schema = self.denormalize_name(self.default_schema_name)
+ s = sql.text(
+ "SELECT table_name FROM all_tables "
+ "WHERE nvl(tablespace_name, 'no tablespace') NOT IN "
+ "('SYSTEM', 'SYSAUX') "
+ "AND OWNER = :owner "
+ "AND IOT_NAME IS NULL "
+ "AND DURATION IS NOT NULL")
cursor = connection.execute(s, owner=schema)
return [self.normalize_name(row[0]) for row in cursor]
diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py
index 575d2a6dd..baa640eaa 100644
--- a/lib/sqlalchemy/dialects/postgresql/base.py
+++ b/lib/sqlalchemy/dialects/postgresql/base.py
@@ -401,6 +401,29 @@ The value passed to the keyword argument will be simply passed through to the
underlying CREATE INDEX command, so it *must* be a valid index type for your
version of PostgreSQL.
+
+.. _postgresql_index_reflection:
+
+Postgresql Index Reflection
+---------------------------
+
+The Postgresql database creates a UNIQUE INDEX implicitly whenever the
+UNIQUE CONSTRAINT construct is used. When inspecting a table using
+:class:`.Inspector`, the :meth:`.Inspector.get_indexes`
+and the :meth:`.Inspector.get_unique_constraints` methods will report on
+these two constructs distinctly; in the case of the index, the key
+``duplicates_constraint`` will be present in the index entry if it is
+detected as mirroring a constraint. When performing reflection using
+``Table(..., autoload=True)``, the UNIQUE INDEX is **not** returned
+in :attr:`.Table.indexes` when it is detected as mirroring a
+:class:`.UniqueConstraint` in the :attr:`.Table.constraints` collection.
+
+.. versionchanged:: 1.0.0 - :class:`.Table` reflection now includes
+ :class:`.UniqueConstraint` objects present in the :attr:`.Table.constraints`
+ collection; the Postgresql backend will no longer include a "mirrored"
+ :class:`.Index` construct in :attr:`.Table.indexes` if it is detected
+ as corresponding to a unique constraint.
+
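+As a sketch, inspecting a hypothetical table whose UNIQUE INDEX mirrors
+a UNIQUE CONSTRAINT; the index entry carries the ``duplicates_constraint``
+key::
+
+    from sqlalchemy import create_engine, inspect
+
+    engine = create_engine("postgresql+psycopg2://host/dbname")
+    insp = inspect(engine)
+
+    for idx in insp.get_indexes("some_table"):
+        if "duplicates_constraint" in idx:
+            print("index %s mirrors constraint %s" % (
+                idx["name"], idx["duplicates_constraint"]))
+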
Special Reflection Options
--------------------------
@@ -1679,6 +1702,19 @@ class PGInspector(reflection.Inspector):
schema = schema or self.default_schema_name
return self.dialect._load_enums(self.bind, schema)
+ def get_foreign_table_names(self, schema=None):
+ """Return a list of FOREIGN TABLE names.
+
+ Behavior is similar to that of :meth:`.Inspector.get_table_names`,
+ except that the list is limited to those tables that report a
+ ``relkind`` value of ``f``.
+
+ .. versionadded:: 1.0.0
+
+ """
+ schema = schema or self.default_schema_name
+ return self.dialect._get_foreign_table_names(self.bind, schema)
+
class CreateEnumType(schema._CreateDropBase):
__visit_name__ = "create_enum_type"
@@ -2024,7 +2060,7 @@ class PGDialect(default.DefaultDialect):
FROM pg_catalog.pg_class c
LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
WHERE (%s)
- AND c.relname = :table_name AND c.relkind in ('r','v')
+ AND c.relname = :table_name AND c.relkind in ('r', 'v', 'm', 'f')
""" % schema_where_clause
# Since we're binding to unicode, table_name and schema_name must be
# unicode.
@@ -2078,6 +2114,24 @@ class PGDialect(default.DefaultDialect):
return [row[0] for row in result]
@reflection.cache
+ def _get_foreign_table_names(self, connection, schema=None, **kw):
+ if schema is not None:
+ current_schema = schema
+ else:
+ current_schema = self.default_schema_name
+
+ result = connection.execute(
+ sql.text("SELECT relname FROM pg_class c "
+ "WHERE relkind = 'f' "
+ "AND '%s' = (select nspname from pg_namespace n "
+ "where n.oid = c.relnamespace) " %
+ current_schema,
+ typemap={'relname': sqltypes.Unicode}
+ )
+ )
+ return [row[0] for row in result]
+
+ @reflection.cache
def get_view_names(self, connection, schema=None, **kw):
if schema is not None:
current_schema = schema
@@ -2086,7 +2140,7 @@ class PGDialect(default.DefaultDialect):
s = """
SELECT relname
FROM pg_class c
- WHERE relkind = 'v'
+ WHERE relkind IN ('m', 'v')
AND '%(schema)s' = (select nspname from pg_namespace n
where n.oid = c.relnamespace)
""" % dict(schema=current_schema)
@@ -2439,16 +2493,21 @@ class PGDialect(default.DefaultDialect):
SELECT
i.relname as relname,
ix.indisunique, ix.indexprs, ix.indpred,
- a.attname, a.attnum, ix.indkey%s
+ a.attname, a.attnum, c.conrelid, ix.indkey%s
FROM
pg_class t
join pg_index ix on t.oid = ix.indrelid
- join pg_class i on i.oid=ix.indexrelid
+ join pg_class i on i.oid = ix.indexrelid
left outer join
pg_attribute a
- on t.oid=a.attrelid and %s
+ on t.oid = a.attrelid and %s
+ left outer join
+ pg_constraint c
+ on (ix.indrelid = c.conrelid and
+ ix.indexrelid = c.conindid and
+ c.contype in ('p', 'u', 'x'))
WHERE
- t.relkind = 'r'
+ t.relkind IN ('r', 'v', 'f', 'm')
and t.oid = :table_oid
and ix.indisprimary = 'f'
ORDER BY
@@ -2469,7 +2528,7 @@ class PGDialect(default.DefaultDialect):
sv_idx_name = None
for row in c.fetchall():
- idx_name, unique, expr, prd, col, col_num, idx_key = row
+ idx_name, unique, expr, prd, col, col_num, conrelid, idx_key = row
if expr:
if idx_name != sv_idx_name:
@@ -2486,18 +2545,27 @@ class PGDialect(default.DefaultDialect):
% idx_name)
sv_idx_name = idx_name
+ has_idx = idx_name in indexes
index = indexes[idx_name]
if col is not None:
index['cols'][col_num] = col
- index['key'] = [int(k.strip()) for k in idx_key.split()]
- index['unique'] = unique
-
- return [
- {'name': name,
- 'unique': idx['unique'],
- 'column_names': [idx['cols'][i] for i in idx['key']]}
- for name, idx in indexes.items()
- ]
+ if not has_idx:
+ index['key'] = [int(k.strip()) for k in idx_key.split()]
+ index['unique'] = unique
+ if conrelid is not None:
+ index['duplicates_constraint'] = idx_name
+
+ result = []
+ for name, idx in indexes.items():
+ entry = {
+ 'name': name,
+ 'unique': idx['unique'],
+ 'column_names': [idx['cols'][i] for i in idx['key']]
+ }
+ if 'duplicates_constraint' in idx:
+ entry['duplicates_constraint'] = idx['duplicates_constraint']
+ result.append(entry)
+ return result
@reflection.cache
def get_unique_constraints(self, connection, table_name,
diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
index e6450c97f..1a2a1ffe4 100644
--- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py
+++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
@@ -32,10 +32,25 @@ psycopg2-specific keyword arguments which are accepted by
way of enabling this mode on a per-execution basis.
* ``use_native_unicode``: Enable the usage of Psycopg2 "native unicode" mode
per connection. True by default.
+
+ .. seealso::
+
+ :ref:`psycopg2_disable_native_unicode`
+
* ``isolation_level``: This option, available for all PostgreSQL dialects,
includes the ``AUTOCOMMIT`` isolation level when using the psycopg2
- dialect. See :ref:`psycopg2_isolation_level`.
+ dialect.
+
+ .. seealso::
+
+ :ref:`psycopg2_isolation_level`
+
+* ``client_encoding``: sets the client encoding in a libpq-agnostic way,
+ using psycopg2's ``set_client_encoding()`` method.
+
+ .. seealso::
+
+     :ref:`psycopg2_unicode`
Unix Domain Connections
------------------------
@@ -75,8 +90,10 @@ The following DBAPI-specific options are respected when used with
If ``None`` or not set, the ``server_side_cursors`` option of the
:class:`.Engine` is used.
-Unicode
--------
+.. _psycopg2_unicode:
+
+Unicode with Psycopg2
+----------------------
By default, the psycopg2 driver uses the ``psycopg2.extensions.UNICODE``
extension, such that the DBAPI receives and returns all strings as Python
@@ -84,27 +101,51 @@ Unicode objects directly - SQLAlchemy passes these values through without
change. Psycopg2 here will encode/decode string values based on the
current "client encoding" setting; by default this is the value in
the ``postgresql.conf`` file, which often defaults to ``SQL_ASCII``.
-Typically, this can be changed to ``utf-8``, as a more useful default::
+Typically, this can be changed to ``utf8``, as a more useful default::
+
+ # postgresql.conf file
- #client_encoding = sql_ascii # actually, defaults to database
+ # client_encoding = sql_ascii # actually, defaults to database
# encoding
client_encoding = utf8
A second way to affect the client encoding is to set it within Psycopg2
-locally. SQLAlchemy will call psycopg2's ``set_client_encoding()``
-method (see:
-http://initd.org/psycopg/docs/connection.html#connection.set_client_encoding)
+locally. SQLAlchemy will call psycopg2's
+:meth:`psycopg2:connection.set_client_encoding` method
on all new connections based on the value passed to
:func:`.create_engine` using the ``client_encoding`` parameter::
+ # set_client_encoding() setting;
+ # works for *all* Postgresql versions
engine = create_engine("postgresql://user:pass@host/dbname",
client_encoding='utf8')
This overrides the encoding specified in the Postgresql client configuration.
+When using the parameter in this way, the psycopg2 driver emits
+``SET client_encoding TO 'utf8'`` on the connection explicitly, and works
+in all Postgresql versions.
+
+Note that the ``client_encoding`` setting as passed to :func:`.create_engine`
+is **not the same** as the more recently added ``client_encoding`` parameter
+now supported by libpq directly. This is enabled when ``client_encoding``
+is passed directly to ``psycopg2.connect()``, and from SQLAlchemy is passed
+using the :paramref:`.create_engine.connect_args` parameter::
+
+ # libpq direct parameter setting;
+ # only works for Postgresql **9.1 and above**
+ engine = create_engine("postgresql://user:pass@host/dbname",
+ connect_args={'client_encoding': 'utf8'})
+
+ # using the query string is equivalent
+ engine = create_engine("postgresql://user:pass@host/dbname?client_encoding=utf8")
+
+The above parameter was only added to libpq as of version 9.1 of Postgresql,
+so using the previous method is better for cross-version support.
+
+.. _psycopg2_disable_native_unicode:
-.. versionadded:: 0.7.3
- The psycopg2-specific ``client_encoding`` parameter to
- :func:`.create_engine`.
+Disabling Native Unicode
+^^^^^^^^^^^^^^^^^^^^^^^^
SQLAlchemy can also be instructed to skip the usage of the psycopg2
``UNICODE`` extension and to instead utilize its own unicode encode/decode
@@ -116,8 +157,7 @@ in and coerce from bytes on the way back,
using the value of the :func:`.create_engine` ``encoding`` parameter, which
defaults to ``utf-8``.
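
A minimal sketch of disabling native unicode in favor of SQLAlchemy's
own codec handling (connection URL hypothetical)::

    from sqlalchemy import create_engine

    engine = create_engine(
        "postgresql+psycopg2://user:pass@host/dbname",
        use_native_unicode=False)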
SQLAlchemy's own unicode encode/decode functionality is steadily becoming
-obsolete as more DBAPIs support unicode fully along with the approach of
-Python 3; in modern usage psycopg2 should be relied upon to handle unicode.
+obsolete as most DBAPIs now support unicode fully.
Transactions
------------
@@ -512,12 +552,14 @@ class PGDialect_psycopg2(PGDialect):
def is_disconnect(self, e, connection, cursor):
if isinstance(e, self.dbapi.Error):
# check the "closed" flag. this might not be
- # present on old psycopg2 versions
+ # present on old psycopg2 versions. Also,
+ # this flag doesn't actually help in a lot of disconnect
+ # situations, so don't rely on it.
if getattr(connection, 'closed', False):
return True
- # legacy checks based on strings. the "closed" check
- # above most likely obviates the need for any of these.
+ # checks based on strings. in the case that .closed
+ # didn't cut it, fall back onto these.
str_e = str(e).partition("\n")[0]
for msg in [
# these error messages from libpq: interfaces/libpq/fe-misc.c
@@ -534,8 +576,10 @@ class PGDialect_psycopg2(PGDialect):
# not sure where this path is originally from, it may
# be obsolete. It really says "losed", not "closed".
'losed the connection unexpectedly',
- # this can occur in newer SSL
- 'connection has been closed unexpectedly'
+ # these can occur in newer SSL
+ 'connection has been closed unexpectedly',
+ 'SSL SYSCALL error: Bad file descriptor',
+ 'SSL SYSCALL error: EOF detected',
]:
idx = str_e.find(msg)
if idx >= 0 and '"' not in str_e[:idx]:
diff --git a/lib/sqlalchemy/dialects/sqlite/__init__.py b/lib/sqlalchemy/dialects/sqlite/__init__.py
index 0eceaa537..a53d53e9d 100644
--- a/lib/sqlalchemy/dialects/sqlite/__init__.py
+++ b/lib/sqlalchemy/dialects/sqlite/__init__.py
@@ -5,7 +5,7 @@
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
-from sqlalchemy.dialects.sqlite import base, pysqlite
+from sqlalchemy.dialects.sqlite import base, pysqlite, pysqlcipher
# default dialect
base.dialect = pysqlite.dialect
diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py
index af793d275..335b35c94 100644
--- a/lib/sqlalchemy/dialects/sqlite/base.py
+++ b/lib/sqlalchemy/dialects/sqlite/base.py
@@ -713,10 +713,12 @@ class SQLiteExecutionContext(default.DefaultExecutionContext):
return self.execution_options.get("sqlite_raw_colnames", False)
def _translate_colname(self, colname):
- # adjust for dotted column names. SQLite in the case of UNION may
- # store col names as "tablename.colname" in cursor.description
+ # adjust for dotted column names. SQLite
+ # in the case of UNION may store col names as
+ # "tablename.colname", or if using an attached database,
+ # "database.tablename.colname", in cursor.description
if not self._preserve_raw_colnames and "." in colname:
- return colname.split(".")[1], colname
+ return colname.split(".")[-1], colname
else:
return colname, None
@@ -829,20 +831,26 @@ class SQLiteDialect(default.DefaultDialect):
if schema is not None:
qschema = self.identifier_preparer.quote_identifier(schema)
master = '%s.sqlite_master' % qschema
- s = ("SELECT name FROM %s "
- "WHERE type='table' ORDER BY name") % (master,)
- rs = connection.execute(s)
else:
- try:
- s = ("SELECT name FROM "
- " (SELECT * FROM sqlite_master UNION ALL "
- " SELECT * FROM sqlite_temp_master) "
- "WHERE type='table' ORDER BY name")
- rs = connection.execute(s)
- except exc.DBAPIError:
- s = ("SELECT name FROM sqlite_master "
- "WHERE type='table' ORDER BY name")
- rs = connection.execute(s)
+ master = "sqlite_master"
+ s = ("SELECT name FROM %s "
+ "WHERE type='table' ORDER BY name") % (master,)
+ rs = connection.execute(s)
+ return [row[0] for row in rs]
+
+ @reflection.cache
+ def get_temp_table_names(self, connection, **kw):
+ s = "SELECT name FROM sqlite_temp_master "\
+ "WHERE type='table' ORDER BY name "
+ rs = connection.execute(s)
+
+ return [row[0] for row in rs]
+
+ @reflection.cache
+ def get_temp_view_names(self, connection, **kw):
+ s = "SELECT name FROM sqlite_temp_master "\
+ "WHERE type='view' ORDER BY name "
+ rs = connection.execute(s)
return [row[0] for row in rs]
@@ -869,20 +877,11 @@ class SQLiteDialect(default.DefaultDialect):
if schema is not None:
qschema = self.identifier_preparer.quote_identifier(schema)
master = '%s.sqlite_master' % qschema
- s = ("SELECT name FROM %s "
- "WHERE type='view' ORDER BY name") % (master,)
- rs = connection.execute(s)
else:
- try:
- s = ("SELECT name FROM "
- " (SELECT * FROM sqlite_master UNION ALL "
- " SELECT * FROM sqlite_temp_master) "
- "WHERE type='view' ORDER BY name")
- rs = connection.execute(s)
- except exc.DBAPIError:
- s = ("SELECT name FROM sqlite_master "
- "WHERE type='view' ORDER BY name")
- rs = connection.execute(s)
+ master = "sqlite_master"
+ s = ("SELECT name FROM %s "
+ "WHERE type='view' ORDER BY name") % (master,)
+ rs = connection.execute(s)
return [row[0] for row in rs]
@@ -1097,16 +1096,24 @@ class SQLiteDialect(default.DefaultDialect):
@reflection.cache
def get_unique_constraints(self, connection, table_name,
schema=None, **kw):
- UNIQUE_SQL = """
- SELECT sql
- FROM
- sqlite_master
- WHERE
- type='table' AND
- name=:table_name
- """
- c = connection.execute(UNIQUE_SQL, table_name=table_name)
- table_data = c.fetchone()[0]
+ try:
+ s = ("SELECT sql FROM "
+ " (SELECT * FROM sqlite_master UNION ALL "
+ " SELECT * FROM sqlite_temp_master) "
+ "WHERE name = '%s' "
+ "AND type = 'table'") % table_name
+ rs = connection.execute(s)
+ except exc.DBAPIError:
+ s = ("SELECT sql FROM sqlite_master WHERE name = '%s' "
+ "AND type = 'table'") % table_name
+ rs = connection.execute(s)
+ row = rs.fetchone()
+ if row is None:
+ # sqlite won't return the schema for the sqlite_master or
+ # sqlite_temp_master tables from this query. These tables
+ # don't have any unique constraints anyway.
+ return []
+ table_data = row[0]
UNIQUE_PATTERN = 'CONSTRAINT (\w+) UNIQUE \(([^\)]+)\)'
return [
diff --git a/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py b/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py
new file mode 100644
index 000000000..3c55a1de7
--- /dev/null
+++ b/lib/sqlalchemy/dialects/sqlite/pysqlcipher.py
@@ -0,0 +1,116 @@
+# sqlite/pysqlcipher.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""
+.. dialect:: sqlite+pysqlcipher
+ :name: pysqlcipher
+ :dbapi: pysqlcipher
+ :connectstring: sqlite+pysqlcipher://:passphrase/file_path[?kdf_iter=<iter>]
+ :url: https://pypi.python.org/pypi/pysqlcipher
+
+ ``pysqlcipher`` is a fork of the standard ``pysqlite`` driver to make
+ use of the `SQLCipher <https://www.zetetic.net/sqlcipher>`_ backend.
+
+ .. versionadded:: 0.9.9
+
+Driver
+------
+
+The driver here is the `pysqlcipher <https://pypi.python.org/pypi/pysqlcipher>`_
+driver, which makes use of the SQLCipher engine. This system essentially
+introduces new PRAGMA commands to SQLite which allows the setting of a
+passphrase and other encryption parameters, allowing the database
+file to be encrypted.
+
+Connect Strings
+---------------
+
+The format of the connect string is in every way the same as that
+of the :mod:`~sqlalchemy.dialects.sqlite.pysqlite` driver, except that the
+"password" field is now accepted, which should contain a passphrase::
+
+ e = create_engine('sqlite+pysqlcipher://:testing@/foo.db')
+
+For an absolute file path, two leading slashes should be used for the
+database name::
+
+ e = create_engine('sqlite+pysqlcipher://:testing@//path/to/foo.db')
+
+A selection of additional encryption-related pragmas supported by SQLCipher
+as documented at https://www.zetetic.net/sqlcipher/sqlcipher-api/ can be passed
+in the query string, and will result in that PRAGMA being called for each
+new connection. Currently, ``cipher``, ``kdf_iter``,
+``cipher_page_size`` and ``cipher_use_hmac`` are supported::
+
+ e = create_engine('sqlite+pysqlcipher://:testing@/foo.db?cipher=aes-256-cfb&kdf_iter=64000')
+
+
+Pooling Behavior
+----------------
+
+The driver makes a change to the default pool behavior of pysqlite
+as described in :ref:`pysqlite_threading_pooling`. The pysqlcipher driver
+has been observed to be significantly slower on connection than the
+pysqlite driver, most likely due to the encryption overhead, so the
+dialect here defaults to using the :class:`.SingletonThreadPool`
+implementation,
+instead of the :class:`.NullPool` pool used by pysqlite. As always, the pool
+implementation is entirely configurable using the
+:paramref:`.create_engine.poolclass` parameter; the :class:`.StaticPool` may
+be more feasible for single-threaded use, or :class:`.NullPool` may be used
+to prevent unencrypted connections from being held open for long periods of
+time, at the expense of slower startup time for new connections.
+
+
+"""
+from __future__ import absolute_import
+from .pysqlite import SQLiteDialect_pysqlite
+from ...engine import url as _url
+from ... import pool
+
+
+class SQLiteDialect_pysqlcipher(SQLiteDialect_pysqlite):
+ driver = 'pysqlcipher'
+
+ pragmas = ('kdf_iter', 'cipher', 'cipher_page_size', 'cipher_use_hmac')
+
+ @classmethod
+ def dbapi(cls):
+ from pysqlcipher import dbapi2 as sqlcipher
+ return sqlcipher
+
+ @classmethod
+ def get_pool_class(cls, url):
+ return pool.SingletonThreadPool
+
+ def connect(self, *cargs, **cparams):
+ passphrase = cparams.pop('passphrase', '')
+
+ pragmas = dict(
+ (key, cparams.pop(key, None)) for key in
+ self.pragmas
+ )
+
+ conn = super(SQLiteDialect_pysqlcipher, self).\
+ connect(*cargs, **cparams)
+ conn.execute('pragma key="%s"' % passphrase)
+ for prag, value in pragmas.items():
+ if value is not None:
+ conn.execute('pragma %s=%s' % (prag, value))
+
+ return conn
+
+ def create_connect_args(self, url):
+ super_url = _url.URL(
+ url.drivername, username=url.username,
+ host=url.host, database=url.database, query=url.query)
+ c_args, opts = super(SQLiteDialect_pysqlcipher, self).\
+ create_connect_args(super_url)
+ opts['passphrase'] = url.password
+ return c_args, opts
+
+dialect = SQLiteDialect_pysqlcipher
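
The pooling discussion above implies the usual override path; as a minimal
sketch (not part of the patch, passphrase and file name hypothetical),
:class:`.NullPool` can be selected instead of the default::

    from sqlalchemy import create_engine
    from sqlalchemy.pool import NullPool

    # NullPool avoids holding decrypted connections open between uses,
    # at the cost of re-keying the database for each new connection.
    e = create_engine(
        'sqlite+pysqlcipher://:testing@/foo.db?kdf_iter=64000',
        poolclass=NullPool)
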
diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py
index d2cc8890f..dd82be1d1 100644
--- a/lib/sqlalchemy/engine/base.py
+++ b/lib/sqlalchemy/engine/base.py
@@ -45,7 +45,7 @@ class Connection(Connectable):
"""
def __init__(self, engine, connection=None, close_with_result=False,
- _branch=False, _execution_options=None,
+ _branch_from=None, _execution_options=None,
_dispatch=None,
_has_events=None):
"""Construct a new Connection.
@@ -57,48 +57,80 @@ class Connection(Connectable):
"""
self.engine = engine
self.dialect = engine.dialect
- self.__connection = connection or engine.raw_connection()
- self.__transaction = None
- self.should_close_with_result = close_with_result
- self.__savepoint_seq = 0
- self.__branch = _branch
- self.__invalid = False
- self.__can_reconnect = True
- if _dispatch:
+ self.__branch_from = _branch_from
+ self.__branch = _branch_from is not None
+
+ if _branch_from:
+ self.__connection = connection
+ self._execution_options = _execution_options
+ self._echo = _branch_from._echo
+ self.should_close_with_result = False
self.dispatch = _dispatch
- elif _has_events is None:
- # if _has_events is sent explicitly as False,
- # then don't join the dispatch of the engine; we don't
- # want to handle any of the engine's events in that case.
- self.dispatch = self.dispatch._join(engine.dispatch)
- self._has_events = _has_events or (
- _has_events is None and engine._has_events)
-
- self._echo = self.engine._should_log_info()
- if _execution_options:
- self._execution_options =\
- engine._execution_options.union(_execution_options)
+ self._has_events = _branch_from._has_events
else:
+ self.__connection = connection \
+ if connection is not None else engine.raw_connection()
+ self.__transaction = None
+ self.__savepoint_seq = 0
+ self.should_close_with_result = close_with_result
+ self.__invalid = False
+ self.__can_reconnect = True
+ self._echo = self.engine._should_log_info()
+
+ if _has_events is None:
+ # if _has_events is sent explicitly as False,
+ # then don't join the dispatch of the engine; we don't
+ # want to handle any of the engine's events in that case.
+ self.dispatch = self.dispatch._join(engine.dispatch)
+ self._has_events = _has_events or (
+ _has_events is None and engine._has_events)
+
+ assert not _execution_options
self._execution_options = engine._execution_options
if self._has_events or self.engine._has_events:
- self.dispatch.engine_connect(self, _branch)
+ self.dispatch.engine_connect(self, self.__branch)
def _branch(self):
"""Return a new Connection which references this Connection's
engine and connection; but does not have close_with_result enabled,
and also whose close() method does nothing.
- This is used to execute "sub" statements within a single execution,
- usually an INSERT statement.
+ The Core uses this very sparingly, only in the case of
+ custom SQL default functions that are to be INSERTed as the
+ primary key of a row where we need to get the value back, so we have
+ to invoke it distinctly - this is a very uncommon case.
+
+ Userland code accesses _branch() when the connect() or
+ contextual_connect() methods are called. The branched connection
+ acts as much as possible like the parent, except that it stays
+ connected when a close() event occurs.
+
"""
+ if self.__branch_from:
+ return self.__branch_from._branch()
+ else:
+ return self.engine._connection_cls(
+ self.engine,
+ self.__connection,
+ _branch_from=self,
+ _execution_options=self._execution_options,
+ _has_events=self._has_events,
+ _dispatch=self.dispatch)
+
+ @property
+ def _root(self):
+ """return the 'root' connection.
- return self.engine._connection_cls(
- self.engine,
- self.__connection,
- _branch=True,
- _has_events=self._has_events,
- _dispatch=self.dispatch)
+ Returns 'self' if this connection is not a branch, else
+ returns the root connection from which we ultimately branched.
+
+ """
+
+ if self.__branch_from:
+ return self.__branch_from
+ else:
+ return self
def _clone(self):
"""Create a shallow copy of this Connection.
@@ -224,7 +256,7 @@ class Connection(Connectable):
def invalidated(self):
"""Return True if this connection was invalidated."""
- return self.__invalid
+ return self._root.__invalid
@property
def connection(self):
@@ -236,6 +268,9 @@ class Connection(Connectable):
return self._revalidate_connection()
def _revalidate_connection(self):
+ if self.__branch_from:
+ return self.__branch_from._revalidate_connection()
+
if self.__can_reconnect and self.__invalid:
if self.__transaction is not None:
raise exc.InvalidRequestError(
@@ -343,16 +378,17 @@ class Connection(Connectable):
:ref:`pool_connection_invalidation`
"""
+
if self.invalidated:
return
if self.closed:
raise exc.ResourceClosedError("This Connection is closed")
- if self._connection_is_valid:
- self.__connection.invalidate(exception)
- del self.__connection
- self.__invalid = True
+ if self._root._connection_is_valid:
+ self._root.__connection.invalidate(exception)
+ del self._root.__connection
+ self._root.__invalid = True
def detach(self):
"""Detach the underlying DB-API connection from its connection pool.
@@ -415,6 +451,8 @@ class Connection(Connectable):
:class:`.Engine`.
"""
+ if self.__branch_from:
+ return self.__branch_from.begin()
if self.__transaction is None:
self.__transaction = RootTransaction(self)
@@ -436,6 +474,9 @@ class Connection(Connectable):
See also :meth:`.Connection.begin`,
:meth:`.Connection.begin_twophase`.
"""
+ if self.__branch_from:
+ return self.__branch_from.begin_nested()
+
if self.__transaction is None:
self.__transaction = RootTransaction(self)
else:
@@ -459,6 +500,9 @@ class Connection(Connectable):
"""
+ if self.__branch_from:
+ return self.__branch_from.begin_twophase(xid=xid)
+
if self.__transaction is not None:
raise exc.InvalidRequestError(
"Cannot start a two phase transaction when a transaction "
@@ -479,10 +523,11 @@ class Connection(Connectable):
def in_transaction(self):
"""Return True if a transaction is in progress."""
-
- return self.__transaction is not None
+ return self._root.__transaction is not None
def _begin_impl(self, transaction):
+ assert not self.__branch_from
+
if self._echo:
self.engine.logger.info("BEGIN (implicit)")
@@ -497,6 +542,8 @@ class Connection(Connectable):
self._handle_dbapi_exception(e, None, None, None, None)
def _rollback_impl(self):
+ assert not self.__branch_from
+
if self._has_events or self.engine._has_events:
self.dispatch.rollback(self)
@@ -516,6 +563,8 @@ class Connection(Connectable):
self.__transaction = None
def _commit_impl(self, autocommit=False):
+ assert not self.__branch_from
+
if self._has_events or self.engine._has_events:
self.dispatch.commit(self)
@@ -532,6 +581,8 @@ class Connection(Connectable):
self.__transaction = None
def _savepoint_impl(self, name=None):
+ assert not self.__branch_from
+
if self._has_events or self.engine._has_events:
self.dispatch.savepoint(self, name)
@@ -543,6 +594,8 @@ class Connection(Connectable):
return name
def _rollback_to_savepoint_impl(self, name, context):
+ assert not self.__branch_from
+
if self._has_events or self.engine._has_events:
self.dispatch.rollback_savepoint(self, name, context)
@@ -551,6 +604,8 @@ class Connection(Connectable):
self.__transaction = context
def _release_savepoint_impl(self, name, context):
+ assert not self.__branch_from
+
if self._has_events or self.engine._has_events:
self.dispatch.release_savepoint(self, name, context)
@@ -559,6 +614,8 @@ class Connection(Connectable):
self.__transaction = context
def _begin_twophase_impl(self, transaction):
+ assert not self.__branch_from
+
if self._echo:
self.engine.logger.info("BEGIN TWOPHASE (implicit)")
if self._has_events or self.engine._has_events:
@@ -571,6 +628,8 @@ class Connection(Connectable):
self.connection._reset_agent = transaction
def _prepare_twophase_impl(self, xid):
+ assert not self.__branch_from
+
if self._has_events or self.engine._has_events:
self.dispatch.prepare_twophase(self, xid)
@@ -579,6 +638,8 @@ class Connection(Connectable):
self.engine.dialect.do_prepare_twophase(self, xid)
def _rollback_twophase_impl(self, xid, is_prepared):
+ assert not self.__branch_from
+
if self._has_events or self.engine._has_events:
self.dispatch.rollback_twophase(self, xid, is_prepared)
@@ -595,6 +656,8 @@ class Connection(Connectable):
self.__transaction = None
def _commit_twophase_impl(self, xid, is_prepared):
+ assert not self.__branch_from
+
if self._has_events or self.engine._has_events:
self.dispatch.commit_twophase(self, xid, is_prepared)
@@ -610,8 +673,8 @@ class Connection(Connectable):
self.__transaction = None
def _autorollback(self):
- if not self.in_transaction():
- self._rollback_impl()
+ if not self._root.in_transaction():
+ self._root._rollback_impl()
def close(self):
"""Close this :class:`.Connection`.
@@ -632,13 +695,21 @@ class Connection(Connectable):
and will allow no further operations.
"""
+ if self.__branch_from:
+ try:
+ del self.__connection
+ except AttributeError:
+ pass
+ finally:
+ self.__can_reconnect = False
+ return
try:
conn = self.__connection
except AttributeError:
pass
else:
- if not self.__branch:
- conn.close()
+
+ conn.close()
if conn._reset_agent is self.__transaction:
conn._reset_agent = None
@@ -993,8 +1064,8 @@ class Connection(Connectable):
result.rowcount
result.close(_autoclose_connection=False)
- if self.__transaction is None and context.should_autocommit:
- self._commit_impl(autocommit=True)
+ if context.should_autocommit and self._root.__transaction is None:
+ self._root._commit_impl(autocommit=True)
if result.closed and self.should_close_with_result:
self.close()
@@ -1055,8 +1126,6 @@ class Connection(Connectable):
"""
try:
cursor.close()
- except (SystemExit, KeyboardInterrupt):
- raise
except Exception:
# log the error through the connection pool's logger.
self.engine.pool.logger.error(
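
To make the new branching scheme concrete, here is a brief sketch using the
private ``_branch()`` API (for illustration only; application code would not
normally call it)::

    from sqlalchemy import create_engine

    engine = create_engine('sqlite://')
    conn = engine.connect()

    branch = conn._branch()       # shares conn's DBAPI connection
    branch.close()                # leaves the DBAPI connection open

    assert not conn.closed        # the root connection remains usable
    assert branch._root is conn   # invalidation/transaction state lives here
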
diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py
index 71df29cac..0ad2efae0 100644
--- a/lib/sqlalchemy/engine/interfaces.py
+++ b/lib/sqlalchemy/engine/interfaces.py
@@ -308,7 +308,15 @@ class Dialect(object):
def get_table_names(self, connection, schema=None, **kw):
"""Return a list of table names for `schema`."""
- raise NotImplementedError
+ raise NotImplementedError()
+
+ def get_temp_table_names(self, connection, schema=None, **kw):
+ """Return a list of temporary table names on the given connection,
+ if supported by the underlying backend.
+
+ """
+
+ raise NotImplementedError()
def get_view_names(self, connection, schema=None, **kw):
"""Return a list of all view names available in the database.
@@ -319,6 +327,14 @@ class Dialect(object):
raise NotImplementedError()
+ def get_temp_view_names(self, connection, schema=None, **kw):
+ """Return a list of temporary view names on the given connection,
+ if supported by the underlying backend.
+
+ """
+
+ raise NotImplementedError()
+
def get_view_definition(self, connection, view_name, schema=None, **kw):
"""Return view definition.
diff --git a/lib/sqlalchemy/engine/reflection.py b/lib/sqlalchemy/engine/reflection.py
index cf1f2d3dd..2a1def86a 100644
--- a/lib/sqlalchemy/engine/reflection.py
+++ b/lib/sqlalchemy/engine/reflection.py
@@ -201,6 +201,30 @@ class Inspector(object):
tnames = list(topological.sort(tuples, tnames))
return tnames
+ def get_temp_table_names(self):
+ """return a list of temporary table names for the current bind.
+
+ This method is unsupported by most dialects; currently
+ only SQLite implements it.
+
+ .. versionadded:: 1.0.0
+
+ """
+ return self.dialect.get_temp_table_names(
+ self.bind, info_cache=self.info_cache)
+
+ def get_temp_view_names(self):
+ """return a list of temporary view names for the current bind.
+
+ This method is unsupported by most dialects; currently
+ only SQLite implements it.
+
+ .. versionadded:: 1.0.0
+
+ """
+ return self.dialect.get_temp_view_names(
+ self.bind, info_cache=self.info_cache)
+
def get_table_options(self, table_name, schema=None, **kw):
"""Return a dictionary of options specified when the table of the
given name was created.
@@ -465,55 +489,87 @@ class Inspector(object):
for col_d in self.get_columns(
table_name, schema, **table.dialect_kwargs):
found_table = True
- orig_name = col_d['name']
- table.dispatch.column_reflect(self, table, col_d)
+ self._reflect_column(
+ table, col_d, include_columns,
+ exclude_columns, cols_by_orig_name)
- name = col_d['name']
- if include_columns and name not in include_columns:
- continue
- if exclude_columns and name in exclude_columns:
- continue
+ if not found_table:
+ raise exc.NoSuchTableError(table.name)
- coltype = col_d['type']
+ self._reflect_pk(
+ table_name, schema, table, cols_by_orig_name, exclude_columns)
- col_kw = dict(
- (k, col_d[k])
- for k in ['nullable', 'autoincrement', 'quote', 'info', 'key']
- if k in col_d
- )
+ self._reflect_fk(
+ table_name, schema, table, cols_by_orig_name,
+ exclude_columns, reflection_options)
- colargs = []
- if col_d.get('default') is not None:
- # the "default" value is assumed to be a literal SQL
- # expression, so is wrapped in text() so that no quoting
- # occurs on re-issuance.
- colargs.append(
- sa_schema.DefaultClause(
- sql.text(col_d['default']), _reflected=True
- )
- )
+ self._reflect_indexes(
+ table_name, schema, table, cols_by_orig_name,
+ include_columns, exclude_columns, reflection_options)
- if 'sequence' in col_d:
- # TODO: mssql and sybase are using this.
- seq = col_d['sequence']
- sequence = sa_schema.Sequence(seq['name'], 1, 1)
- if 'start' in seq:
- sequence.start = seq['start']
- if 'increment' in seq:
- sequence.increment = seq['increment']
- colargs.append(sequence)
+ self._reflect_unique_constraints(
+ table_name, schema, table, cols_by_orig_name,
+ include_columns, exclude_columns, reflection_options)
- cols_by_orig_name[orig_name] = col = \
- sa_schema.Column(name, coltype, *colargs, **col_kw)
+ def _reflect_column(
+ self, table, col_d, include_columns,
+ exclude_columns, cols_by_orig_name):
- if col.key in table.primary_key:
- col.primary_key = True
- table.append_column(col)
+ orig_name = col_d['name']
- if not found_table:
- raise exc.NoSuchTableError(table.name)
+ table.dispatch.column_reflect(self, table, col_d)
+
+ # fetch name again as column_reflect is allowed to
+ # change it
+ name = col_d['name']
+ if (include_columns and name not in include_columns) \
+ or (exclude_columns and name in exclude_columns):
+ return
+
+ coltype = col_d['type']
+
+ col_kw = dict(
+ (k, col_d[k])
+ for k in ['nullable', 'autoincrement', 'quote', 'info', 'key']
+ if k in col_d
+ )
+
+ colargs = []
+ if col_d.get('default') is not None:
+ # the "default" value is assumed to be a literal SQL
+ # expression, so is wrapped in text() so that no quoting
+ # occurs on re-issuance.
+ colargs.append(
+ sa_schema.DefaultClause(
+ sql.text(col_d['default']), _reflected=True
+ )
+ )
+ if 'sequence' in col_d:
+ self._reflect_col_sequence(col_d, colargs)
+
+ cols_by_orig_name[orig_name] = col = \
+ sa_schema.Column(name, coltype, *colargs, **col_kw)
+
+ if col.key in table.primary_key:
+ col.primary_key = True
+ table.append_column(col)
+
+ def _reflect_col_sequence(self, col_d, colargs):
+ if 'sequence' in col_d:
+ # TODO: mssql and sybase are using this.
+ seq = col_d['sequence']
+ sequence = sa_schema.Sequence(seq['name'], 1, 1)
+ if 'start' in seq:
+ sequence.start = seq['start']
+ if 'increment' in seq:
+ sequence.increment = seq['increment']
+ colargs.append(sequence)
+
+ def _reflect_pk(
+ self, table_name, schema, table,
+ cols_by_orig_name, exclude_columns):
pk_cons = self.get_pk_constraint(
table_name, schema, **table.dialect_kwargs)
if pk_cons:
@@ -530,6 +586,9 @@ class Inspector(object):
# its column collection
table.primary_key._reload(pk_cols)
+ def _reflect_fk(
+ self, table_name, schema, table, cols_by_orig_name,
+ exclude_columns, reflection_options):
fkeys = self.get_foreign_keys(
table_name, schema, **table.dialect_kwargs)
for fkey_d in fkeys:
@@ -572,6 +631,10 @@ class Inspector(object):
sa_schema.ForeignKeyConstraint(constrained_columns, refspec,
conname, link_to_name=True,
**options))
+
+ def _reflect_indexes(
+ self, table_name, schema, table, cols_by_orig_name,
+ include_columns, exclude_columns, reflection_options):
# Indexes
indexes = self.get_indexes(table_name, schema)
for index_d in indexes:
@@ -579,12 +642,15 @@ class Inspector(object):
columns = index_d['column_names']
unique = index_d['unique']
flavor = index_d.get('type', 'index')
+ duplicates = index_d.get('duplicates_constraint')
if include_columns and \
not set(columns).issubset(include_columns):
util.warn(
"Omitting %s key for (%s), key covers omitted columns." %
(flavor, ', '.join(columns)))
continue
+ if duplicates:
+ continue
# look for columns by orig name in cols_by_orig_name,
# but support columns that are in-Python only as fallback
idx_cols = []
@@ -602,3 +668,43 @@ class Inspector(object):
idx_cols.append(idx_col)
sa_schema.Index(name, *idx_cols, **dict(unique=unique))
+
+ def _reflect_unique_constraints(
+ self, table_name, schema, table, cols_by_orig_name,
+ include_columns, exclude_columns, reflection_options):
+
+ # Unique Constraints
+ try:
+ constraints = self.get_unique_constraints(table_name, schema)
+ except NotImplementedError:
+ # optional dialect feature
+ return
+
+ for const_d in constraints:
+ conname = const_d['name']
+ columns = const_d['column_names']
+ duplicates = const_d.get('duplicates_index')
+ if include_columns and \
+ not set(columns).issubset(include_columns):
+ util.warn(
+ "Omitting unique constraint key for (%s), "
+ "key covers omitted columns." %
+ ', '.join(columns))
+ continue
+ if duplicates:
+ continue
+ # look for columns by orig name in cols_by_orig_name,
+ # but support columns that are in-Python only as fallback
+ constrained_cols = []
+ for c in columns:
+ try:
+ constrained_col = cols_by_orig_name[c] \
+ if c in cols_by_orig_name else table.c[c]
+ except KeyError:
+ util.warn(
+ "unique constraint key '%s' was not located in "
+ "columns for table '%s'" % (c, table_name))
+ else:
+ constrained_cols.append(constrained_col)
+ table.append_constraint(
+ sa_schema.UniqueConstraint(*constrained_cols, name=conname))
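
From the user side, the new reflection methods are reached through the
:class:`.Inspector`; a quick sketch against an in-memory SQLite database
(which reuses a single connection in one thread, so the temporary table
remains visible)::

    from sqlalchemy import create_engine, inspect

    engine = create_engine('sqlite://')
    engine.execute('CREATE TEMPORARY TABLE scratch (id INTEGER)')

    insp = inspect(engine)
    print(insp.get_temp_table_names())   # ['scratch']
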
diff --git a/lib/sqlalchemy/engine/strategies.py b/lib/sqlalchemy/engine/strategies.py
index 38206be89..398ef8df6 100644
--- a/lib/sqlalchemy/engine/strategies.py
+++ b/lib/sqlalchemy/engine/strategies.py
@@ -162,6 +162,7 @@ class DefaultEngineStrategy(EngineStrategy):
def first_connect(dbapi_connection, connection_record):
c = base.Connection(engine, connection=dbapi_connection,
_has_events=False)
+ c._execution_options = util.immutabledict()
dialect.initialize(c)
event.listen(pool, 'first_connect', first_connect, once=True)
diff --git a/lib/sqlalchemy/event/attr.py b/lib/sqlalchemy/event/attr.py
index dba1063cf..be2a82208 100644
--- a/lib/sqlalchemy/event/attr.py
+++ b/lib/sqlalchemy/event/attr.py
@@ -319,14 +319,12 @@ class _ListenerCollection(RefCollection, _CompoundListener):
registry._stored_in_collection_multi(self, other, to_associate)
def insert(self, event_key, propagate):
- if event_key._listen_fn not in self.listeners:
- event_key.prepend_to_list(self, self.listeners)
+ if event_key.prepend_to_list(self, self.listeners):
if propagate:
self.propagate.add(event_key._listen_fn)
def append(self, event_key, propagate):
- if event_key._listen_fn not in self.listeners:
- event_key.append_to_list(self, self.listeners)
+ if event_key.append_to_list(self, self.listeners):
if propagate:
self.propagate.add(event_key._listen_fn)
diff --git a/lib/sqlalchemy/event/registry.py b/lib/sqlalchemy/event/registry.py
index ba2f671a3..5b422c401 100644
--- a/lib/sqlalchemy/event/registry.py
+++ b/lib/sqlalchemy/event/registry.py
@@ -71,13 +71,15 @@ def _stored_in_collection(event_key, owner):
listen_ref = weakref.ref(event_key._listen_fn)
if owner_ref in dispatch_reg:
- assert dispatch_reg[owner_ref] == listen_ref
- else:
- dispatch_reg[owner_ref] = listen_ref
+ return False
+
+ dispatch_reg[owner_ref] = listen_ref
listener_to_key = _collection_to_key[owner_ref]
listener_to_key[listen_ref] = key
+ return True
+
def _removed_from_collection(event_key, owner):
key = event_key._key
@@ -180,6 +182,17 @@ class _EventKey(object):
def listen(self, *args, **kw):
once = kw.pop("once", False)
+ named = kw.pop("named", False)
+
+ target, identifier, fn = \
+ self.dispatch_target, self.identifier, self._listen_fn
+
+ dispatch_descriptor = getattr(target.dispatch, identifier)
+
+ adjusted_fn = dispatch_descriptor._adjust_fn_spec(fn, named)
+
+ self = self.with_wrapper(adjusted_fn)
+
if once:
self.with_wrapper(
util.only_once(self._listen_fn)).listen(*args, **kw)
@@ -215,9 +228,6 @@ class _EventKey(object):
dispatch_descriptor = getattr(target.dispatch, identifier)
- fn = dispatch_descriptor._adjust_fn_spec(fn, named)
- self = self.with_wrapper(fn)
-
if insert:
dispatch_descriptor.\
for_modify(target.dispatch).insert(self, propagate)
@@ -229,18 +239,20 @@ class _EventKey(object):
def _listen_fn(self):
return self.fn_wrap or self.fn
- def append_value_to_list(self, owner, list_, value):
- _stored_in_collection(self, owner)
- list_.append(value)
-
def append_to_list(self, owner, list_):
- _stored_in_collection(self, owner)
- list_.append(self._listen_fn)
+ if _stored_in_collection(self, owner):
+ list_.append(self._listen_fn)
+ return True
+ else:
+ return False
def remove_from_list(self, owner, list_):
_removed_from_collection(self, owner)
list_.remove(self._listen_fn)
def prepend_to_list(self, owner, list_):
- _stored_in_collection(self, owner)
- list_.appendleft(self._listen_fn)
+ if _stored_in_collection(self, owner):
+ list_.appendleft(self._listen_fn)
+ return True
+ else:
+ return False
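
With ``_stored_in_collection`` now reporting whether the key was newly added,
a duplicate registration is detected in the registry itself rather than by
scanning the listener list; registering the same function twice remains a
quiet no-op, as in this sketch::

    from sqlalchemy import create_engine, event

    engine = create_engine('sqlite://')

    def on_checkout(dbapi_conn, connection_record, connection_proxy):
        print('checkout')

    event.listen(engine, 'checkout', on_checkout)
    event.listen(engine, 'checkout', on_checkout)   # stored only once
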
diff --git a/lib/sqlalchemy/events.py b/lib/sqlalchemy/events.py
index 1ecec51b6..b4f057b0a 100644
--- a/lib/sqlalchemy/events.py
+++ b/lib/sqlalchemy/events.py
@@ -338,7 +338,7 @@ class PoolEvents(event.Events):
"""
- def reset(self, dbapi_connnection, connection_record):
+ def reset(self, dbapi_connection, connection_record):
"""Called before the "reset" action occurs for a pooled connection.
This event represents
@@ -470,7 +470,8 @@ class ConnectionEvents(event.Events):
@classmethod
def _listen(cls, event_key, retval=False):
target, identifier, fn = \
- event_key.dispatch_target, event_key.identifier, event_key.fn
+ event_key.dispatch_target, event_key.identifier, \
+ event_key._listen_fn
target._has_events = True
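
Because ``_adjust_fn_spec`` now runs inside ``listen()`` and
``ConnectionEvents._listen`` consumes ``event_key._listen_fn``, argument-style
wrappers such as ``named=True`` apply before any ``once`` wrapping and work
for connection events; a short sketch of the named-argument style::

    from sqlalchemy import create_engine, event

    engine = create_engine('sqlite://')

    @event.listens_for(engine, 'before_execute', named=True)
    def before_execute(**kw):
        # with named=True, all event arguments arrive as keywords
        print('executing:', kw['clauseelement'])
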
diff --git a/lib/sqlalchemy/exc.py b/lib/sqlalchemy/exc.py
index a82bae33f..3271d09d4 100644
--- a/lib/sqlalchemy/exc.py
+++ b/lib/sqlalchemy/exc.py
@@ -238,14 +238,16 @@ class StatementError(SQLAlchemyError):
def __str__(self):
from sqlalchemy.sql import util
- params_repr = util._repr_params(self.params, 10)
+ details = [SQLAlchemyError.__str__(self)]
+ if self.statement:
+ details.append("[SQL: %r]" % self.statement)
+ if self.params:
+ params_repr = util._repr_params(self.params, 10)
+ details.append("[parameters: %r]" % params_repr)
return ' '.join([
"(%s)" % det for det in self.detail
- ] + [
- SQLAlchemyError.__str__(self),
- repr(self.statement), repr(params_repr)
- ])
+ ] + details)
def __unicode__(self):
return self.__str__()
@@ -280,17 +282,19 @@ class DBAPIError(StatementError):
connection_invalidated=False):
# Don't ever wrap these, just return them directly as if
# DBAPIError didn't exist.
- if isinstance(orig, (KeyboardInterrupt, SystemExit, DontWrapMixin)):
+ if (isinstance(orig, BaseException) and
+ not isinstance(orig, Exception)) or \
+ isinstance(orig, DontWrapMixin):
return orig
if orig is not None:
# not a DBAPI error, statement is present.
# raise a StatementError
if not isinstance(orig, dbapi_base_err) and statement:
- msg = traceback.format_exception_only(
- orig.__class__, orig)[-1].strip()
return StatementError(
- "%s (original cause: %s)" % (str(orig), msg),
+ "(%s.%s) %s" %
+ (orig.__class__.__module__, orig.__class__.__name__,
+ orig),
statement, params, orig
)
@@ -310,13 +314,12 @@ class DBAPIError(StatementError):
def __init__(self, statement, params, orig, connection_invalidated=False):
try:
text = str(orig)
- except (KeyboardInterrupt, SystemExit):
- raise
except Exception as e:
text = 'Error in str() of DB-API-generated exception: ' + str(e)
StatementError.__init__(
self,
- '(%s) %s' % (orig.__class__.__name__, text),
+ '(%s.%s) %s' % (
+ orig.__class__.__module__, orig.__class__.__name__, text, ),
statement,
params,
orig
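
The net effect of the exception rework is that wrapped errors carry the
module-qualified DBAPI exception name, with the SQL and parameters bracketed,
along the lines of (illustrative)::

    sqlalchemy.exc.IntegrityError: (sqlite3.IntegrityError) UNIQUE constraint failed: user.id [SQL: 'INSERT INTO user (id) VALUES (?)'] [parameters: (1,)]
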
diff --git a/lib/sqlalchemy/ext/automap.py b/lib/sqlalchemy/ext/automap.py
index 121285ab3..c11795d37 100644
--- a/lib/sqlalchemy/ext/automap.py
+++ b/lib/sqlalchemy/ext/automap.py
@@ -243,7 +243,26 @@ follows:
one-to-many backref will be created on the referred class referring
to this class.
-4. The names of the relationships are determined using the
+4. If any of the columns that are part of the :class:`.ForeignKeyConstraint`
+ are not nullable (e.g. ``nullable=False``), a
+ :paramref:`~.relationship.cascade` keyword argument
+ of ``all, delete-orphan`` will be added to the keyword arguments to
+ be passed to the relationship or backref. If the
+ :class:`.ForeignKeyConstraint` reports that
+ :paramref:`.ForeignKeyConstraint.ondelete`
+ is set to ``CASCADE`` for a not-null set of columns, or ``SET NULL``
+ for a nullable set of columns, the
+ :paramref:`~.relationship.passive_deletes`
+ flag is set to ``True`` in the set of relationship keyword arguments.
+ Note that not all backends support reflection of ON DELETE.
+
+ .. versionadded:: 1.0.0 - automap will detect non-nullable foreign key
+ constraints when producing a one-to-many relationship and establish
+ a default cascade of ``all, delete-orphan`` if so; additionally,
+ if the constraint specifies :paramref:`.ForeignKeyConstraint.ondelete`
+ of ``CASCADE`` for non-nullable or ``SET NULL`` for nullable columns,
+ the ``passive_deletes=True`` option is also added.
+
+5. The names of the relationships are determined using the
:paramref:`.AutomapBase.prepare.name_for_scalar_relationship` and
:paramref:`.AutomapBase.prepare.name_for_collection_relationship`
callable functions. It is important to note that the default relationship
@@ -252,18 +271,18 @@ follows:
alternate class naming scheme, that's the name from which the relationship
name will be derived.
-5. The classes are inspected for an existing mapped property matching these
+6. The classes are inspected for an existing mapped property matching these
names. If one is detected on one side, but none on the other side,
:class:`.AutomapBase` attempts to create a relationship on the missing side,
then uses the :paramref:`.relationship.back_populates` parameter in order to
point the new relationship to the other side.
-6. In the usual case where no relationship is on either side,
+7. In the usual case where no relationship is on either side,
:meth:`.AutomapBase.prepare` produces a :func:`.relationship` on the
"many-to-one" side and matches it to the other using the
:paramref:`.relationship.backref` parameter.
-7. Production of the :func:`.relationship` and optionally the :func:`.backref`
+8. Production of the :func:`.relationship` and optionally the :func:`.backref`
is handed off to the :paramref:`.AutomapBase.prepare.generate_relationship`
function, which can be supplied by the end-user in order to augment
the arguments passed to :func:`.relationship` or :func:`.backref` or to
@@ -877,6 +896,19 @@ def _relationships_for_fks(automap_base, map_config, table_to_map_config,
constraint
)
+ o2m_kws = {}
+ nullable = False not in set([fk.parent.nullable for fk in fks])
+ if not nullable:
+ o2m_kws['cascade'] = "all, delete-orphan"
+
+ if constraint.ondelete and \
+ constraint.ondelete.lower() == "cascade":
+ o2m_kws['passive_deletes'] = True
+ elif constraint.ondelete and \
+ constraint.ondelete.lower() == "set null":
+ o2m_kws['passive_deletes'] = True
+
create_backref = backref_name not in referred_cfg.properties
if relationship_name not in map_config.properties:
@@ -885,7 +917,8 @@ def _relationships_for_fks(automap_base, map_config, table_to_map_config,
automap_base,
interfaces.ONETOMANY, backref,
backref_name, referred_cls, local_cls,
- collection_class=collection_class)
+ collection_class=collection_class,
+ **o2m_kws)
else:
backref_obj = None
rel = generate_relationship(automap_base,
@@ -916,7 +949,8 @@ def _relationships_for_fks(automap_base, map_config, table_to_map_config,
fk.parent
for fk in constraint.elements],
back_populates=relationship_name,
- collection_class=collection_class)
+ collection_class=collection_class,
+ **o2m_kws)
if rel is not None:
referred_cfg.properties[backref_name] = rel
map_config.properties[
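
As a sketch of the new automap behavior, assuming an existing database file
with tables ``parent`` and ``child`` where ``child.parent_id`` is NOT NULL
(names hypothetical)::

    from sqlalchemy import create_engine
    from sqlalchemy.ext.automap import automap_base

    Base = automap_base()
    engine = create_engine('sqlite:///mydata.db')
    Base.prepare(engine, reflect=True)

    Parent = Base.classes.parent
    # the generated Parent.child_collection relationship carries
    # cascade="all, delete-orphan" because the foreign key is not
    # nullable; an ondelete of "CASCADE" or "SET NULL" would
    # additionally set passive_deletes=True
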
diff --git a/lib/sqlalchemy/ext/declarative/__init__.py b/lib/sqlalchemy/ext/declarative/__init__.py
index 3cbc85c0c..2b611252a 100644
--- a/lib/sqlalchemy/ext/declarative/__init__.py
+++ b/lib/sqlalchemy/ext/declarative/__init__.py
@@ -873,8 +873,7 @@ the method without the need to copy it.
Columns generated by :class:`~.declared_attr` can also be
referenced by ``__mapper_args__`` to a limited degree, currently
-by ``polymorphic_on`` and ``version_id_col``, by specifying the
-classdecorator itself into the dictionary - the declarative extension
+by ``polymorphic_on`` and ``version_id_col``; the declarative extension
will resolve them at class construction time::
class MyMixin:
@@ -889,7 +888,6 @@ will resolve them at class construction time::
id = Column(Integer, primary_key=True)
-
Mixing in Relationships
~~~~~~~~~~~~~~~~~~~~~~~
@@ -922,6 +920,7 @@ reference a common target class via many-to-one::
__tablename__ = 'target'
id = Column(Integer, primary_key=True)
+
Using Advanced Relationship Arguments (e.g. ``primaryjoin``, etc.)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -1004,6 +1003,24 @@ requirement so that no reliance on copying is needed::
class Something(SomethingMixin, Base):
__tablename__ = "something"
+The :func:`.column_property` or other construct may refer
+to other columns from the mixin. These are copied ahead of time before
+the :class:`.declared_attr` is invoked::
+
+ class SomethingMixin(object):
+ x = Column(Integer)
+
+ y = Column(Integer)
+
+ @declared_attr
+ def x_plus_y(cls):
+ return column_property(cls.x + cls.y)
+
+
+.. versionchanged:: 1.0.0 mixin columns are copied to the final mapped class
+ so that :class:`.declared_attr` methods can access the actual column
+ that will be mapped.
+
Mixing in Association Proxy and Other Attributes
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -1087,19 +1104,20 @@ and ``TypeB`` classes.
Controlling table inheritance with mixins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-The ``__tablename__`` attribute in conjunction with the hierarchy of
-classes involved in a declarative mixin scenario controls what type of
-table inheritance, if any,
-is configured by the declarative extension.
+The ``__tablename__`` attribute may be used to provide a function that
+will determine the name of the table used for each class in an inheritance
+hierarchy, as well as whether a class has its own distinct table.
-If the ``__tablename__`` is computed by a mixin, you may need to
-control which classes get the computed attribute in order to get the
-type of table inheritance you require.
+This is achieved using the :class:`.declared_attr` indicator in conjunction
+with a method named ``__tablename__()``. Declarative will always
+invoke :class:`.declared_attr` for the special names
+``__tablename__``, ``__mapper_args__`` and ``__table_args__``
+**for each mapped class in the hierarchy**. The function therefore
+needs to expect to receive each class individually and to provide the
+correct answer for each.
-For example, if you had a mixin that computes ``__tablename__`` but
-where you wanted to use that mixin in a single table inheritance
-hierarchy, you can explicitly specify ``__tablename__`` as ``None`` to
-indicate that the class should not have a table mapped::
+For example, to create a mixin that gives every class a simple table
+name based on class name::
from sqlalchemy.ext.declarative import declared_attr
@@ -1118,15 +1136,10 @@ indicate that the class should not have a table mapped::
__mapper_args__ = {'polymorphic_identity': 'engineer'}
primary_language = Column(String(50))
-Alternatively, you can make the mixin intelligent enough to only
-return a ``__tablename__`` in the event that no table is already
-mapped in the inheritance hierarchy. To help with this, a
-:func:`~sqlalchemy.ext.declarative.has_inherited_table` helper
-function is provided that returns ``True`` if a parent class already
-has a mapped table.
-
-As an example, here's a mixin that will only allow single table
-inheritance::
+Alternatively, we can modify our ``__tablename__`` function to return
+``None`` for subclasses, using :func:`.has_inherited_table`. This has
+the effect of those subclasses being mapped with single table inheritance
+against the parent::
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.ext.declarative import has_inherited_table
@@ -1147,6 +1160,64 @@ inheritance::
primary_language = Column(String(50))
__mapper_args__ = {'polymorphic_identity': 'engineer'}
+.. _mixin_inheritance_columns:
+
+Mixing in Columns in Inheritance Scenarios
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+In contrast to how ``__tablename__`` and other special names are handled when
+used with :class:`.declared_attr`, when we mix in columns and properties (e.g.
+relationships, column properties, etc.), the function is
+invoked for the **base class only** in the hierarchy. Below, only the
+``Person`` class will receive a column
+called ``id``; the mapping will fail on ``Engineer``, which is not given
+a primary key::
+
+ class HasId(object):
+ @declared_attr
+ def id(cls):
+ return Column('id', Integer, primary_key=True)
+
+ class Person(HasId, Base):
+ __tablename__ = 'person'
+ discriminator = Column('type', String(50))
+ __mapper_args__ = {'polymorphic_on': discriminator}
+
+ class Engineer(Person):
+ __tablename__ = 'engineer'
+ primary_language = Column(String(50))
+ __mapper_args__ = {'polymorphic_identity': 'engineer'}
+
+It is usually the case in joined-table inheritance that we want distinctly
+named columns on each subclass. However in this case, we may want to have
+an ``id`` column on every table, and have them refer to each other via
+foreign key. We can achieve this as a mixin by using the
+:attr:`.declared_attr.cascading` modifier, which indicates that the
+function should be invoked **for each class in the hierarchy**, just like
+it does for ``__tablename__``::
+
+ class HasId(object):
+ @declared_attr.cascading
+ def id(cls):
+ if has_inherited_table(cls):
+ return Column('id',
+ Integer,
+ ForeignKey('person.id'), primary_key=True)
+ else:
+ return Column('id', Integer, primary_key=True)
+
+ class Person(HasId, Base):
+ __tablename__ = 'person'
+ discriminator = Column('type', String(50))
+ __mapper_args__ = {'polymorphic_on': discriminator}
+
+ class Engineer(Person):
+ __tablename__ = 'engineer'
+ primary_language = Column(String(50))
+ __mapper_args__ = {'polymorphic_identity': 'engineer'}
+
+
+.. versionadded:: 1.0.0 added :attr:`.declared_attr.cascading`.
Combining Table/Mapper Arguments from Multiple Mixins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
diff --git a/lib/sqlalchemy/ext/declarative/api.py b/lib/sqlalchemy/ext/declarative/api.py
index daf8bffb5..66fe05fd0 100644
--- a/lib/sqlalchemy/ext/declarative/api.py
+++ b/lib/sqlalchemy/ext/declarative/api.py
@@ -8,12 +8,13 @@
from ...schema import Table, MetaData
-from ...orm import synonym as _orm_synonym, mapper,\
+from ...orm import synonym as _orm_synonym, \
comparable_property,\
- interfaces, properties
+ interfaces, properties, attributes
from ...orm.util import polymorphic_union
from ...orm.base import _mapper_or_none
-from ...util import OrderedDict
+from ...util import OrderedDict, hybridmethod, hybridproperty
+from ... import util
from ... import exc
import weakref
@@ -21,7 +22,6 @@ from .base import _as_declarative, \
_declarative_constructor,\
_DeferredMapperConfig, _add_attribute
from .clsregistry import _class_resolver
-from . import clsregistry
def instrument_declarative(cls, registry, metadata):
@@ -157,12 +157,98 @@ class declared_attr(interfaces._MappedAttribute, property):
"""
- def __init__(self, fget, *arg, **kw):
- super(declared_attr, self).__init__(fget, *arg, **kw)
+ def __init__(self, fget, cascading=False):
+ super(declared_attr, self).__init__(fget)
self.__doc__ = fget.__doc__
+ self._cascading = cascading
def __get__(desc, self, cls):
- return desc.fget(cls)
+ # use the ClassManager for memoization of values. This is better than
+ # adding yet another attribute onto the class, or using weakrefs
+ # here which are slow and take up memory. It also allows us to
+ # warn for non-mapped use of declared_attr.
+
+ manager = attributes.manager_of_class(cls)
+ if manager is None:
+ util.warn(
+ "Unmanaged access of declarative attribute %s from "
+ "non-mapped class %s" %
+ (desc.fget.__name__, cls.__name__))
+ return desc.fget(cls)
+ try:
+ reg = manager.info['declared_attr_reg']
+ except KeyError:
+ raise exc.InvalidRequestError(
+ "@declared_attr called outside of the "
+ "declarative mapping process; is declarative_base() being "
+ "used correctly?")
+
+ if desc in reg:
+ return reg[desc]
+ else:
+ reg[desc] = obj = desc.fget(cls)
+ return obj
+
+ @hybridmethod
+ def _stateful(cls, **kw):
+ return _stateful_declared_attr(**kw)
+
+ @hybridproperty
+ def cascading(cls):
+ """Mark a :class:`.declared_attr` as cascading.
+
+ This is a special-use modifier which indicates that a column
+ or MapperProperty-based declared attribute should be configured
+ distinctly per mapped subclass, within a mapped-inheritance scenario.
+
+ Below, both MyClass as well as MySubClass will have a distinct
+ ``id`` Column object established::
+
+ class HasSomeAttribute(object):
+ @declared_attr.cascading
+ def some_id(cls):
+ if has_inherited_table(cls):
+ return Column(
+ ForeignKey('myclass.id'), primary_key=True)
+ else:
+ return Column(Integer, primary_key=True)
+
+
+ class MyClass(HasSomeAttribute, Base):
+ ""
+ # ...
+
+ class MySubClass(MyClass):
+ ""
+ # ...
+
+ The behavior of the above configuration is that ``MySubClass``
+ will refer to both its own ``id`` column as well as that of
+ ``MyClass`` underneath the attribute named ``some_id``.
+
+ .. seealso::
+
+ :ref:`declarative_inheritance`
+
+ :ref:`mixin_inheritance_columns`
+
+
+ """
+ return cls._stateful(cascading=True)
+
+
+class _stateful_declared_attr(declared_attr):
+ def __init__(self, **kw):
+ self.kw = kw
+
+ def _stateful(self, **kw):
+ new_kw = self.kw.copy()
+ new_kw.update(kw)
+ return _stateful_declared_attr(**new_kw)
+
+ def __call__(self, fn):
+ return declared_attr(fn, **self.kw)
def declarative_base(bind=None, metadata=None, mapper=None, cls=object,
@@ -349,9 +435,11 @@ class AbstractConcreteBase(ConcreteBase):
``__declare_last__()`` function, which is essentially
a hook for the :meth:`.after_configured` event.
- :class:`.AbstractConcreteBase` does not produce a mapped
- table for the class itself. Compare to :class:`.ConcreteBase`,
- which does.
+ :class:`.AbstractConcreteBase` does produce a mapped class
+ for the base class, however it is not persisted to any table; it
+ is instead mapped directly to the "polymorphic" selectable,
+ and is only used for selecting. Compare to :class:`.ConcreteBase`,
+ which does create a persisted table for the base class.
Example::
@@ -365,20 +453,72 @@ class AbstractConcreteBase(ConcreteBase):
employee_id = Column(Integer, primary_key=True)
name = Column(String(50))
manager_data = Column(String(40))
+
__mapper_args__ = {
- 'polymorphic_identity':'manager',
- 'concrete':True}
+ 'polymorphic_identity':'manager',
+ 'concrete':True}
+
+ The abstract base class is handled by declarative in a special way;
+ at class configuration time, it behaves like a declarative mixin
+ or an ``__abstract__`` base class. Once classes are configured
+ and mappings are produced, it then gets mapped itself, but
+ after all of its descendants. This is a unique system of mapping
+ not found in any other SQLAlchemy system.
+
+ Using this approach, we can specify columns and properties
+ that will take effect on mapped subclasses, in the way that
+ we normally do as in :ref:`declarative_mixins`::
+
+ class Company(Base):
+ __tablename__ = 'company'
+ id = Column(Integer, primary_key=True)
+
+ class Employee(AbstractConcreteBase, Base):
+ employee_id = Column(Integer, primary_key=True)
+
+ @declared_attr
+ def company_id(cls):
+ return Column(ForeignKey('company.id'))
+
+ @declared_attr
+ def company(cls):
+ return relationship("Company")
+
+ class Manager(Employee):
+ __tablename__ = 'manager'
+
+ name = Column(String(50))
+ manager_data = Column(String(40))
+
+ __mapper_args__ = {
+ 'polymorphic_identity':'manager',
+ 'concrete':True}
+
+ When we make use of our mappings however, both ``Manager`` and
+ ``Employee`` will have an independently usable ``.company`` attribute::
+
+ session.query(Employee).filter(Employee.company.has(id=5))
+
+ .. versionchanged:: 1.0.0 - The mechanics of :class:`.AbstractConcreteBase`
+ have been reworked to support relationships established directly
+ on the abstract base, without any special configuration steps.
+
"""
- __abstract__ = True
+ __no_table__ = True
@classmethod
def __declare_first__(cls):
- if hasattr(cls, '__mapper__'):
+ cls._sa_decl_prepare_nocascade()
+
+ @classmethod
+ def _sa_decl_prepare_nocascade(cls):
+ if getattr(cls, '__mapper__', None):
return
- clsregistry.add_class(cls.__name__, cls)
+ to_map = _DeferredMapperConfig.config_for_cls(cls)
+
# can't rely on 'self_and_descendants' here
# since technically an immediate subclass
# might not be mapped, but a subclass
@@ -392,11 +532,22 @@ class AbstractConcreteBase(ConcreteBase):
if mn is not None:
mappers.append(mn)
pjoin = cls._create_polymorphic_union(mappers)
- cls.__mapper__ = m = mapper(cls, pjoin, polymorphic_on=pjoin.c.type)
+
+ to_map.local_table = pjoin
+
+ m_args = to_map.mapper_args_fn or dict
+
+ def mapper_args():
+ args = m_args()
+ args['polymorphic_on'] = pjoin.c.type
+ return args
+ to_map.mapper_args_fn = mapper_args
+
+ m = to_map.map()
for scls in cls.__subclasses__():
sm = _mapper_or_none(scls)
- if sm.concrete and cls in scls.__bases__:
+ if sm and sm.concrete and cls in scls.__bases__:
sm._set_concrete_base(m)
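
The memoization added to ``declared_attr.__get__`` means each function is
evaluated once per mapped class during configuration; a hypothetical sketch::

    from sqlalchemy import Column, Integer
    from sqlalchemy.ext.declarative import declarative_base, declared_attr

    Base = declarative_base()

    class HasIdMixin(object):
        @declared_attr
        def id(cls):
            # invoked a single time per class during mapping;
            # subsequent access returns the memoized Column
            return Column(Integer, primary_key=True)

    class Thing(HasIdMixin, Base):
        __tablename__ = 'thing'
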
diff --git a/lib/sqlalchemy/ext/declarative/base.py b/lib/sqlalchemy/ext/declarative/base.py
index 94baeeb51..291608b6c 100644
--- a/lib/sqlalchemy/ext/declarative/base.py
+++ b/lib/sqlalchemy/ext/declarative/base.py
@@ -19,6 +19,9 @@ from ... import event
from . import clsregistry
import collections
import weakref
+from sqlalchemy.orm import instrumentation
+
+declared_attr = declarative_props = None
def _declared_mapping_info(cls):
@@ -32,322 +35,407 @@ def _declared_mapping_info(cls):
return None
+def _get_immediate_cls_attr(cls, attrname):
+ """return an attribute of the class that is either present directly
+ on the class, e.g. not on a superclass, or is from a superclass but
+ this superclass is a mixin, that is, not a descendant of
+ the declarative base.
+
+ This is used to detect attributes that indicate something about
+ a mapped class independently from any mapped classes that it may
+ inherit from.
+
+ """
+ for base in cls.__mro__:
+ _is_declarative_inherits = hasattr(base, '_decl_class_registry')
+ if attrname in base.__dict__:
+ value = getattr(base, attrname)
+ if (base is cls or
+ (base in cls.__bases__ and not _is_declarative_inherits)):
+ return value
+ else:
+ return None
+
+
def _as_declarative(cls, classname, dict_):
- from .api import declared_attr
+ global declared_attr, declarative_props
+ if declared_attr is None:
+ from .api import declared_attr
+ declarative_props = (declared_attr, util.classproperty)
- # dict_ will be a dictproxy, which we can't write to, and we need to!
- dict_ = dict(dict_)
+ if _get_immediate_cls_attr(cls, '__abstract__'):
+ return
- column_copies = {}
- potential_columns = {}
+ _MapperConfig.setup_mapping(cls, classname, dict_)
- mapper_args_fn = None
- table_args = inherited_table_args = None
- tablename = None
- declarative_props = (declared_attr, util.classproperty)
+class _MapperConfig(object):
- for base in cls.__mro__:
- _is_declarative_inherits = hasattr(base, '_decl_class_registry')
+ @classmethod
+ def setup_mapping(cls, cls_, classname, dict_):
+ defer_map = _get_immediate_cls_attr(
+ cls_, '_sa_decl_prepare_nocascade') or \
+ hasattr(cls_, '_sa_decl_prepare')
- if '__declare_last__' in base.__dict__:
- @event.listens_for(mapper, "after_configured")
- def go():
- cls.__declare_last__()
- if '__declare_first__' in base.__dict__:
- @event.listens_for(mapper, "before_configured")
- def go():
- cls.__declare_first__()
- if '__abstract__' in base.__dict__ and base.__abstract__:
- if (base is cls or
- (base in cls.__bases__ and not _is_declarative_inherits)):
- return
+ if defer_map:
+ cfg_cls = _DeferredMapperConfig
+ else:
+ cfg_cls = _MapperConfig
+ cfg_cls(cls_, classname, dict_)
- class_mapped = _declared_mapping_info(base) is not None
+ def __init__(self, cls_, classname, dict_):
- for name, obj in vars(base).items():
- if name == '__mapper_args__':
- if not mapper_args_fn and (
- not class_mapped or
- isinstance(obj, declarative_props)
- ):
- # don't even invoke __mapper_args__ until
- # after we've determined everything about the
- # mapped table.
- # make a copy of it so a class-level dictionary
- # is not overwritten when we update column-based
- # arguments.
- mapper_args_fn = lambda: dict(cls.__mapper_args__)
- elif name == '__tablename__':
- if not tablename and (
- not class_mapped or
- isinstance(obj, declarative_props)
- ):
- tablename = cls.__tablename__
- elif name == '__table_args__':
- if not table_args and (
- not class_mapped or
- isinstance(obj, declarative_props)
- ):
- table_args = cls.__table_args__
- if not isinstance(table_args, (tuple, dict, type(None))):
- raise exc.ArgumentError(
- "__table_args__ value must be a tuple, "
- "dict, or None")
- if base is not cls:
- inherited_table_args = True
- elif class_mapped:
- if isinstance(obj, declarative_props):
- util.warn("Regular (i.e. not __special__) "
- "attribute '%s.%s' uses @declared_attr, "
- "but owning class %s is mapped - "
- "not applying to subclass %s."
- % (base.__name__, name, base, cls))
- continue
- elif base is not cls:
- # we're a mixin.
- if isinstance(obj, Column):
- if getattr(cls, name) is not obj:
- # if column has been overridden
- # (like by the InstrumentedAttribute of the
- # superclass), skip
+ self.cls = cls_
+
+ # dict_ will be a dictproxy, which we can't write to, and we need to!
+ self.dict_ = dict(dict_)
+ self.classname = classname
+ self.mapped_table = None
+ self.properties = util.OrderedDict()
+ self.declared_columns = set()
+ self.column_copies = {}
+ self._setup_declared_events()
+
+ # register up front, so that @declared_attr can memoize
+ # function evaluations in .info
+ manager = instrumentation.register_class(self.cls)
+ manager.info['declared_attr_reg'] = {}
+
+ self._scan_attributes()
+
+ clsregistry.add_class(self.classname, self.cls)
+
+ self._extract_mappable_attributes()
+
+ self._extract_declared_columns()
+
+ self._setup_table()
+
+ self._setup_inheritance()
+
+ self._early_mapping()
+
+ def _early_mapping(self):
+ self.map()
+
+ def _setup_declared_events(self):
+ if _get_immediate_cls_attr(self.cls, '__declare_last__'):
+ @event.listens_for(mapper, "after_configured")
+ def after_configured():
+ self.cls.__declare_last__()
+
+ if _get_immediate_cls_attr(self.cls, '__declare_first__'):
+ @event.listens_for(mapper, "before_configured")
+ def before_configured():
+ self.cls.__declare_first__()
+
+ def _scan_attributes(self):
+ cls = self.cls
+ dict_ = self.dict_
+ column_copies = self.column_copies
+ mapper_args_fn = None
+ table_args = inherited_table_args = None
+ tablename = None
+
+ for base in cls.__mro__:
+ class_mapped = base is not cls and \
+ _declared_mapping_info(base) is not None and \
+ not _get_immediate_cls_attr(base, '_sa_decl_prepare_nocascade')
+
+ if not class_mapped and base is not cls:
+ self._produce_column_copies(base)
+
+ for name, obj in vars(base).items():
+ if name == '__mapper_args__':
+ if not mapper_args_fn and (
+ not class_mapped or
+ isinstance(obj, declarative_props)
+ ):
+ # don't even invoke __mapper_args__ until
+ # after we've determined everything about the
+ # mapped table.
+ # make a copy of it so a class-level dictionary
+ # is not overwritten when we update column-based
+ # arguments.
+ mapper_args_fn = lambda: dict(cls.__mapper_args__)
+ elif name == '__tablename__':
+ if not tablename and (
+ not class_mapped or
+ isinstance(obj, declarative_props)
+ ):
+ tablename = cls.__tablename__
+ elif name == '__table_args__':
+ if not table_args and (
+ not class_mapped or
+ isinstance(obj, declarative_props)
+ ):
+ table_args = cls.__table_args__
+ if not isinstance(
+ table_args, (tuple, dict, type(None))):
+ raise exc.ArgumentError(
+ "__table_args__ value must be a tuple, "
+ "dict, or None")
+ if base is not cls:
+ inherited_table_args = True
+ elif class_mapped:
+ if isinstance(obj, declarative_props):
+ util.warn("Regular (i.e. not __special__) "
+ "attribute '%s.%s' uses @declared_attr, "
+ "but owning class %s is mapped - "
+ "not applying to subclass %s."
+ % (base.__name__, name, base, cls))
+ continue
+ elif base is not cls:
+ # we're a mixin, abstract base, or something that is
+ # acting like that for now.
+ if isinstance(obj, Column):
+ # already copied columns to the mapped class.
continue
- if obj.foreign_keys:
+ elif isinstance(obj, MapperProperty):
raise exc.InvalidRequestError(
- "Columns with foreign keys to other columns "
- "must be declared as @declared_attr callables "
- "on declarative mixin classes. ")
- if name not in dict_ and not (
- '__table__' in dict_ and
- (obj.name or name) in dict_['__table__'].c
- ) and name not in potential_columns:
- potential_columns[name] = \
- column_copies[obj] = \
- obj.copy()
- column_copies[obj]._creation_order = \
- obj._creation_order
- elif isinstance(obj, MapperProperty):
+ "Mapper properties (i.e. deferred,"
+ "column_property(), relationship(), etc.) must "
+ "be declared as @declared_attr callables "
+ "on declarative mixin classes.")
+ elif isinstance(obj, declarative_props):
+ oldclassprop = isinstance(obj, util.classproperty)
+ if not oldclassprop and obj._cascading:
+ dict_[name] = column_copies[obj] = \
+ ret = obj.__get__(obj, cls)
+ else:
+ if oldclassprop:
+ util.warn_deprecated(
+ "Use of sqlalchemy.util.classproperty on "
+ "declarative classes is deprecated.")
+ dict_[name] = column_copies[obj] = \
+ ret = getattr(cls, name)
+ if isinstance(ret, (Column, MapperProperty)) and \
+ ret.doc is None:
+ ret.doc = obj.__doc__
+
+ if inherited_table_args and not tablename:
+ table_args = None
+
+ self.table_args = table_args
+ self.tablename = tablename
+ self.mapper_args_fn = mapper_args_fn
+
+ def _produce_column_copies(self, base):
+ cls = self.cls
+ dict_ = self.dict_
+ column_copies = self.column_copies
+ # copy mixin columns to the mapped class
+ for name, obj in vars(base).items():
+ if isinstance(obj, Column):
+ if getattr(cls, name) is not obj:
+ # if column has been overridden
+ # (like by the InstrumentedAttribute of the
+ # superclass), skip
+ continue
+ elif obj.foreign_keys:
raise exc.InvalidRequestError(
- "Mapper properties (i.e. deferred,"
- "column_property(), relationship(), etc.) must "
- "be declared as @declared_attr callables "
- "on declarative mixin classes.")
- elif isinstance(obj, declarative_props):
- dict_[name] = ret = \
- column_copies[obj] = getattr(cls, name)
- if isinstance(ret, (Column, MapperProperty)) and \
- ret.doc is None:
- ret.doc = obj.__doc__
-
- # apply inherited columns as we should
- for k, v in potential_columns.items():
- dict_[k] = v
-
- if inherited_table_args and not tablename:
- table_args = None
-
- clsregistry.add_class(classname, cls)
- our_stuff = util.OrderedDict()
-
- for k in list(dict_):
-
- # TODO: improve this ? all dunders ?
- if k in ('__table__', '__tablename__', '__mapper_args__'):
- continue
-
- value = dict_[k]
- if isinstance(value, declarative_props):
- value = getattr(cls, k)
-
- elif isinstance(value, QueryableAttribute) and \
- value.class_ is not cls and \
- value.key != k:
- # detect a QueryableAttribute that's already mapped being
- # assigned elsewhere in userland, turn into a synonym()
- value = synonym(value.key)
- setattr(cls, k, value)
-
- if (isinstance(value, tuple) and len(value) == 1 and
- isinstance(value[0], (Column, MapperProperty))):
- util.warn("Ignoring declarative-like tuple value of attribute "
- "%s: possibly a copy-and-paste error with a comma "
- "left at the end of the line?" % k)
- continue
- if not isinstance(value, (Column, MapperProperty)):
- if not k.startswith('__'):
- dict_.pop(k)
- setattr(cls, k, value)
- continue
- if k == 'metadata':
- raise exc.InvalidRequestError(
- "Attribute name 'metadata' is reserved "
- "for the MetaData instance when using a "
- "declarative base class."
- )
- prop = clsregistry._deferred_relationship(cls, value)
- our_stuff[k] = prop
-
- # set up attributes in the order they were created
- our_stuff.sort(key=lambda key: our_stuff[key]._creation_order)
-
- # extract columns from the class dict
- declared_columns = set()
- name_to_prop_key = collections.defaultdict(set)
- for key, c in list(our_stuff.items()):
- if isinstance(c, (ColumnProperty, CompositeProperty)):
- for col in c.columns:
- if isinstance(col, Column) and \
- col.table is None:
- _undefer_column_name(key, col)
- if not isinstance(c, CompositeProperty):
- name_to_prop_key[col.name].add(key)
- declared_columns.add(col)
- elif isinstance(c, Column):
- _undefer_column_name(key, c)
- name_to_prop_key[c.name].add(key)
- declared_columns.add(c)
- # if the column is the same name as the key,
- # remove it from the explicit properties dict.
- # the normal rules for assigning column-based properties
- # will take over, including precedence of columns
- # in multi-column ColumnProperties.
- if key == c.key:
- del our_stuff[key]
-
- for name, keys in name_to_prop_key.items():
- if len(keys) > 1:
- util.warn(
- "On class %r, Column object %r named directly multiple times, "
- "only one will be used: %s" %
- (classname, name, (", ".join(sorted(keys))))
- )
+ "Columns with foreign keys to other columns "
+ "must be declared as @declared_attr callables "
+ "on declarative mixin classes. ")
+ elif name not in dict_ and not (
+ '__table__' in dict_ and
+ (obj.name or name) in dict_['__table__'].c
+ ):
+ column_copies[obj] = copy_ = obj.copy()
+ copy_._creation_order = obj._creation_order
+ setattr(cls, name, copy_)
+ dict_[name] = copy_
- declared_columns = sorted(
- declared_columns, key=lambda c: c._creation_order)
- table = None
+ def _extract_mappable_attributes(self):
+ cls = self.cls
+ dict_ = self.dict_
- if hasattr(cls, '__table_cls__'):
- table_cls = util.unbound_method_to_callable(cls.__table_cls__)
- else:
- table_cls = Table
-
- if '__table__' not in dict_:
- if tablename is not None:
-
- args, table_kw = (), {}
- if table_args:
- if isinstance(table_args, dict):
- table_kw = table_args
- elif isinstance(table_args, tuple):
- if isinstance(table_args[-1], dict):
- args, table_kw = table_args[0:-1], table_args[-1]
- else:
- args = table_args
-
- autoload = dict_.get('__autoload__')
- if autoload:
- table_kw['autoload'] = True
-
- cls.__table__ = table = table_cls(
- tablename, cls.metadata,
- *(tuple(declared_columns) + tuple(args)),
- **table_kw)
- else:
- table = cls.__table__
- if declared_columns:
- for c in declared_columns:
- if not table.c.contains_column(c):
- raise exc.ArgumentError(
- "Can't add additional column %r when "
- "specifying __table__" % c.key
- )
+ our_stuff = self.properties
- if hasattr(cls, '__mapper_cls__'):
- mapper_cls = util.unbound_method_to_callable(cls.__mapper_cls__)
- else:
- mapper_cls = mapper
+ for k in list(dict_):
- for c in cls.__bases__:
- if _declared_mapping_info(c) is not None:
- inherits = c
- break
- else:
- inherits = None
+ if k in ('__table__', '__tablename__', '__mapper_args__'):
+ continue
- if table is None and inherits is None:
- raise exc.InvalidRequestError(
- "Class %r does not have a __table__ or __tablename__ "
- "specified and does not inherit from an existing "
- "table-mapped class." % cls
- )
- elif inherits:
- inherited_mapper = _declared_mapping_info(inherits)
- inherited_table = inherited_mapper.local_table
- inherited_mapped_table = inherited_mapper.mapped_table
-
- if table is None:
- # single table inheritance.
- # ensure no table args
- if table_args:
- raise exc.ArgumentError(
- "Can't place __table_args__ on an inherited class "
- "with no table."
+ value = dict_[k]
+ if isinstance(value, declarative_props):
+ value = getattr(cls, k)
+
+ elif isinstance(value, QueryableAttribute) and \
+ value.class_ is not cls and \
+ value.key != k:
+ # detect a QueryableAttribute that's already mapped being
+ # assigned elsewhere in userland, turn into a synonym()
+ value = synonym(value.key)
+ setattr(cls, k, value)
+
+ if (isinstance(value, tuple) and len(value) == 1 and
+ isinstance(value[0], (Column, MapperProperty))):
+ util.warn("Ignoring declarative-like tuple value of attribute "
+ "%s: possibly a copy-and-paste error with a comma "
+ "left at the end of the line?" % k)
+ continue
+ elif not isinstance(value, (Column, MapperProperty)):
+ # using @declared_attr for some object that
+ # isn't Column/MapperProperty; remove from the dict_
+            # and place the evaluated value onto the class.
+ if not k.startswith('__'):
+ dict_.pop(k)
+ setattr(cls, k, value)
+ continue
+ # we expect to see the name 'metadata' in some valid cases;
+ # however at this point we see it's assigned to something trying
+ # to be mapped, so raise for that.
+ elif k == 'metadata':
+ raise exc.InvalidRequestError(
+ "Attribute name 'metadata' is reserved "
+ "for the MetaData instance when using a "
+ "declarative base class."
+ )
+ prop = clsregistry._deferred_relationship(cls, value)
+ our_stuff[k] = prop
+
+ def _extract_declared_columns(self):
+ our_stuff = self.properties
+
+ # set up attributes in the order they were created
+ our_stuff.sort(key=lambda key: our_stuff[key]._creation_order)
+
+ # extract columns from the class dict
+ declared_columns = self.declared_columns
+ name_to_prop_key = collections.defaultdict(set)
+ for key, c in list(our_stuff.items()):
+ if isinstance(c, (ColumnProperty, CompositeProperty)):
+ for col in c.columns:
+ if isinstance(col, Column) and \
+ col.table is None:
+ _undefer_column_name(key, col)
+ if not isinstance(c, CompositeProperty):
+ name_to_prop_key[col.name].add(key)
+ declared_columns.add(col)
+ elif isinstance(c, Column):
+ _undefer_column_name(key, c)
+ name_to_prop_key[c.name].add(key)
+ declared_columns.add(c)
+ # if the column is the same name as the key,
+ # remove it from the explicit properties dict.
+ # the normal rules for assigning column-based properties
+ # will take over, including precedence of columns
+ # in multi-column ColumnProperties.
+ if key == c.key:
+ del our_stuff[key]
+
+ for name, keys in name_to_prop_key.items():
+ if len(keys) > 1:
+ util.warn(
+ "On class %r, Column object %r named "
+ "directly multiple times, "
+ "only one will be used: %s" %
+ (self.classname, name, (", ".join(sorted(keys))))
)
- # add any columns declared here to the inherited table.
- for c in declared_columns:
- if c.primary_key:
- raise exc.ArgumentError(
- "Can't place primary key columns on an inherited "
- "class with no table."
- )
- if c.name in inherited_table.c:
- if inherited_table.c[c.name] is c:
- continue
- raise exc.ArgumentError(
- "Column '%s' on class %s conflicts with "
- "existing column '%s'" %
- (c, cls, inherited_table.c[c.name])
- )
- inherited_table.append_column(c)
- if inherited_mapped_table is not None and \
- inherited_mapped_table is not inherited_table:
- inherited_mapped_table._refresh_for_new_column(c)
-
- defer_map = hasattr(cls, '_sa_decl_prepare')
- if defer_map:
- cfg_cls = _DeferredMapperConfig
- else:
- cfg_cls = _MapperConfig
- mt = cfg_cls(mapper_cls,
- cls, table,
- inherits,
- declared_columns,
- column_copies,
- our_stuff,
- mapper_args_fn)
- if not defer_map:
- mt.map()
+ def _setup_table(self):
+ cls = self.cls
+ tablename = self.tablename
+ table_args = self.table_args
+ dict_ = self.dict_
+ declared_columns = self.declared_columns
-class _MapperConfig(object):
+ declared_columns = self.declared_columns = sorted(
+ declared_columns, key=lambda c: c._creation_order)
+ table = None
- mapped_table = None
-
- def __init__(self, mapper_cls,
- cls,
- table,
- inherits,
- declared_columns,
- column_copies,
- properties, mapper_args_fn):
- self.mapper_cls = mapper_cls
- self.cls = cls
+ if hasattr(cls, '__table_cls__'):
+ table_cls = util.unbound_method_to_callable(cls.__table_cls__)
+ else:
+ table_cls = Table
+
+ if '__table__' not in dict_:
+ if tablename is not None:
+
+ args, table_kw = (), {}
+ if table_args:
+ if isinstance(table_args, dict):
+ table_kw = table_args
+ elif isinstance(table_args, tuple):
+ if isinstance(table_args[-1], dict):
+ args, table_kw = table_args[0:-1], table_args[-1]
+ else:
+ args = table_args
+
+ autoload = dict_.get('__autoload__')
+ if autoload:
+ table_kw['autoload'] = True
+
+ cls.__table__ = table = table_cls(
+ tablename, cls.metadata,
+ *(tuple(declared_columns) + tuple(args)),
+ **table_kw)
+ else:
+ table = cls.__table__
+ if declared_columns:
+ for c in declared_columns:
+ if not table.c.contains_column(c):
+ raise exc.ArgumentError(
+ "Can't add additional column %r when "
+ "specifying __table__" % c.key
+ )
self.local_table = table
- self.inherits = inherits
- self.properties = properties
- self.mapper_args_fn = mapper_args_fn
- self.declared_columns = declared_columns
- self.column_copies = column_copies
+
+ def _setup_inheritance(self):
+ table = self.local_table
+ cls = self.cls
+ table_args = self.table_args
+ declared_columns = self.declared_columns
+ for c in cls.__bases__:
+ if _declared_mapping_info(c) is not None and \
+ not _get_immediate_cls_attr(
+ c, '_sa_decl_prepare_nocascade'):
+ self.inherits = c
+ break
+ else:
+ self.inherits = None
+
+ if table is None and self.inherits is None and \
+ not _get_immediate_cls_attr(cls, '__no_table__'):
+
+ raise exc.InvalidRequestError(
+ "Class %r does not have a __table__ or __tablename__ "
+ "specified and does not inherit from an existing "
+ "table-mapped class." % cls
+ )
+ elif self.inherits:
+ inherited_mapper = _declared_mapping_info(self.inherits)
+ inherited_table = inherited_mapper.local_table
+ inherited_mapped_table = inherited_mapper.mapped_table
+
+ if table is None:
+ # single table inheritance.
+ # ensure no table args
+ if table_args:
+ raise exc.ArgumentError(
+ "Can't place __table_args__ on an inherited class "
+ "with no table."
+ )
+ # add any columns declared here to the inherited table.
+ for c in declared_columns:
+ if c.primary_key:
+ raise exc.ArgumentError(
+ "Can't place primary key columns on an inherited "
+ "class with no table."
+ )
+ if c.name in inherited_table.c:
+ if inherited_table.c[c.name] is c:
+ continue
+ raise exc.ArgumentError(
+ "Column '%s' on class %s conflicts with "
+ "existing column '%s'" %
+ (c, cls, inherited_table.c[c.name])
+ )
+ inherited_table.append_column(c)
+ if inherited_mapped_table is not None and \
+ inherited_mapped_table is not inherited_table:
+ inherited_mapped_table._refresh_for_new_column(c)
def _prepare_mapper_arguments(self):
properties = self.properties
@@ -401,20 +489,31 @@ class _MapperConfig(object):
properties[k] = [col] + p.columns
result_mapper_args = mapper_args.copy()
result_mapper_args['properties'] = properties
- return result_mapper_args
+ self.mapper_args = result_mapper_args
def map(self):
- mapper_args = self._prepare_mapper_arguments()
- self.cls.__mapper__ = self.mapper_cls(
+ self._prepare_mapper_arguments()
+ if hasattr(self.cls, '__mapper_cls__'):
+ mapper_cls = util.unbound_method_to_callable(
+ self.cls.__mapper_cls__)
+ else:
+ mapper_cls = mapper
+
+ self.cls.__mapper__ = mp_ = mapper_cls(
self.cls,
self.local_table,
- **mapper_args
+ **self.mapper_args
)
+ del mp_.class_manager.info['declared_attr_reg']
+ return mp_
class _DeferredMapperConfig(_MapperConfig):
_configs = util.OrderedDict()
+ def _early_mapping(self):
+ pass
+
@property
def cls(self):
return self._cls()
@@ -466,7 +565,7 @@ class _DeferredMapperConfig(_MapperConfig):
def map(self):
self._configs.pop(self._cls, None)
- super(_DeferredMapperConfig, self).map()
+ return super(_DeferredMapperConfig, self).map()
def _add_attribute(cls, key, value):
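
A minimal sketch (not part of the patch) of the ``__table_args__`` forms
that ``_setup_table()`` accepts above - a plain dictionary of ``Table``
keyword arguments, or a tuple of positional arguments optionally ending
in such a dictionary::

    from sqlalchemy import Column, Integer, String, UniqueConstraint
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'user'

        # tuple form: positional Table args, with the trailing dict
        # passed through as table keyword arguments
        __table_args__ = (
            UniqueConstraint('name'),
            {'mysql_engine': 'InnoDB'},
        )

        id = Column(Integer, primary_key=True)
        name = Column(String(50))
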
diff --git a/lib/sqlalchemy/ext/declarative/clsregistry.py b/lib/sqlalchemy/ext/declarative/clsregistry.py
index 4595b857a..3ef63a5ae 100644
--- a/lib/sqlalchemy/ext/declarative/clsregistry.py
+++ b/lib/sqlalchemy/ext/declarative/clsregistry.py
@@ -103,7 +103,12 @@ class _MultipleClassMarker(object):
self.on_remove()
def add_item(self, item):
- modules = set([cls().__module__ for cls in self.contents])
+        # protect against a race condition between class registration
+        # and asynchronous garbage collection calling _remove_item,
+        # [ticket:3208]
+ modules = set([
+ cls.__module__ for cls in
+ [ref() for ref in self.contents] if cls is not None])
if item.__module__ in modules:
util.warn(
"This declarative base already contains a class with the "
diff --git a/lib/sqlalchemy/ext/orderinglist.py b/lib/sqlalchemy/ext/orderinglist.py
index 67fda44c4..61155731c 100644
--- a/lib/sqlalchemy/ext/orderinglist.py
+++ b/lib/sqlalchemy/ext/orderinglist.py
@@ -119,7 +119,7 @@ start numbering at 1 or some other integer, provide ``count_from=1``.
"""
-from ..orm.collections import collection
+from ..orm.collections import collection, collection_adapter
from .. import util
__all__ = ['ordering_list']
@@ -319,7 +319,10 @@ class OrderingList(list):
def remove(self, entity):
super(OrderingList, self).remove(entity)
- self._reorder()
+
+ adapter = collection_adapter(self)
+ if adapter and adapter._referenced_by_owner:
+ self._reorder()
def pop(self, index=-1):
entity = super(OrderingList, self).pop(index)
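
The adapter check above means ``remove()`` renumbers only while the
collection is still the one attached to its owner, skipping the reorder
during a bulk replace. A typical ``ordering_list`` setup, sketched here
for illustration::

    from sqlalchemy import Column, ForeignKey, Integer
    from sqlalchemy.orm import relationship
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.ext.orderinglist import ordering_list

    Base = declarative_base()

    class Slide(Base):
        __tablename__ = 'slide'
        id = Column(Integer, primary_key=True)
        bullets = relationship(
            "Bullet", order_by="Bullet.position",
            collection_class=ordering_list('position'))

    class Bullet(Base):
        __tablename__ = 'bullet'
        id = Column(Integer, primary_key=True)
        slide_id = Column(Integer, ForeignKey('slide.id'))
        position = Column(Integer)

    slide = Slide()
    slide.bullets.append(Bullet())
    slide.bullets.append(Bullet())
    slide.bullets.remove(slide.bullets[0])  # survivor renumbered to 0
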
diff --git a/lib/sqlalchemy/orm/collections.py b/lib/sqlalchemy/orm/collections.py
index 1fc0873bd..356a8a3b9 100644
--- a/lib/sqlalchemy/orm/collections.py
+++ b/lib/sqlalchemy/orm/collections.py
@@ -589,6 +589,16 @@ class CollectionAdapter(object):
"The entity collection being adapted."
return self._data()
+ @property
+ def _referenced_by_owner(self):
+ """return True if the owner state still refers to this collection.
+
+ This will return False within a bulk replace operation,
+ where this collection is the one being replaced.
+
+ """
+ return self.owner_state.dict[self._key] is self._data()
+
@util.memoized_property
def attr(self):
return self.owner_state.manager[self._key].impl
@@ -851,11 +861,24 @@ def _instrument_class(cls):
"Can not instrument a built-in type. Use a "
"subclass, even a trivial one.")
+ roles, methods = _locate_roles_and_methods(cls)
+
+ _setup_canned_roles(cls, roles, methods)
+
+ _assert_required_roles(cls, roles, methods)
+
+ _set_collection_attributes(cls, roles, methods)
+
+
+def _locate_roles_and_methods(cls):
+ """search for _sa_instrument_role-decorated methods in
+ method resolution order, assign to roles.
+
+ """
+
roles = {}
methods = {}
- # search for _sa_instrument_role-decorated methods in
- # method resolution order, assign to roles
for supercls in cls.__mro__:
for name, method in vars(supercls).items():
if not util.callable(method):
@@ -880,14 +903,19 @@ def _instrument_class(cls):
assert op in ('fire_append_event', 'fire_remove_event')
after = op
if before:
- methods[name] = before[0], before[1], after
+ methods[name] = before + (after, )
elif after:
methods[name] = None, None, after
+ return roles, methods
+
- # see if this class has "canned" roles based on a known
- # collection type (dict, set, list). Apply those roles
- # as needed to the "roles" dictionary, and also
- # prepare "decorator" methods
+def _setup_canned_roles(cls, roles, methods):
+ """see if this class has "canned" roles based on a known
+ collection type (dict, set, list). Apply those roles
+ as needed to the "roles" dictionary, and also
+ prepare "decorator" methods
+
+ """
collection_type = util.duck_type_collection(cls)
if collection_type in __interfaces:
canned_roles, decorators = __interfaces[collection_type]
@@ -901,8 +929,12 @@ def _instrument_class(cls):
not hasattr(fn, '_sa_instrumented')):
setattr(cls, method, decorator(fn))
- # ensure all roles are present, and apply implicit instrumentation if
- # needed
+
+def _assert_required_roles(cls, roles, methods):
+ """ensure all roles are present, and apply implicit instrumentation if
+ needed
+
+ """
if 'appender' not in roles or not hasattr(cls, roles['appender']):
raise sa_exc.ArgumentError(
"Type %s must elect an appender method to be "
@@ -924,8 +956,12 @@ def _instrument_class(cls):
"Type %s must elect an iterator method to be "
"a collection class" % cls.__name__)
- # apply ad-hoc instrumentation from decorators, class-level defaults
- # and implicit role declarations
+
+def _set_collection_attributes(cls, roles, methods):
+ """apply ad-hoc instrumentation from decorators, class-level defaults
+ and implicit role declarations
+
+ """
for method_name, (before, argument, after) in methods.items():
setattr(cls, method_name,
_instrument_membership_mutator(getattr(cls, method_name),
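
The helpers above factor out what ``_instrument_class()`` has always
required of a collection class: appender, remover and iterator roles.
A hand-rolled collection declares them with the ``@collection``
decorators; a minimal sketch::

    from sqlalchemy.orm.collections import collection

    class MyBag(object):
        # list/set/dict subclasses receive these roles from the
        # "canned" interfaces; other classes declare them explicitly
        def __init__(self):
            self._data = []

        @collection.appender
        def append(self, item):
            self._data.append(item)

        @collection.remover
        def remove(self, item):
            self._data.remove(item)

        @collection.iterator
        def __iter__(self):
            return iter(self._data)
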
diff --git a/lib/sqlalchemy/orm/events.py b/lib/sqlalchemy/orm/events.py
index c50a7b062..9ea0dd834 100644
--- a/lib/sqlalchemy/orm/events.py
+++ b/lib/sqlalchemy/orm/events.py
@@ -61,7 +61,8 @@ class InstrumentationEvents(event.Events):
@classmethod
def _listen(cls, event_key, propagate=True, **kw):
target, identifier, fn = \
- event_key.dispatch_target, event_key.identifier, event_key.fn
+ event_key.dispatch_target, event_key.identifier, \
+ event_key._listen_fn
def listen(target_cls, *arg):
listen_cls = target()
@@ -192,7 +193,8 @@ class InstanceEvents(event.Events):
@classmethod
def _listen(cls, event_key, raw=False, propagate=False, **kw):
target, identifier, fn = \
- event_key.dispatch_target, event_key.identifier, event_key.fn
+ event_key.dispatch_target, event_key.identifier, \
+ event_key._listen_fn
if not raw:
def wrap(state, *arg, **kw):
@@ -498,7 +500,8 @@ class MapperEvents(event.Events):
def _listen(
cls, event_key, raw=False, retval=False, propagate=False, **kw):
target, identifier, fn = \
- event_key.dispatch_target, event_key.identifier, event_key.fn
+ event_key.dispatch_target, event_key.identifier, \
+ event_key._listen_fn
if identifier in ("before_configured", "after_configured") and \
target is not mapperlib.Mapper:
@@ -1493,7 +1496,8 @@ class AttributeEvents(event.Events):
propagate=False):
target, identifier, fn = \
- event_key.dispatch_target, event_key.identifier, event_key.fn
+ event_key.dispatch_target, event_key.identifier, \
+ event_key._listen_fn
if active_history:
target.dispatch._active_history = True
diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py
index a59a38a5b..7e88ba161 100644
--- a/lib/sqlalchemy/orm/mapper.py
+++ b/lib/sqlalchemy/orm/mapper.py
@@ -426,6 +426,12 @@ class Mapper(InspectionAttr):
thus persisting the value to the ``discriminator`` column
in the database.
+ .. warning::
+
+ Currently, **only one discriminator column may be set**, typically
+ on the base-most class in the hierarchy. "Cascading" polymorphic
+ columns are not yet supported.
+
.. seealso::
:ref:`inheritance_toplevel`
@@ -1080,6 +1086,9 @@ class Mapper(InspectionAttr):
auto-session attachment logic.
"""
+
+ # when using declarative as of 1.0, the register_class has
+ # already happened from within declarative.
manager = attributes.manager_of_class(self.class_)
if self.non_primary:
@@ -1102,18 +1111,14 @@ class Mapper(InspectionAttr):
"create a non primary Mapper. clear_mappers() will "
"remove *all* current mappers from all classes." %
self.class_)
- # else:
- # a ClassManager may already exist as
- # ClassManager.instrument_attribute() creates
- # new managers for each subclass if they don't yet exist.
+
+ if manager is None:
+ manager = instrumentation.register_class(self.class_)
_mapper_registry[self] = True
self.dispatch.instrument_class(self, self.class_)
- if manager is None:
- manager = instrumentation.register_class(self.class_)
-
self.class_manager = manager
manager.mapper = self
@@ -2649,7 +2654,7 @@ def configure_mappers():
mapper._expire_memoizations()
mapper.dispatch.mapper_configured(
mapper, mapper.class_)
- except:
+ except Exception:
exc = sys.exc_info()[1]
if not hasattr(exc, '_configure_failed'):
mapper._configure_failed = exc
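
The new warning in the ``Mapper`` docstring refers to configurations
like the following, where ``polymorphic_on`` is set exactly once, on the
base-most class, and subclasses supply only an identity (a sketch using
a hypothetical declarative ``Base``)::

    from sqlalchemy import Column, Integer, String
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Employee(Base):
        __tablename__ = 'employee'
        id = Column(Integer, primary_key=True)
        type = Column(String(50))

        __mapper_args__ = {
            'polymorphic_on': type,   # the single discriminator
            'polymorphic_identity': 'employee',
        }

    class Engineer(Employee):
        __mapper_args__ = {'polymorphic_identity': 'engineer'}
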
diff --git a/lib/sqlalchemy/orm/persistence.py b/lib/sqlalchemy/orm/persistence.py
index 74e69e44c..114b79ea5 100644
--- a/lib/sqlalchemy/orm/persistence.py
+++ b/lib/sqlalchemy/orm/persistence.py
@@ -18,7 +18,7 @@ import operator
from itertools import groupby
from .. import sql, util, exc as sa_exc, schema
from . import attributes, sync, exc as orm_exc, evaluator
-from .base import state_str, _attr_as_key
+from .base import state_str, _attr_as_key, _entity_descriptor
from ..sql import expression
from . import loading
@@ -987,6 +987,7 @@ class BulkUpdate(BulkUD):
super(BulkUpdate, self).__init__(query)
self.query._no_select_modifiers("update")
self.values = values
+ self.mapper = self.query._mapper_zero_or_none()
@classmethod
def factory(cls, query, synchronize_session, values):
@@ -996,9 +997,40 @@ class BulkUpdate(BulkUD):
False: BulkUpdate
}, synchronize_session, query, values)
+ def _resolve_string_to_expr(self, key):
+ if self.mapper and isinstance(key, util.string_types):
+ attr = _entity_descriptor(self.mapper, key)
+ return attr.__clause_element__()
+ else:
+ return key
+
+ def _resolve_key_to_attrname(self, key):
+ if self.mapper and isinstance(key, util.string_types):
+ attr = _entity_descriptor(self.mapper, key)
+ return attr.property.key
+ elif isinstance(key, attributes.InstrumentedAttribute):
+ return key.key
+ elif hasattr(key, '__clause_element__'):
+ key = key.__clause_element__()
+
+ if self.mapper and isinstance(key, expression.ColumnElement):
+ try:
+ attr = self.mapper._columntoproperty[key]
+ except orm_exc.UnmappedColumnError:
+ return None
+ else:
+ return attr.key
+ else:
+ raise sa_exc.InvalidRequestError(
+ "Invalid expression type: %r" % key)
+
def _do_exec(self):
+ values = dict(
+ (self._resolve_string_to_expr(k), v)
+ for k, v in self.values.items()
+ )
update_stmt = sql.update(self.primary_table,
- self.context.whereclause, self.values)
+ self.context.whereclause, values)
self.result = self.query.session.execute(
update_stmt, params=self.query._params)
@@ -1044,9 +1076,10 @@ class BulkUpdateEvaluate(BulkEvaluate, BulkUpdate):
def _additional_evaluators(self, evaluator_compiler):
self.value_evaluators = {}
for key, value in self.values.items():
- key = _attr_as_key(key)
- self.value_evaluators[key] = evaluator_compiler.process(
- expression._literal_as_binds(value))
+ key = self._resolve_key_to_attrname(key)
+ if key is not None:
+ self.value_evaluators[key] = evaluator_compiler.process(
+ expression._literal_as_binds(value))
def _do_post_synchronize(self):
session = self.query.session
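
With ``_resolve_string_to_expr()`` and ``_resolve_key_to_attrname()`` in
place, string keys handed to ``Query.update()`` are resolved against the
mapped entity, so both spellings below target the same attribute (a
sketch, assuming a mapped ``User`` class and an active ``session``)::

    session.query(User).filter(User.name == 'jack').update(
        {User.name: 'ed'}, synchronize_session='evaluate')

    session.query(User).filter(User.name == 'ed').update(
        {'name': 'jack'}, synchronize_session='evaluate')
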
diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py
index 60948293b..f07060825 100644
--- a/lib/sqlalchemy/orm/query.py
+++ b/lib/sqlalchemy/orm/query.py
@@ -1145,7 +1145,8 @@ class Query(object):
@_generative()
def with_hint(self, selectable, text, dialect_name='*'):
- """Add an indexing hint for the given entity or selectable to
+ """Add an indexing or other executional context
+ hint for the given entity or selectable to
this :class:`.Query`.
Functionality is passed straight through to
@@ -1153,11 +1154,35 @@ class Query(object):
with the addition that ``selectable`` can be a
:class:`.Table`, :class:`.Alias`, or ORM entity / mapped class
/etc.
+
+ .. seealso::
+
+ :meth:`.Query.with_statement_hint`
+
"""
- selectable = inspect(selectable).selectable
+ if selectable is not None:
+ selectable = inspect(selectable).selectable
self._with_hints += ((selectable, text, dialect_name),)
+ def with_statement_hint(self, text, dialect_name='*'):
+ """add a statement hint to this :class:`.Select`.
+
+ This method is similar to :meth:`.Select.with_hint` except that
+ it does not require an individual table, and instead applies to the
+ statement as a whole.
+
+ This feature calls down into :meth:`.Select.with_statement_hint`.
+
+ .. versionadded:: 1.0.0
+
+ .. seealso::
+
+ :meth:`.Query.with_hint`
+
+ """
+ return self.with_hint(None, text, dialect_name)
+
@_generative()
def execution_options(self, **kwargs):
""" Set non-SQL options which take effect during execution.
@@ -1810,6 +1835,11 @@ class Query(object):
left_entity = prop = None
+ if isinstance(onclause, interfaces.PropComparator):
+ of_type = getattr(onclause, '_of_type', None)
+ else:
+ of_type = None
+
if isinstance(onclause, util.string_types):
left_entity = self._joinpoint_zero()
@@ -1836,8 +1866,6 @@ class Query(object):
if isinstance(onclause, interfaces.PropComparator):
if right_entity is None:
- right_entity = onclause.property.mapper
- of_type = getattr(onclause, '_of_type', None)
if of_type:
right_entity = of_type
else:
@@ -1919,11 +1947,9 @@ class Query(object):
from_obj, r_info.selectable):
overlap = True
break
- elif sql_util.selectables_overlap(l_info.selectable,
- r_info.selectable):
- overlap = True
- if overlap and l_info.selectable is r_info.selectable:
+ if (overlap or not create_aliases) and \
+ l_info.selectable is r_info.selectable:
raise sa_exc.InvalidRequestError(
"Can't join table/selectable '%s' to itself" %
l_info.selectable)
@@ -2591,6 +2617,19 @@ class Query(object):
SELECT 1 FROM users WHERE users.name = :name_1
) AS anon_1
+ The EXISTS construct is usually used in the WHERE clause::
+
+ session.query(User.id).filter(q.exists()).scalar()
+
+ Note that some databases such as SQL Server don't allow an
+ EXISTS expression to be present in the columns clause of a
+        SELECT. To select a simple boolean value based on the EXISTS
+        in the WHERE clause, use :func:`.literal`::
+
+ from sqlalchemy import literal
+
+ session.query(literal(True)).filter(q.exists()).scalar()
+
.. versionadded:: 0.8.1
"""
@@ -2718,9 +2757,25 @@ class Query(object):
Updates rows matched by this query in the database.
- :param values: a dictionary with attributes names as keys and literal
+ E.g.::
+
+ sess.query(User).filter(User.age == 25).\
+ update({User.age: User.age - 10}, synchronize_session='fetch')
+
+
+ sess.query(User).filter(User.age == 25).\
+ update({"age": User.age - 10}, synchronize_session='evaluate')
+
+
+        :param values: a dictionary with attribute names, or alternatively
+ mapped attributes or SQL expressions, as keys, and literal
values or sql expressions as values.
+ .. versionchanged:: 1.0.0 - string names in the values dictionary
+ are now resolved against the mapped entity; previously, these
+ strings were passed as literal column names with no mapper-level
+ translation.
+
:param synchronize_session: chooses the strategy to update the
attributes on objects in the session. Valid values are:
@@ -2758,7 +2813,7 @@ class Query(object):
which normally occurs upon :meth:`.Session.commit` or can be forced
by using :meth:`.Session.expire_all`.
- * As of 0.8, this method will support multiple table updates, as
+ * The method supports multiple table updates, as
detailed in :ref:`multi_table_updates`, and this behavior does
extend to support updates of joined-inheritance and other multiple
table mappings. However, the **join condition of an inheritance
@@ -2789,12 +2844,6 @@ class Query(object):
"""
- # TODO: value keys need to be mapped to corresponding sql cols and
- # instr.attr.s to string keys
- # TODO: updates of manytoone relationships need to be converted to
- # fk assignments
- # TODO: cascades need handling.
-
update_op = persistence.BulkUpdate.factory(
self, synchronize_session, values)
update_op.exec_()
diff --git a/lib/sqlalchemy/orm/relationships.py b/lib/sqlalchemy/orm/relationships.py
index 95ff21444..86f1b3f82 100644
--- a/lib/sqlalchemy/orm/relationships.py
+++ b/lib/sqlalchemy/orm/relationships.py
@@ -16,6 +16,7 @@ and `secondaryjoin` aspects of :func:`.relationship`.
from __future__ import absolute_import
from .. import sql, util, exc as sa_exc, schema, log
+import weakref
from .util import CascadeOptions, _orm_annotate, _orm_deannotate
from . import dependency
from . import attributes
@@ -1532,6 +1533,7 @@ class RelationshipProperty(StrategizedProperty):
self._check_cascade_settings(self._cascade)
self._post_init()
self._generate_backref()
+ self._join_condition._warn_for_conflicting_sync_targets()
super(RelationshipProperty, self).do_init()
self._lazy_strategy = self._get_strategy((("lazy", "select"),))
@@ -2181,7 +2183,7 @@ class JoinCondition(object):
elif self._local_remote_pairs or self._remote_side:
self._annotate_remote_from_args()
elif self._refers_to_parent_table():
- self._annotate_selfref(lambda col: "foreign" in col._annotations)
+ self._annotate_selfref(lambda col: "foreign" in col._annotations, False)
elif self._tables_overlap():
self._annotate_remote_with_overlap()
else:
@@ -2200,7 +2202,7 @@ class JoinCondition(object):
self.secondaryjoin = visitors.replacement_traverse(
self.secondaryjoin, {}, repl)
- def _annotate_selfref(self, fn):
+ def _annotate_selfref(self, fn, remote_side_given):
"""annotate 'remote' in primaryjoin, secondaryjoin
when the relationship is detected as self-referential.
@@ -2215,7 +2217,7 @@ class JoinCondition(object):
if fn(binary.right) and not equated:
binary.right = binary.right._annotate(
{"remote": True})
- else:
+ elif not remote_side_given:
self._warn_non_column_elements()
self.primaryjoin = visitors.cloned_traverse(
@@ -2240,7 +2242,7 @@ class JoinCondition(object):
remote_side = self._remote_side
if self._refers_to_parent_table():
- self._annotate_selfref(lambda col: col in remote_side)
+ self._annotate_selfref(lambda col: col in remote_side, True)
else:
def repl(element):
if element in remote_side:
@@ -2519,6 +2521,60 @@ class JoinCondition(object):
self.secondary_synchronize_pairs = \
self._deannotate_pairs(secondary_sync_pairs)
+ _track_overlapping_sync_targets = weakref.WeakKeyDictionary()
+
+ def _warn_for_conflicting_sync_targets(self):
+ if not self.support_sync:
+ return
+
+ # we would like to detect if we are synchronizing any column
+ # pairs in conflict with another relationship that wishes to sync
+ # an entirely different column to the same target. This is a
+ # very rare edge case so we will try to minimize the memory/overhead
+ # impact of this check
+ for from_, to_ in [
+ (from_, to_) for (from_, to_) in self.synchronize_pairs
+ ] + [
+ (from_, to_) for (from_, to_) in self.secondary_synchronize_pairs
+ ]:
+ # save ourselves a ton of memory and overhead by only
+            # considering columns that are subject to overlapping
+ # FK constraints at the core level. This condition can arise
+ # if multiple relationships overlap foreign() directly, but
+ # we're going to assume it's typically a ForeignKeyConstraint-
+ # level configuration that benefits from this warning.
+ if len(to_.foreign_keys) < 2:
+ continue
+
+ if to_ not in self._track_overlapping_sync_targets:
+ self._track_overlapping_sync_targets[to_] = \
+ weakref.WeakKeyDictionary({self.prop: from_})
+ else:
+ other_props = []
+ prop_to_from = self._track_overlapping_sync_targets[to_]
+ for pr, fr_ in prop_to_from.items():
+ if pr.mapper in mapperlib._mapper_registry and \
+ fr_ is not from_ and \
+ pr not in self.prop._reverse_property:
+ other_props.append((pr, fr_))
+
+ if other_props:
+ util.warn(
+ "relationship '%s' will copy column %s to column %s, "
+ "which conflicts with relationship(s): %s. "
+ "Consider applying "
+ "viewonly=True to read-only relationships, or provide "
+ "a primaryjoin condition marking writable columns "
+ "with the foreign() annotation." % (
+ self.prop,
+ from_, to_,
+ ", ".join(
+ "'%s' (copies %s to %s)" % (pr, fr_, to_)
+ for (pr, fr_) in other_props)
+ )
+ )
+ self._track_overlapping_sync_targets[to_][self.prop] = from_
+
@util.memoized_property
def remote_columns(self):
return self._gather_join_annotations("remote")
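
A configuration that would now produce the warning above, sketched for
illustration: one column constrained by two foreign keys, with two
relationships that each copy a different source column into it on
flush. Marking one of them ``viewonly=True`` silences the warning::

    from sqlalchemy import Column, ForeignKey, Integer
    from sqlalchemy.orm import relationship
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class A(Base):
        __tablename__ = 'a'
        id = Column(Integer, primary_key=True)

    class B(Base):
        __tablename__ = 'b'
        id = Column(Integer, primary_key=True)

    class C(Base):
        __tablename__ = 'c'
        id = Column(Integer, primary_key=True)
        # one column, two FK constraints; both relationships below
        # will attempt to populate it
        target_id = Column(
            Integer, ForeignKey('a.id'), ForeignKey('b.id'))

        a = relationship("A")   # copies a.id to c.target_id
        b = relationship("B")   # copies b.id to c.target_id
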
diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py
index 13afcb357..f23983cbc 100644
--- a/lib/sqlalchemy/orm/session.py
+++ b/lib/sqlalchemy/orm/session.py
@@ -292,7 +292,7 @@ class SessionTransaction(object):
for s in self.session.identity_map.all_states():
s._expire(s.dict, self.session.identity_map._modified)
for s in self._deleted:
- s.session_id = None
+ s._detach()
self._deleted.clear()
elif self.nested:
self._parent._new.update(self._new)
@@ -641,14 +641,8 @@ class Session(_SessionClassMethods):
SessionExtension._adapt_listener(self, ext)
if binds is not None:
- for mapperortable, bind in binds.items():
- insp = inspect(mapperortable)
- if insp.is_selectable:
- self.bind_table(mapperortable, bind)
- elif insp.is_mapper:
- self.bind_mapper(mapperortable, bind)
- else:
- assert False
+ for key, bind in binds.items():
+ self._add_bind(key, bind)
if not self.autocommit:
self.begin()
@@ -1026,40 +1020,47 @@ class Session(_SessionClassMethods):
# TODO: + crystallize + document resolution order
# vis. bind_mapper/bind_table
- def bind_mapper(self, mapper, bind):
- """Bind operations for a mapper to a Connectable.
-
- mapper
- A mapper instance or mapped class
+ def _add_bind(self, key, bind):
+ try:
+ insp = inspect(key)
+ except sa_exc.NoInspectionAvailable:
+ if not isinstance(key, type):
+ raise exc.ArgumentError(
+ "Not acceptable bind target: %s" %
+ key)
+ else:
+ self.__binds[key] = bind
+ else:
+ if insp.is_selectable:
+ self.__binds[insp] = bind
+ elif insp.is_mapper:
+ self.__binds[insp.class_] = bind
+ for selectable in insp._all_tables:
+ self.__binds[selectable] = bind
+ else:
+ raise exc.ArgumentError(
+ "Not acceptable bind target: %s" %
+ key)
- bind
- Any Connectable: a :class:`.Engine` or :class:`.Connection`.
+ def bind_mapper(self, mapper, bind):
+ """Associate a :class:`.Mapper` with a "bind", e.g. a :class:`.Engine`
+ or :class:`.Connection`.
- All subsequent operations involving this mapper will use the given
- `bind`.
+ The given mapper is added to a lookup used by the
+ :meth:`.Session.get_bind` method.
"""
- if isinstance(mapper, type):
- mapper = class_mapper(mapper)
-
- self.__binds[mapper.base_mapper] = bind
- for t in mapper._all_tables:
- self.__binds[t] = bind
+ self._add_bind(mapper, bind)
def bind_table(self, table, bind):
- """Bind operations on a Table to a Connectable.
+ """Associate a :class:`.Table` with a "bind", e.g. a :class:`.Engine`
+ or :class:`.Connection`.
- table
- A :class:`.Table` instance
-
- bind
- Any Connectable: a :class:`.Engine` or :class:`.Connection`.
-
- All subsequent operations involving this :class:`.Table` will use the
- given `bind`.
+        The given table is added to a lookup used by the
+ :meth:`.Session.get_bind` method.
"""
- self.__binds[table] = bind
+ self._add_bind(table, bind)
def get_bind(self, mapper=None, clause=None):
"""Return a "bind" to which this :class:`.Session` is bound.
@@ -1113,6 +1114,7 @@ class Session(_SessionClassMethods):
bound :class:`.MetaData`.
"""
+
if mapper is clause is None:
if self.bind:
return self.bind
@@ -1122,15 +1124,23 @@ class Session(_SessionClassMethods):
"Connection, and no context was provided to locate "
"a binding.")
- c_mapper = mapper is not None and _class_to_mapper(mapper) or None
+ if mapper is not None:
+ try:
+ mapper = inspect(mapper)
+ except sa_exc.NoInspectionAvailable:
+ if isinstance(mapper, type):
+ raise exc.UnmappedClassError(mapper)
+ else:
+ raise
- # manually bound?
if self.__binds:
- if c_mapper:
- if c_mapper.base_mapper in self.__binds:
- return self.__binds[c_mapper.base_mapper]
- elif c_mapper.mapped_table in self.__binds:
- return self.__binds[c_mapper.mapped_table]
+ if mapper:
+ for cls in mapper.class_.__mro__:
+ if cls in self.__binds:
+ return self.__binds[cls]
+ if clause is None:
+ clause = mapper.mapped_table
+
if clause is not None:
for t in sql_util.find_tables(clause, include_crud=True):
if t in self.__binds:
@@ -1142,12 +1152,12 @@ class Session(_SessionClassMethods):
if isinstance(clause, sql.expression.ClauseElement) and clause.bind:
return clause.bind
- if c_mapper and c_mapper.mapped_table.bind:
- return c_mapper.mapped_table.bind
+ if mapper and mapper.mapped_table.bind:
+ return mapper.mapped_table.bind
context = []
if mapper is not None:
- context.append('mapper %s' % c_mapper)
+ context.append('mapper %s' % mapper)
if clause is not None:
context.append('SQL expression')
@@ -1399,6 +1409,7 @@ class Session(_SessionClassMethods):
state._detach()
elif self.transaction:
self.transaction._deleted.pop(state, None)
+ state._detach()
def _register_newly_persistent(self, states):
for state in states:
@@ -2439,16 +2450,19 @@ def make_transient_to_detached(instance):
def object_session(instance):
- """Return the ``Session`` to which instance belongs.
+ """Return the :class:`.Session` to which the given instance belongs.
- If the instance is not a mapped instance, an error is raised.
+ This is essentially the same as the :attr:`.InstanceState.session`
+ accessor. See that attribute for details.
"""
try:
- return _state_session(attributes.instance_state(instance))
+ state = attributes.instance_state(instance)
except exc.NO_STATE:
raise exc.UnmappedInstanceError(instance)
+ else:
+ return _state_session(state)
_new_sessionid = util.counter()
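
With ``_add_bind()`` consolidating the lookup setup, the ``binds``
dictionary (like ``bind_mapper()`` and ``bind_table()``) accepts mapped
classes, mappers or ``Table`` objects uniformly, and class lookups in
``get_bind()`` now walk the ``__mro__``; a sketch reusing hypothetical
``User`` / ``Address`` mappings::

    from sqlalchemy import create_engine
    from sqlalchemy.orm import Session

    engine1 = create_engine('sqlite://')
    engine2 = create_engine('sqlite://')

    session = Session(binds={
        User: engine1,               # matches User and its subclasses
        Address.__table__: engine2,  # a Table object works as well
    })
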
diff --git a/lib/sqlalchemy/orm/state.py b/lib/sqlalchemy/orm/state.py
index 3c12fda1a..560149de5 100644
--- a/lib/sqlalchemy/orm/state.py
+++ b/lib/sqlalchemy/orm/state.py
@@ -145,7 +145,16 @@ class InstanceState(interfaces.InspectionAttr):
@util.dependencies("sqlalchemy.orm.session")
def session(self, sessionlib):
"""Return the owning :class:`.Session` for this instance,
- or ``None`` if none available."""
+ or ``None`` if none available.
+
+ Note that the result here can in some cases be *different*
+        from that of ``obj in session``; an object that's been deleted
+        will report as not ``in session``; however, if the transaction is
+        still in progress, this attribute will still refer to that session.
+ Only when the transaction is completed does the object become
+ fully detached under normal circumstances.
+
+ """
return sessionlib._state_session(self)
@property
@@ -258,8 +267,8 @@ class InstanceState(interfaces.InspectionAttr):
try:
return manager.original_init(*mixed[1:], **kwargs)
except:
- manager.dispatch.init_failure(self, args, kwargs)
- raise
+ with util.safe_reraise():
+ manager.dispatch.init_failure(self, args, kwargs)
def get_history(self, key, passive):
return self.manager[key].impl.get_history(self, self.dict, passive)
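
The distinction documented in :attr:`.InstanceState.session` above can
be observed through the inspection API; a sketch, assuming a persistent
``user`` object::

    from sqlalchemy import inspect

    session.delete(user)
    session.flush()

    user in session                     # False - marked as deleted
    inspect(user).session is session    # True until commit/rollback
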
diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py
index 734f9d5e6..ad610a4ac 100644
--- a/lib/sqlalchemy/orm/util.py
+++ b/lib/sqlalchemy/orm/util.py
@@ -30,13 +30,10 @@ class CascadeOptions(frozenset):
'all', 'none', 'delete-orphan'])
_allowed_cascades = all_cascades
- def __new__(cls, arg):
- values = set([
- c for c
- in re.split('\s*,\s*', arg or "")
- if c
- ])
-
+ def __new__(cls, value_list):
+ if isinstance(value_list, str) or value_list is None:
+ return cls.from_string(value_list)
+ values = set(value_list)
if values.difference(cls._allowed_cascades):
raise sa_exc.ArgumentError(
"Invalid cascade option(s): %s" %
@@ -70,6 +67,14 @@ class CascadeOptions(frozenset):
",".join([x for x in sorted(self)])
)
+ @classmethod
+ def from_string(cls, arg):
+ values = [
+ c for c
+ in re.split('\s*,\s*', arg or "")
+ if c
+ ]
+ return cls(values)
def _validator_events(
desc, key, validator, include_removes, include_backrefs):
@@ -804,6 +809,16 @@ class _ORMJoin(expression.Join):
expression.Join.__init__(self, left, right, onclause, isouter)
+ if not prop and getattr(right_info, 'mapper', None) \
+ and right_info.mapper.single:
+ # if single inheritance target and we are using a manual
+ # or implicit ON clause, augment it the same way we'd augment the
+ # WHERE.
+ single_crit = right_info.mapper._single_table_criterion
+ if right_info.is_aliased_class:
+ single_crit = right_info._adapter.traverse(single_crit)
+ self.onclause = self.onclause & single_crit
+
def join(self, right, onclause=None, isouter=False, join_to_left=None):
return _ORMJoin(self, right, onclause, isouter)
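
With ``from_string()`` split out, ``CascadeOptions`` accepts either the
traditional comma-separated string or any sequence of cascade names::

    from sqlalchemy.orm.util import CascadeOptions

    CascadeOptions("save-update, merge")      # parsed via from_string()
    CascadeOptions(["save-update", "merge"])  # sequence accepted as-is
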
diff --git a/lib/sqlalchemy/pool.py b/lib/sqlalchemy/pool.py
index bc9affe4a..a174df784 100644
--- a/lib/sqlalchemy/pool.py
+++ b/lib/sqlalchemy/pool.py
@@ -248,9 +248,7 @@ class Pool(log.Identified):
self.logger.debug("Closing connection %r", connection)
try:
self._dialect.do_close(connection)
- except (SystemExit, KeyboardInterrupt):
- raise
- except:
+ except Exception:
self.logger.error("Exception closing connection %r",
connection, exc_info=True)
@@ -441,8 +439,8 @@ class _ConnectionRecord(object):
try:
dbapi_connection = rec.get_connection()
except:
- rec.checkin()
- raise
+ with util.safe_reraise():
+ rec.checkin()
echo = pool._should_log_debug()
fairy = _ConnectionFairy(dbapi_connection, rec, echo)
rec.fairy_ref = weakref.ref(
@@ -569,12 +567,12 @@ def _finalize_fairy(connection, connection_record,
# Immediately close detached instances
if not connection_record:
pool._close_connection(connection)
- except Exception as e:
+ except BaseException as e:
pool.logger.error(
"Exception during reset or similar", exc_info=True)
if connection_record:
connection_record.invalidate(e=e)
- if isinstance(e, (SystemExit, KeyboardInterrupt)):
+ if not isinstance(e, Exception):
raise
if connection_record:
@@ -842,9 +840,7 @@ class SingletonThreadPool(Pool):
for conn in self._all_conns:
try:
conn.close()
- except (SystemExit, KeyboardInterrupt):
- raise
- except:
+ except Exception:
# pysqlite won't even let you close a conn from a thread
# that didn't create it
pass
@@ -962,8 +958,8 @@ class QueuePool(Pool):
try:
return self._create_connection()
except:
- self._dec_overflow()
- raise
+ with util.safe_reraise():
+ self._dec_overflow()
else:
return self._do_get()
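
The recurring ``util.safe_reraise()`` substitution in this file captures
``sys.exc_info()`` on entry so the original exception and traceback are
re-raised intact after the cleanup body runs; a bare ``raise`` after
further calls can lose them on Python 2. The idiom, sketched::

    from sqlalchemy import util

    def checked_create(pool):
        try:
            return pool._create_connection()
        except Exception:
            # cleanup runs first; the original exception is then
            # re-raised with its traceback preserved
            with util.safe_reraise():
                pool._dec_overflow()
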
diff --git a/lib/sqlalchemy/sql/__init__.py b/lib/sqlalchemy/sql/__init__.py
index 4d013859c..351e08d0b 100644
--- a/lib/sqlalchemy/sql/__init__.py
+++ b/lib/sqlalchemy/sql/__init__.py
@@ -38,6 +38,7 @@ from .expression import (
false,
False_,
func,
+ funcfilter,
insert,
intersect,
intersect_all,
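
The newly exported :func:`.funcfilter` construct (compiled by
``visit_funcfilter()`` later in this patch) renders the SQL standard
``FILTER`` clause for aggregate functions; a sketch::

    from sqlalchemy import column, func, funcfilter

    expr = funcfilter(func.count(1), column('x') > 5)
    print(expr)   # roughly: count(:count_1) FILTER (WHERE x > :x_1)
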
diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py
index 72dd11eaf..5fa78ad0f 100644
--- a/lib/sqlalchemy/sql/compiler.py
+++ b/lib/sqlalchemy/sql/compiler.py
@@ -24,12 +24,10 @@ To generate user-defined SQL strings, see
"""
import re
-from . import schema, sqltypes, operators, functions, \
- util as sql_util, visitors, elements, selectable, base
+from . import schema, sqltypes, operators, functions, visitors, \
+ elements, selectable, crud
from .. import util, exc
-import decimal
import itertools
-import operator
RESERVED_WORDS = set([
'all', 'analyse', 'analyze', 'and', 'any', 'array',
@@ -64,17 +62,6 @@ BIND_TEMPLATES = {
'named': ":%(name)s"
}
-REQUIRED = util.symbol('REQUIRED', """
-Placeholder for the value within a :class:`.BindParameter`
-which is required to be present when the statement is passed
-to :meth:`.Connection.execute`.
-
-This symbol is typically used when a :func:`.expression.insert`
-or :func:`.expression.update` statement is compiled without parameter
-values present.
-
-""")
-
OPERATORS = {
# binary
@@ -503,7 +490,35 @@ class SQLCompiler(Compiled):
def visit_grouping(self, grouping, asfrom=False, **kwargs):
return "(" + grouping.element._compiler_dispatch(self, **kwargs) + ")"
- def visit_label_reference(self, element, **kwargs):
+ def visit_label_reference(
+ self, element, within_columns_clause=False, **kwargs):
+ if self.stack and self.dialect.supports_simple_order_by_label:
+ selectable = self.stack[-1]['selectable']
+
+ with_cols, only_froms = selectable._label_resolve_dict
+ if within_columns_clause:
+ resolve_dict = only_froms
+ else:
+ resolve_dict = with_cols
+
+ # this can be None in the case that a _label_reference()
+            # was subject to a replacement operation, in which case
+ # the replacement of the Label element may have changed
+ # to something else like a ColumnClause expression.
+ order_by_elem = element.element._order_by_label_element
+
+ if order_by_elem is not None and order_by_elem.name in \
+ resolve_dict:
+
+ kwargs['render_label_as_label'] = \
+ element.element._order_by_label_element
+
+ return self.process(
+ element.element, within_columns_clause=within_columns_clause,
+ **kwargs)
+
+ def visit_textual_label_reference(
+ self, element, within_columns_clause=False, **kwargs):
if not self.stack:
# compiling the element outside of the context of a SELECT
return self.process(
@@ -511,19 +526,25 @@ class SQLCompiler(Compiled):
)
selectable = self.stack[-1]['selectable']
+ with_cols, only_froms = selectable._label_resolve_dict
+
try:
- col = selectable._label_resolve_dict[element.text]
+ if within_columns_clause:
+ col = only_froms[element.element]
+ else:
+ col = with_cols[element.element]
except KeyError:
# treat it like text()
util.warn_limited(
"Can't resolve label reference %r; converting to text()",
- util.ellipses_string(element.text))
+ util.ellipses_string(element.element))
return self.process(
element._text_clause
)
else:
kwargs['render_label_as_label'] = col
- return self.process(col, **kwargs)
+ return self.process(
+ col, within_columns_clause=within_columns_clause, **kwargs)
def visit_label(self, label,
add_to_result_map=None,
@@ -678,11 +699,7 @@ class SQLCompiler(Compiled):
else:
return "0"
- def visit_clauselist(self, clauselist, order_by_select=None, **kw):
- if order_by_select is not None:
- return self._order_by_clauselist(
- clauselist, order_by_select, **kw)
-
+ def visit_clauselist(self, clauselist, **kw):
sep = clauselist.operator
if sep is None:
sep = " "
@@ -695,27 +712,6 @@ class SQLCompiler(Compiled):
for c in clauselist.clauses)
if s)
- def _order_by_clauselist(self, clauselist, order_by_select, **kw):
- # look through raw columns collection for labels.
- # note that its OK we aren't expanding tables and other selectables
- # here; we can only add a label in the ORDER BY for an individual
- # label expression in the columns clause.
-
- raw_col = set(order_by_select._label_resolve_dict.keys())
-
- return ", ".join(
- s for s in
- (
- c._compiler_dispatch(
- self,
- render_label_as_label=c._order_by_label_element if
- c._order_by_label_element is not None and
- c._order_by_label_element._label in raw_col
- else None,
- **kw)
- for c in clauselist.clauses)
- if s)
-
def visit_case(self, clause, **kwargs):
x = "CASE "
if clause.value is not None:
@@ -750,6 +746,12 @@ class SQLCompiler(Compiled):
)
)
+ def visit_funcfilter(self, funcfilter, **kwargs):
+ return "%s FILTER (WHERE %s)" % (
+ funcfilter.func._compiler_dispatch(self, **kwargs),
+ funcfilter.criterion._compiler_dispatch(self, **kwargs)
+ )
+
def visit_extract(self, extract, **kwargs):
field = self.extract_map.get(extract.field, extract.field)
return "EXTRACT(%s FROM %s)" % (
@@ -809,8 +811,9 @@ class SQLCompiler(Compiled):
text += " GROUP BY " + group_by
text += self.order_by_clause(cs, **kwargs)
- text += (cs._limit_clause is not None or cs._offset_clause is not None) and \
- self.limit_clause(cs) or ""
+ text += (cs._limit_clause is not None
+ or cs._offset_clause is not None) and \
+ self.limit_clause(cs, **kwargs) or ""
if self.ctes and \
compound_index == 0 and toplevel:
@@ -866,15 +869,15 @@ class SQLCompiler(Compiled):
isinstance(binary.right, elements.BindParameter):
kw['literal_binds'] = True
- operator = binary.operator
- disp = getattr(self, "visit_%s_binary" % operator.__name__, None)
+ operator_ = binary.operator
+ disp = getattr(self, "visit_%s_binary" % operator_.__name__, None)
if disp:
- return disp(binary, operator, **kw)
+ return disp(binary, operator_, **kw)
else:
try:
- opstring = OPERATORS[operator]
+ opstring = OPERATORS[operator_]
except KeyError:
- raise exc.UnsupportedCompilationError(self, operator)
+ raise exc.UnsupportedCompilationError(self, operator_)
else:
return self._generate_generic_binary(binary, opstring, **kw)
@@ -956,7 +959,7 @@ class SQLCompiler(Compiled):
' ESCAPE ' +
self.render_literal_value(escape, sqltypes.STRINGTYPE)
if escape else ''
- )
+ )
def visit_notlike_op_binary(self, binary, operator, **kw):
escape = binary.modifiers.get("escape", None)
@@ -967,7 +970,7 @@ class SQLCompiler(Compiled):
' ESCAPE ' +
self.render_literal_value(escape, sqltypes.STRINGTYPE)
if escape else ''
- )
+ )
def visit_ilike_op_binary(self, binary, operator, **kw):
escape = binary.modifiers.get("escape", None)
@@ -978,7 +981,7 @@ class SQLCompiler(Compiled):
' ESCAPE ' +
self.render_literal_value(escape, sqltypes.STRINGTYPE)
if escape else ''
- )
+ )
def visit_notilike_op_binary(self, binary, operator, **kw):
escape = binary.modifiers.get("escape", None)
@@ -989,7 +992,7 @@ class SQLCompiler(Compiled):
' ESCAPE ' +
self.render_literal_value(escape, sqltypes.STRINGTYPE)
if escape else ''
- )
+ )
def visit_between_op_binary(self, binary, operator, **kw):
symmetric = binary.modifiers.get("symmetric", False)
@@ -1321,6 +1324,9 @@ class SQLCompiler(Compiled):
def get_crud_hint_text(self, table, text):
return None
+ def get_statement_hint_text(self, hint_texts):
+ return " ".join(hint_texts)
+
def _transform_select_for_nested_joins(self, select):
"""Rewrite any "a JOIN (b JOIN c)" expression as
"a JOIN (select * from b JOIN c) AS anon", to support
@@ -1491,29 +1497,7 @@ class SQLCompiler(Compiled):
select, transformed_select)
return text
- correlate_froms = entry['correlate_froms']
- asfrom_froms = entry['asfrom_froms']
-
- if asfrom:
- froms = select._get_display_froms(
- explicit_correlate_froms=correlate_froms.difference(
- asfrom_froms),
- implicit_correlate_froms=())
- else:
- froms = select._get_display_froms(
- explicit_correlate_froms=correlate_froms,
- implicit_correlate_froms=asfrom_froms)
-
- new_correlate_froms = set(selectable._from_objects(*froms))
- all_correlate_froms = new_correlate_froms.union(correlate_froms)
-
- new_entry = {
- 'asfrom_froms': new_correlate_froms,
- 'iswrapper': iswrapper,
- 'correlate_froms': all_correlate_froms,
- 'selectable': select,
- }
- self.stack.append(new_entry)
+ froms = self._setup_select_stack(select, entry, asfrom, iswrapper)
column_clause_args = kwargs.copy()
column_clause_args.update({
@@ -1524,18 +1508,11 @@ class SQLCompiler(Compiled):
text = "SELECT " # we're off to a good start !
if select._hints:
- byfrom = dict([
- (from_, hinttext % {
- 'name': from_._compiler_dispatch(
- self, ashint=True)
- })
- for (from_, dialect), hinttext in
- select._hints.items()
- if dialect in ('*', self.dialect.name)
- ])
- hint_text = self.get_select_hint_text(byfrom)
+ hint_text, byfrom = self._setup_select_hints(select)
if hint_text:
text += hint_text + " "
+ else:
+ byfrom = None
if select._prefixes:
text += self._generate_prefixes(
@@ -1556,6 +1533,70 @@ class SQLCompiler(Compiled):
if c is not None
]
+ text = self._compose_select_body(
+ text, select, inner_columns, froms, byfrom, kwargs)
+
+ if select._statement_hints:
+ per_dialect = [
+ ht for (dialect_name, ht)
+ in select._statement_hints
+ if dialect_name in ('*', self.dialect.name)
+ ]
+ if per_dialect:
+ text += " " + self.get_statement_hint_text(per_dialect)
+
+ if self.ctes and \
+ compound_index == 0 and toplevel:
+ text = self._render_cte_clause() + text
+
+ self.stack.pop(-1)
+
+ if asfrom and parens:
+ return "(" + text + ")"
+ else:
+ return text
+
+ def _setup_select_hints(self, select):
+ byfrom = dict([
+ (from_, hinttext % {
+ 'name': from_._compiler_dispatch(
+ self, ashint=True)
+ })
+ for (from_, dialect), hinttext in
+ select._hints.items()
+ if dialect in ('*', self.dialect.name)
+ ])
+ hint_text = self.get_select_hint_text(byfrom)
+ return hint_text, byfrom
+
+ def _setup_select_stack(self, select, entry, asfrom, iswrapper):
+ correlate_froms = entry['correlate_froms']
+ asfrom_froms = entry['asfrom_froms']
+
+ if asfrom:
+ froms = select._get_display_froms(
+ explicit_correlate_froms=correlate_froms.difference(
+ asfrom_froms),
+ implicit_correlate_froms=())
+ else:
+ froms = select._get_display_froms(
+ explicit_correlate_froms=correlate_froms,
+ implicit_correlate_froms=asfrom_froms)
+
+ new_correlate_froms = set(selectable._from_objects(*froms))
+ all_correlate_froms = new_correlate_froms.union(correlate_froms)
+
+ new_entry = {
+ 'asfrom_froms': new_correlate_froms,
+ 'iswrapper': iswrapper,
+ 'correlate_froms': all_correlate_froms,
+ 'selectable': select,
+ }
+ self.stack.append(new_entry)
+ return froms
+
+ def _compose_select_body(
+ self, text, select, inner_columns, froms, byfrom, kwargs):
text += ', '.join(inner_columns)
if froms:
@@ -1590,13 +1631,7 @@ class SQLCompiler(Compiled):
text += " \nHAVING " + t
if select._order_by_clause.clauses:
- if self.dialect.supports_simple_order_by_label:
- order_by_select = select
- else:
- order_by_select = None
-
- text += self.order_by_clause(
- select, order_by_select=order_by_select, **kwargs)
+ text += self.order_by_clause(select, **kwargs)
if (select._limit_clause is not None or
select._offset_clause is not None):
@@ -1605,16 +1640,7 @@ class SQLCompiler(Compiled):
if select._for_update_arg is not None:
text += self.for_update_clause(select, **kwargs)
- if self.ctes and \
- compound_index == 0 and toplevel:
- text = self._render_cte_clause() + text
-
- self.stack.pop(-1)
-
- if asfrom and parens:
- return "(" + text + ")"
- else:
- return text
+ return text
def _generate_prefixes(self, stmt, prefixes, **kw):
clause = " ".join(
@@ -1704,9 +1730,9 @@ class SQLCompiler(Compiled):
def visit_insert(self, insert_stmt, **kw):
self.isinsert = True
- colparams = self._get_colparams(insert_stmt, **kw)
+ crud_params = crud._get_crud_params(self, insert_stmt, **kw)
- if not colparams and \
+ if not crud_params and \
not self.dialect.supports_default_values and \
not self.dialect.supports_empty_insert:
raise exc.CompileError("The '%s' dialect with current database "
@@ -1721,9 +1747,9 @@ class SQLCompiler(Compiled):
"version settings does not support "
"in-place multirow inserts." %
self.dialect.name)
- colparams_single = colparams[0]
+ crud_params_single = crud_params[0]
else:
- colparams_single = colparams
+ crud_params_single = crud_params
preparer = self.preparer
supports_default_values = self.dialect.supports_default_values
@@ -1754,9 +1780,9 @@ class SQLCompiler(Compiled):
text += table_text
- if colparams_single or not supports_default_values:
+ if crud_params_single or not supports_default_values:
text += " (%s)" % ', '.join([preparer.format_column(c[0])
- for c in colparams_single])
+ for c in crud_params_single])
if self.returning or insert_stmt._returning:
self.returning = self.returning or insert_stmt._returning
@@ -1767,21 +1793,21 @@ class SQLCompiler(Compiled):
text += " " + returning_clause
if insert_stmt.select is not None:
- text += " %s" % self.process(insert_stmt.select, **kw)
- elif not colparams and supports_default_values:
+ text += " %s" % self.process(self._insert_from_select, **kw)
+ elif not crud_params and supports_default_values:
text += " DEFAULT VALUES"
elif insert_stmt._has_multi_parameters:
text += " VALUES %s" % (
", ".join(
"(%s)" % (
- ', '.join(c[1] for c in colparam_set)
+ ', '.join(c[1] for c in crud_param_set)
)
- for colparam_set in colparams
+ for crud_param_set in crud_params
)
)
else:
text += " VALUES (%s)" % \
- ', '.join([c[1] for c in colparams])
+ ', '.join([c[1] for c in crud_params])
if self.returning and not self.returning_precedes_values:
text += " " + returning_clause
@@ -1838,7 +1864,7 @@ class SQLCompiler(Compiled):
table_text = self.update_tables_clause(update_stmt, update_stmt.table,
extra_froms, **kw)
- colparams = self._get_colparams(update_stmt, **kw)
+ crud_params = crud._get_crud_params(self, update_stmt, **kw)
if update_stmt._hints:
dialect_hints = dict([
@@ -1865,7 +1891,7 @@ class SQLCompiler(Compiled):
text += ', '.join(
c[0]._compiler_dispatch(self,
include_table=include_table) +
- '=' + c[1] for c in colparams
+ '=' + c[1] for c in crud_params
)
if self.returning or update_stmt._returning:
@@ -1901,380 +1927,9 @@ class SQLCompiler(Compiled):
return text
- def _create_crud_bind_param(self, col, value, required=False, name=None):
- if name is None:
- name = col.key
- bindparam = elements.BindParameter(name, value,
- type_=col.type, required=required)
- bindparam._is_crud = True
- return bindparam._compiler_dispatch(self)
-
@util.memoized_property
def _key_getters_for_crud_column(self):
- if self.isupdate and self.statement._extra_froms:
- # when extra tables are present, refer to the columns
- # in those extra tables as table-qualified, including in
- # dictionaries and when rendering bind param names.
- # the "main" table of the statement remains unqualified,
- # allowing the most compatibility with a non-multi-table
- # statement.
- _et = set(self.statement._extra_froms)
-
- def _column_as_key(key):
- str_key = elements._column_as_key(key)
- if hasattr(key, 'table') and key.table in _et:
- return (key.table.name, str_key)
- else:
- return str_key
-
- def _getattr_col_key(col):
- if col.table in _et:
- return (col.table.name, col.key)
- else:
- return col.key
-
- def _col_bind_name(col):
- if col.table in _et:
- return "%s_%s" % (col.table.name, col.key)
- else:
- return col.key
-
- else:
- _column_as_key = elements._column_as_key
- _getattr_col_key = _col_bind_name = operator.attrgetter("key")
-
- return _column_as_key, _getattr_col_key, _col_bind_name
-
- def _get_colparams(self, stmt, **kw):
- """create a set of tuples representing column/string pairs for use
- in an INSERT or UPDATE statement.
-
- Also generates the Compiled object's postfetch, prefetch, and
- returning column collections, used for default handling and ultimately
- populating the ResultProxy's prefetch_cols() and postfetch_cols()
- collections.
-
- """
-
- self.postfetch = []
- self.prefetch = []
- self.returning = []
-
- # no parameters in the statement, no parameters in the
- # compiled params - return binds for all columns
- if self.column_keys is None and stmt.parameters is None:
- return [
- (c, self._create_crud_bind_param(c,
- None, required=True))
- for c in stmt.table.columns
- ]
-
- if stmt._has_multi_parameters:
- stmt_parameters = stmt.parameters[0]
- else:
- stmt_parameters = stmt.parameters
-
- # getters - these are normally just column.key,
- # but in the case of mysql multi-table update, the rules for
- # .key must conditionally take tablename into account
- _column_as_key, _getattr_col_key, _col_bind_name = \
- self._key_getters_for_crud_column
-
- # if we have statement parameters - set defaults in the
- # compiled params
- if self.column_keys is None:
- parameters = {}
- else:
- parameters = dict((_column_as_key(key), REQUIRED)
- for key in self.column_keys
- if not stmt_parameters or
- key not in stmt_parameters)
-
- # create a list of column assignment clauses as tuples
- values = []
-
- if stmt_parameters is not None:
- for k, v in stmt_parameters.items():
- colkey = _column_as_key(k)
- if colkey is not None:
- parameters.setdefault(colkey, v)
- else:
- # a non-Column expression on the left side;
- # add it to values() in an "as-is" state,
- # coercing right side to bound param
- if elements._is_literal(v):
- v = self.process(
- elements.BindParameter(None, v, type_=k.type),
- **kw)
- else:
- v = self.process(v.self_group(), **kw)
-
- values.append((k, v))
-
- need_pks = self.isinsert and \
- not self.inline and \
- not stmt._returning and \
- not stmt._has_multi_parameters
-
- implicit_returning = need_pks and \
- self.dialect.implicit_returning and \
- stmt.table.implicit_returning
-
- if self.isinsert:
- implicit_return_defaults = (implicit_returning and
- stmt._return_defaults)
- elif self.isupdate:
- implicit_return_defaults = (self.dialect.implicit_returning and
- stmt.table.implicit_returning and
- stmt._return_defaults)
- else:
- implicit_return_defaults = False
-
- if implicit_return_defaults:
- if stmt._return_defaults is True:
- implicit_return_defaults = set(stmt.table.c)
- else:
- implicit_return_defaults = set(stmt._return_defaults)
-
- postfetch_lastrowid = need_pks and self.dialect.postfetch_lastrowid
-
- check_columns = {}
-
- # special logic that only occurs for multi-table UPDATE
- # statements
- if self.isupdate and stmt._extra_froms and stmt_parameters:
- normalized_params = dict(
- (elements._clause_element_as_expr(c), param)
- for c, param in stmt_parameters.items()
- )
- affected_tables = set()
- for t in stmt._extra_froms:
- for c in t.c:
- if c in normalized_params:
- affected_tables.add(t)
- check_columns[_getattr_col_key(c)] = c
- value = normalized_params[c]
- if elements._is_literal(value):
- value = self._create_crud_bind_param(
- c, value, required=value is REQUIRED,
- name=_col_bind_name(c))
- else:
- self.postfetch.append(c)
- value = self.process(value.self_group(), **kw)
- values.append((c, value))
- # determine tables which are actually
- # to be updated - process onupdate and
- # server_onupdate for these
- for t in affected_tables:
- for c in t.c:
- if c in normalized_params:
- continue
- elif (c.onupdate is not None and not
- c.onupdate.is_sequence):
- if c.onupdate.is_clause_element:
- values.append(
- (c, self.process(
- c.onupdate.arg.self_group(),
- **kw)
- )
- )
- self.postfetch.append(c)
- else:
- values.append(
- (c, self._create_crud_bind_param(
- c, None, name=_col_bind_name(c)
- )
- )
- )
- self.prefetch.append(c)
- elif c.server_onupdate is not None:
- self.postfetch.append(c)
-
- if self.isinsert and stmt.select_names:
- # for an insert from select, we can only use names that
- # are given, so only select for those names.
- cols = (stmt.table.c[_column_as_key(name)]
- for name in stmt.select_names)
- else:
- # iterate through all table columns to maintain
- # ordering, even for those cols that aren't included
- cols = stmt.table.columns
-
- for c in cols:
- col_key = _getattr_col_key(c)
- if col_key in parameters and col_key not in check_columns:
- value = parameters.pop(col_key)
- if elements._is_literal(value):
- value = self._create_crud_bind_param(
- c, value, required=value is REQUIRED,
- name=_col_bind_name(c)
- if not stmt._has_multi_parameters
- else "%s_0" % _col_bind_name(c)
- )
- else:
- if isinstance(value, elements.BindParameter) and \
- value.type._isnull:
- value = value._clone()
- value.type = c.type
-
- if c.primary_key and implicit_returning:
- self.returning.append(c)
- value = self.process(value.self_group(), **kw)
- elif implicit_return_defaults and \
- c in implicit_return_defaults:
- self.returning.append(c)
- value = self.process(value.self_group(), **kw)
- else:
- self.postfetch.append(c)
- value = self.process(value.self_group(), **kw)
- values.append((c, value))
-
- elif self.isinsert:
- if c.primary_key and \
- need_pks and \
- (
- implicit_returning or
- not postfetch_lastrowid or
- c is not stmt.table._autoincrement_column
- ):
-
- if implicit_returning:
- if c.default is not None:
- if c.default.is_sequence:
- if self.dialect.supports_sequences and \
- (not c.default.optional or
- not self.dialect.sequences_optional):
- proc = self.process(c.default, **kw)
- values.append((c, proc))
- self.returning.append(c)
- elif c.default.is_clause_element:
- values.append(
- (c, self.process(
- c.default.arg.self_group(), **kw))
- )
- self.returning.append(c)
- else:
- values.append(
- (c, self._create_crud_bind_param(c, None))
- )
- self.prefetch.append(c)
- else:
- self.returning.append(c)
- else:
- if (
- (c.default is not None and
- (not c.default.is_sequence or
- self.dialect.supports_sequences)) or
- c is stmt.table._autoincrement_column and
- (self.dialect.supports_sequences or
- self.dialect.
- preexecute_autoincrement_sequences)
- ):
-
- values.append(
- (c, self._create_crud_bind_param(c, None))
- )
-
- self.prefetch.append(c)
-
- elif c.default is not None:
- if c.default.is_sequence:
- if self.dialect.supports_sequences and \
- (not c.default.optional or
- not self.dialect.sequences_optional):
- proc = self.process(c.default, **kw)
- values.append((c, proc))
- if implicit_return_defaults and \
- c in implicit_return_defaults:
- self.returning.append(c)
- elif not c.primary_key:
- self.postfetch.append(c)
- elif c.default.is_clause_element:
- values.append(
- (c, self.process(
- c.default.arg.self_group(), **kw))
- )
-
- if implicit_return_defaults and \
- c in implicit_return_defaults:
- self.returning.append(c)
- elif not c.primary_key:
- # don't add primary key column to postfetch
- self.postfetch.append(c)
- else:
- values.append(
- (c, self._create_crud_bind_param(c, None))
- )
- self.prefetch.append(c)
- elif c.server_default is not None:
- if implicit_return_defaults and \
- c in implicit_return_defaults:
- self.returning.append(c)
- elif not c.primary_key:
- self.postfetch.append(c)
- elif implicit_return_defaults and \
- c in implicit_return_defaults:
- self.returning.append(c)
-
- elif self.isupdate:
- if c.onupdate is not None and not c.onupdate.is_sequence:
- if c.onupdate.is_clause_element:
- values.append(
- (c, self.process(
- c.onupdate.arg.self_group(), **kw))
- )
- if implicit_return_defaults and \
- c in implicit_return_defaults:
- self.returning.append(c)
- else:
- self.postfetch.append(c)
- else:
- values.append(
- (c, self._create_crud_bind_param(c, None))
- )
- self.prefetch.append(c)
- elif c.server_onupdate is not None:
- if implicit_return_defaults and \
- c in implicit_return_defaults:
- self.returning.append(c)
- else:
- self.postfetch.append(c)
- elif implicit_return_defaults and \
- c in implicit_return_defaults:
- self.returning.append(c)
-
- if parameters and stmt_parameters:
- check = set(parameters).intersection(
- _column_as_key(k) for k in stmt.parameters
- ).difference(check_columns)
- if check:
- raise exc.CompileError(
- "Unconsumed column names: %s" %
- (", ".join("%s" % c for c in check))
- )
-
- if stmt._has_multi_parameters:
- values_0 = values
- values = [values]
-
- values.extend(
- [
- (
- c,
- (self._create_crud_bind_param(
- c, row[c.key],
- name="%s_%d" % (c.key, i + 1)
- ) if elements._is_literal(row[c.key])
- else self.process(
- row[c.key].self_group(), **kw))
- if c.key in row else param
- )
- for (c, param) in values_0
- ]
- for i, row in enumerate(stmt.parameters[1:])
- )
-
- return values
+ return crud._key_getters_for_crud_column(self)
def visit_delete(self, delete_stmt, **kw):
self.stack.append({'correlate_froms': set([delete_stmt.table]),
@@ -2458,17 +2113,18 @@ class DDLCompiler(Compiled):
constraints.extend([c for c in table._sorted_constraints
if c is not table.primary_key])
- return ", \n\t".join(p for p in
- (self.process(constraint)
- for constraint in constraints
- if (
- constraint._create_rule is None or
- constraint._create_rule(self))
- and (
- not self.dialect.supports_alter or
- not getattr(constraint, 'use_alter', False)
- )) if p is not None
- )
+ return ", \n\t".join(
+ p for p in
+ (self.process(constraint)
+ for constraint in constraints
+ if (
+ constraint._create_rule is None or
+ constraint._create_rule(self))
+ and (
+ not self.dialect.supports_alter or
+ not getattr(constraint, 'use_alter', False)
+ )) if p is not None
+ )
def visit_drop_table(self, drop):
return "\nDROP TABLE " + self.preparer.format_table(drop.element)
diff --git a/lib/sqlalchemy/sql/crud.py b/lib/sqlalchemy/sql/crud.py
new file mode 100644
index 000000000..831d05be1
--- /dev/null
+++ b/lib/sqlalchemy/sql/crud.py
@@ -0,0 +1,530 @@
+# sql/crud.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Functions used by compiler.py to determine the parameters rendered
+within INSERT and UPDATE statements.
+
+"""
+from .. import util
+from .. import exc
+from . import elements
+import operator
+
+REQUIRED = util.symbol('REQUIRED', """
+Placeholder for the value within a :class:`.BindParameter`
+which is required to be present when the statement is passed
+to :meth:`.Connection.execute`.
+
+This symbol is typically used when a :func:`.expression.insert`
+or :func:`.expression.update` statement is compiled without parameter
+values present.
+
+""")
+
+
+def _get_crud_params(compiler, stmt, **kw):
+ """create a set of tuples representing column/string pairs for use
+ in an INSERT or UPDATE statement.
+
+ Also generates the Compiled object's postfetch, prefetch, and
+ returning column collections, used for default handling and ultimately
+ populating the ResultProxy's prefetch_cols() and postfetch_cols()
+ collections.
+
+ """
+
+ compiler.postfetch = []
+ compiler.prefetch = []
+ compiler.returning = []
+
+ # no parameters in the statement, no parameters in the
+ # compiled params - return binds for all columns
+ if compiler.column_keys is None and stmt.parameters is None:
+ return [
+ (c, _create_bind_param(
+ compiler, c, None, required=True))
+ for c in stmt.table.columns
+ ]
+
+ if stmt._has_multi_parameters:
+ stmt_parameters = stmt.parameters[0]
+ else:
+ stmt_parameters = stmt.parameters
+
+ # getters - these are normally just column.key,
+ # but in the case of mysql multi-table update, the rules for
+ # .key must conditionally take tablename into account
+ _column_as_key, _getattr_col_key, _col_bind_name = \
+ _key_getters_for_crud_column(compiler)
+
+ # if we have statement parameters - set defaults in the
+ # compiled params
+ if compiler.column_keys is None:
+ parameters = {}
+ else:
+ parameters = dict((_column_as_key(key), REQUIRED)
+ for key in compiler.column_keys
+ if not stmt_parameters or
+ key not in stmt_parameters)
+
+ # create a list of column assignment clauses as tuples
+ values = []
+
+ if stmt_parameters is not None:
+ _get_stmt_parameters_params(
+ compiler,
+ parameters, stmt_parameters, _column_as_key, values, kw)
+
+ check_columns = {}
+
+ # special logic that only occurs for multi-table UPDATE
+ # statements
+ if compiler.isupdate and stmt._extra_froms and stmt_parameters:
+ _get_multitable_params(
+ compiler, stmt, stmt_parameters, check_columns,
+ _col_bind_name, _getattr_col_key, values, kw)
+
+ if compiler.isinsert and stmt.select_names:
+ _scan_insert_from_select_cols(
+ compiler, stmt, parameters,
+ _getattr_col_key, _column_as_key,
+ _col_bind_name, check_columns, values, kw)
+ else:
+ _scan_cols(
+ compiler, stmt, parameters,
+ _getattr_col_key, _column_as_key,
+ _col_bind_name, check_columns, values, kw)
+
+ if parameters and stmt_parameters:
+ check = set(parameters).intersection(
+ _column_as_key(k) for k in stmt.parameters
+ ).difference(check_columns)
+ if check:
+ raise exc.CompileError(
+ "Unconsumed column names: %s" %
+ (", ".join("%s" % c for c in check))
+ )
+
+ if stmt._has_multi_parameters:
+ values = _extend_values_for_multiparams(compiler, stmt, values, kw)
+
+ return values
+
+
+def _create_bind_param(
+ compiler, col, value, process=True, required=False, name=None):
+ if name is None:
+ name = col.key
+ bindparam = elements.BindParameter(name, value,
+ type_=col.type, required=required)
+ bindparam._is_crud = True
+ if process:
+ bindparam = bindparam._compiler_dispatch(compiler)
+ return bindparam
+
+
+def _key_getters_for_crud_column(compiler):
+ if compiler.isupdate and compiler.statement._extra_froms:
+ # when extra tables are present, refer to the columns
+ # in those extra tables as table-qualified, including in
+ # dictionaries and when rendering bind param names.
+ # the "main" table of the statement remains unqualified,
+ # allowing the most compatibility with a non-multi-table
+ # statement.
+ _et = set(compiler.statement._extra_froms)
+
+ def _column_as_key(key):
+ str_key = elements._column_as_key(key)
+ if hasattr(key, 'table') and key.table in _et:
+ return (key.table.name, str_key)
+ else:
+ return str_key
+
+ def _getattr_col_key(col):
+ if col.table in _et:
+ return (col.table.name, col.key)
+ else:
+ return col.key
+
+ def _col_bind_name(col):
+ if col.table in _et:
+ return "%s_%s" % (col.table.name, col.key)
+ else:
+ return col.key
+
+ else:
+ _column_as_key = elements._column_as_key
+ _getattr_col_key = _col_bind_name = operator.attrgetter("key")
+
+ return _column_as_key, _getattr_col_key, _col_bind_name
+
+
+def _scan_insert_from_select_cols(
+ compiler, stmt, parameters, _getattr_col_key,
+ _column_as_key, _col_bind_name, check_columns, values, kw):
+
+ need_pks, implicit_returning, \
+ implicit_return_defaults, postfetch_lastrowid = \
+ _get_returning_modifiers(compiler, stmt)
+
+ cols = [stmt.table.c[_column_as_key(name)]
+ for name in stmt.select_names]
+
+ compiler._insert_from_select = stmt.select
+
+ add_select_cols = []
+ if stmt.include_insert_from_select_defaults:
+ col_set = set(cols)
+ for col in stmt.table.columns:
+ if col not in col_set and col.default:
+ cols.append(col)
+
+ for c in cols:
+ col_key = _getattr_col_key(c)
+ if col_key in parameters and col_key not in check_columns:
+ parameters.pop(col_key)
+ values.append((c, None))
+ else:
+ _append_param_insert_select_hasdefault(
+ compiler, stmt, c, add_select_cols, kw)
+
+ if add_select_cols:
+ values.extend(add_select_cols)
+ compiler._insert_from_select = compiler._insert_from_select._generate()
+ compiler._insert_from_select._raw_columns += tuple(
+ expr for col, expr in add_select_cols)
+
+
+def _scan_cols(
+ compiler, stmt, parameters, _getattr_col_key,
+ _column_as_key, _col_bind_name, check_columns, values, kw):
+
+ need_pks, implicit_returning, \
+ implicit_return_defaults, postfetch_lastrowid = \
+ _get_returning_modifiers(compiler, stmt)
+
+ cols = stmt.table.columns
+
+ for c in cols:
+ col_key = _getattr_col_key(c)
+ if col_key in parameters and col_key not in check_columns:
+
+ _append_param_parameter(
+ compiler, stmt, c, col_key, parameters, _col_bind_name,
+ implicit_returning, implicit_return_defaults, values, kw)
+
+ elif compiler.isinsert:
+ if c.primary_key and \
+ need_pks and \
+ (
+ implicit_returning or
+ not postfetch_lastrowid or
+ c is not stmt.table._autoincrement_column
+ ):
+
+ if implicit_returning:
+ _append_param_insert_pk_returning(
+ compiler, stmt, c, values, kw)
+ else:
+ _append_param_insert_pk(compiler, stmt, c, values, kw)
+
+ elif c.default is not None:
+
+ _append_param_insert_hasdefault(
+ compiler, stmt, c, implicit_return_defaults,
+ values, kw)
+
+ elif c.server_default is not None:
+ if implicit_return_defaults and \
+ c in implicit_return_defaults:
+ compiler.returning.append(c)
+ elif not c.primary_key:
+ compiler.postfetch.append(c)
+ elif implicit_return_defaults and \
+ c in implicit_return_defaults:
+ compiler.returning.append(c)
+
+ elif compiler.isupdate:
+ _append_param_update(
+ compiler, stmt, c, implicit_return_defaults, values, kw)
+
+
+def _append_param_parameter(
+ compiler, stmt, c, col_key, parameters, _col_bind_name,
+ implicit_returning, implicit_return_defaults, values, kw):
+ value = parameters.pop(col_key)
+ if elements._is_literal(value):
+ value = _create_bind_param(
+ compiler, c, value, required=value is REQUIRED,
+ name=_col_bind_name(c)
+ if not stmt._has_multi_parameters
+ else "%s_0" % _col_bind_name(c)
+ )
+ else:
+ if isinstance(value, elements.BindParameter) and \
+ value.type._isnull:
+ value = value._clone()
+ value.type = c.type
+
+ if c.primary_key and implicit_returning:
+ compiler.returning.append(c)
+ value = compiler.process(value.self_group(), **kw)
+ elif implicit_return_defaults and \
+ c in implicit_return_defaults:
+ compiler.returning.append(c)
+ value = compiler.process(value.self_group(), **kw)
+ else:
+ compiler.postfetch.append(c)
+ value = compiler.process(value.self_group(), **kw)
+ values.append((c, value))
+
+
+def _append_param_insert_pk_returning(compiler, stmt, c, values, kw):
+ if c.default is not None:
+ if c.default.is_sequence:
+ if compiler.dialect.supports_sequences and \
+ (not c.default.optional or
+ not compiler.dialect.sequences_optional):
+ proc = compiler.process(c.default, **kw)
+ values.append((c, proc))
+ compiler.returning.append(c)
+ elif c.default.is_clause_element:
+ values.append(
+ (c, compiler.process(
+ c.default.arg.self_group(), **kw))
+ )
+ compiler.returning.append(c)
+ else:
+ values.append(
+ (c, _create_bind_param(compiler, c, None))
+ )
+ compiler.prefetch.append(c)
+ else:
+ compiler.returning.append(c)
+
+
+def _append_param_insert_pk(compiler, stmt, c, values, kw):
+ if (
+ (c.default is not None and
+ (not c.default.is_sequence or
+ compiler.dialect.supports_sequences)) or
+ c is stmt.table._autoincrement_column and
+ (compiler.dialect.supports_sequences or
+ compiler.dialect.
+ preexecute_autoincrement_sequences)
+ ):
+ values.append(
+ (c, _create_bind_param(compiler, c, None))
+ )
+
+ compiler.prefetch.append(c)
+
+
+def _append_param_insert_hasdefault(
+ compiler, stmt, c, implicit_return_defaults, values, kw):
+
+ if c.default.is_sequence:
+ if compiler.dialect.supports_sequences and \
+ (not c.default.optional or
+ not compiler.dialect.sequences_optional):
+ proc = compiler.process(c.default, **kw)
+ values.append((c, proc))
+ if implicit_return_defaults and \
+ c in implicit_return_defaults:
+ compiler.returning.append(c)
+ elif not c.primary_key:
+ compiler.postfetch.append(c)
+ elif c.default.is_clause_element:
+ proc = compiler.process(c.default.arg.self_group(), **kw)
+ values.append((c, proc))
+
+ if implicit_return_defaults and \
+ c in implicit_return_defaults:
+ compiler.returning.append(c)
+ elif not c.primary_key:
+ # don't add primary key column to postfetch
+ compiler.postfetch.append(c)
+ else:
+ values.append(
+ (c, _create_bind_param(compiler, c, None))
+ )
+ compiler.prefetch.append(c)
+
+
+def _append_param_insert_select_hasdefault(
+ compiler, stmt, c, values, kw):
+
+ if c.default.is_sequence:
+ if compiler.dialect.supports_sequences and \
+ (not c.default.optional or
+ not compiler.dialect.sequences_optional):
+ proc = c.default
+ values.append((c, proc))
+ elif c.default.is_clause_element:
+ proc = c.default.arg.self_group()
+ values.append((c, proc))
+ else:
+ values.append(
+ (c, _create_bind_param(compiler, c, None, process=False))
+ )
+ compiler.prefetch.append(c)
+
+
+def _append_param_update(
+ compiler, stmt, c, implicit_return_defaults, values, kw):
+
+ if c.onupdate is not None and not c.onupdate.is_sequence:
+ if c.onupdate.is_clause_element:
+ values.append(
+ (c, compiler.process(
+ c.onupdate.arg.self_group(), **kw))
+ )
+ if implicit_return_defaults and \
+ c in implicit_return_defaults:
+ compiler.returning.append(c)
+ else:
+ compiler.postfetch.append(c)
+ else:
+ values.append(
+ (c, _create_bind_param(compiler, c, None))
+ )
+ compiler.prefetch.append(c)
+ elif c.server_onupdate is not None:
+ if implicit_return_defaults and \
+ c in implicit_return_defaults:
+ compiler.returning.append(c)
+ else:
+ compiler.postfetch.append(c)
+ elif implicit_return_defaults and \
+ c in implicit_return_defaults:
+ compiler.returning.append(c)
+
+
+def _get_multitable_params(
+ compiler, stmt, stmt_parameters, check_columns,
+ _col_bind_name, _getattr_col_key, values, kw):
+
+ normalized_params = dict(
+ (elements._clause_element_as_expr(c), param)
+ for c, param in stmt_parameters.items()
+ )
+ affected_tables = set()
+ for t in stmt._extra_froms:
+ for c in t.c:
+ if c in normalized_params:
+ affected_tables.add(t)
+ check_columns[_getattr_col_key(c)] = c
+ value = normalized_params[c]
+ if elements._is_literal(value):
+ value = _create_bind_param(
+ compiler, c, value, required=value is REQUIRED,
+ name=_col_bind_name(c))
+ else:
+ compiler.postfetch.append(c)
+ value = compiler.process(value.self_group(), **kw)
+ values.append((c, value))
+ # determine tables which are actually to be updated - process onupdate
+ # and server_onupdate for these
+ for t in affected_tables:
+ for c in t.c:
+ if c in normalized_params:
+ continue
+ elif (c.onupdate is not None and not
+ c.onupdate.is_sequence):
+ if c.onupdate.is_clause_element:
+ values.append(
+ (c, compiler.process(
+ c.onupdate.arg.self_group(),
+ **kw)
+ )
+ )
+ compiler.postfetch.append(c)
+ else:
+ values.append(
+ (c, _create_bind_param(
+ compiler, c, None, name=_col_bind_name(c)
+ )
+ )
+ )
+ compiler.prefetch.append(c)
+ elif c.server_onupdate is not None:
+ compiler.postfetch.append(c)
+
+
+def _extend_values_for_multiparams(compiler, stmt, values, kw):
+ values_0 = values
+ values = [values]
+
+ values.extend(
+ [
+ (
+ c,
+ (_create_bind_param(
+ compiler, c, row[c.key],
+ name="%s_%d" % (c.key, i + 1)
+ ) if elements._is_literal(row[c.key])
+ else compiler.process(
+ row[c.key].self_group(), **kw))
+ if c.key in row else param
+ )
+ for (c, param) in values_0
+ ]
+ for i, row in enumerate(stmt.parameters[1:])
+ )
+ return values
+
+
+def _get_stmt_parameters_params(
+ compiler, parameters, stmt_parameters, _column_as_key, values, kw):
+ for k, v in stmt_parameters.items():
+ colkey = _column_as_key(k)
+ if colkey is not None:
+ parameters.setdefault(colkey, v)
+ else:
+ # a non-Column expression on the left side;
+ # add it to values() in an "as-is" state,
+ # coercing right side to bound param
+ if elements._is_literal(v):
+ v = compiler.process(
+ elements.BindParameter(None, v, type_=k.type),
+ **kw)
+ else:
+ v = compiler.process(v.self_group(), **kw)
+
+ values.append((k, v))
+
+
+def _get_returning_modifiers(compiler, stmt):
+ need_pks = compiler.isinsert and \
+ not compiler.inline and \
+ not stmt._returning and \
+ not stmt._has_multi_parameters
+
+ implicit_returning = need_pks and \
+ compiler.dialect.implicit_returning and \
+ stmt.table.implicit_returning
+
+ if compiler.isinsert:
+ implicit_return_defaults = (implicit_returning and
+ stmt._return_defaults)
+ elif compiler.isupdate:
+ implicit_return_defaults = (compiler.dialect.implicit_returning and
+ stmt.table.implicit_returning and
+ stmt._return_defaults)
+ else:
+ implicit_return_defaults = False
+
+ if implicit_return_defaults:
+ if stmt._return_defaults is True:
+ implicit_return_defaults = set(stmt.table.c)
+ else:
+ implicit_return_defaults = set(stmt._return_defaults)
+
+ postfetch_lastrowid = need_pks and compiler.dialect.postfetch_lastrowid
+
+ return need_pks, implicit_returning, \
+ implicit_return_defaults, postfetch_lastrowid
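
As a rough illustration of what the extracted crud module computes: compiling
an insert() with no parameters yields a bind for every table column, each
carrying the REQUIRED placeholder so that executing the statement without
values fails loudly. A minimal sketch, assuming a hypothetical ``users``
table::

    from sqlalchemy import Table, Column, Integer, String, MetaData, insert

    metadata = MetaData()
    users = Table('users', metadata,
                  Column('id', Integer, primary_key=True),
                  Column('name', String(50)))

    # column_keys is None and stmt.parameters is None, so
    # _get_crud_params() returns a (column, bind) pair per column
    print(insert(users))
    # INSERT INTO users (id, name) VALUES (:id, :name)
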
diff --git a/lib/sqlalchemy/sql/dml.py b/lib/sqlalchemy/sql/dml.py
index 1934d0776..9f2ce7ce3 100644
--- a/lib/sqlalchemy/sql/dml.py
+++ b/lib/sqlalchemy/sql/dml.py
@@ -475,6 +475,7 @@ class Insert(ValuesBase):
ValuesBase.__init__(self, table, values, prefixes)
self._bind = bind
self.select = self.select_names = None
+ self.include_insert_from_select_defaults = False
self.inline = inline
self._returning = returning
self._validate_dialect_kwargs(dialect_kw)
@@ -487,7 +488,7 @@ class Insert(ValuesBase):
return ()
@_generative
- def from_select(self, names, select):
+ def from_select(self, names, select, include_defaults=True):
"""Return a new :class:`.Insert` construct which represents
an ``INSERT...FROM SELECT`` statement.
@@ -506,6 +507,21 @@ class Insert(ValuesBase):
is not checked before passing along to the database, the database
would normally raise an exception if these column lists don't
correspond.
+ :param include_defaults: if True, non-server default values and
+ SQL expressions as specified on :class:`.Column` objects
+ (as documented in :ref:`metadata_defaults_toplevel`) not
+ otherwise specified in the list of names will be rendered
+ into the INSERT and SELECT statements, so that these values are also
+ included in the data to be inserted.
+
+ .. note:: A Python-side default that uses a Python callable function
+ will only be invoked **once** for the whole statement, and **not
+ per row**.
+
+ .. versionadded:: 1.0.0 - :meth:`.Insert.from_select` now renders
+ Python-side and SQL expression column defaults into the
+ SELECT statement for columns otherwise not included in the
+ list of column names.
.. versionchanged:: 1.0.0 an INSERT that uses FROM SELECT
implies that the :paramref:`.insert.inline` flag is set to
@@ -514,13 +530,6 @@ class Insert(ValuesBase):
deals with an arbitrary number of rows, so the
:attr:`.ResultProxy.inserted_primary_key` accessor does not apply.
- .. note::
-
- A SELECT..INSERT construct in SQL has no VALUES clause. Therefore
- :class:`.Column` objects which utilize Python-side defaults
- (e.g. as described at :ref:`metadata_defaults_toplevel`)
- will **not** take effect when using :meth:`.Insert.from_select`.
-
.. versionadded:: 0.8.3
"""
@@ -533,6 +542,7 @@ class Insert(ValuesBase):
self.select_names = names
self.inline = True
+ self.include_insert_from_select_defaults = include_defaults
self.select = _interpret_as_select(select)
def _copy_internals(self, clone=_clone, **kw):
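
The effect of the new ``include_defaults`` flag shows up when the target table
carries a Python-side column default; roughly, with hypothetical tables where
``widgets.c.timestamp`` has ``default=datetime.utcnow``::

    from sqlalchemy import select

    stmt = widgets.insert().from_select(
        ['name'], select([old_widgets.c.name]))
    # 1.0 renders the default into both column lists, approximately:
    # INSERT INTO widgets (name, timestamp)
    #     SELECT old_widgets.name, :timestamp AS anon_1 FROM old_widgets

    # opt out to restore the 0.9 behavior:
    stmt = widgets.insert().from_select(
        ['name'], select([old_widgets.c.name]), include_defaults=False)
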
diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py
index cf8de936d..4d5bb9476 100644
--- a/lib/sqlalchemy/sql/elements.py
+++ b/lib/sqlalchemy/sql/elements.py
@@ -228,6 +228,7 @@ class ClauseElement(Visitable):
is_selectable = False
is_clause_element = True
+ description = None
_order_by_label_element = None
_is_from_container = False
@@ -540,7 +541,7 @@ class ClauseElement(Visitable):
__nonzero__ = __bool__
def __repr__(self):
- friendly = getattr(self, 'description', None)
+ friendly = self.description
if friendly is None:
return object.__repr__(self)
else:
@@ -1616,10 +1617,10 @@ class Null(ColumnElement):
return type_api.NULLTYPE
@classmethod
- def _singleton(cls):
+ def _instance(cls):
"""Return a constant :class:`.Null` construct."""
- return NULL
+ return Null()
def compare(self, other):
return isinstance(other, Null)
@@ -1640,11 +1641,11 @@ class False_(ColumnElement):
return type_api.BOOLEANTYPE
def _negate(self):
- return TRUE
+ return True_()
@classmethod
- def _singleton(cls):
- """Return a constant :class:`.False_` construct.
+ def _instance(cls):
+ """Return a :class:`.False_` construct.
E.g.::
@@ -1678,7 +1679,7 @@ class False_(ColumnElement):
"""
- return FALSE
+ return False_()
def compare(self, other):
return isinstance(other, False_)
@@ -1699,17 +1700,17 @@ class True_(ColumnElement):
return type_api.BOOLEANTYPE
def _negate(self):
- return FALSE
+ return False_()
@classmethod
def _ifnone(cls, other):
if other is None:
- return cls._singleton()
+ return cls._instance()
else:
return other
@classmethod
- def _singleton(cls):
+ def _instance(cls):
"""Return a constant :class:`.True_` construct.
E.g.::
@@ -1744,15 +1745,11 @@ class True_(ColumnElement):
"""
- return TRUE
+ return True_()
def compare(self, other):
return isinstance(other, True_)
-NULL = Null()
-FALSE = False_()
-TRUE = True_()
-
class ClauseList(ClauseElement):
"""Describe a list of clauses, separated by an operator.
@@ -2356,14 +2353,39 @@ class Extract(ColumnElement):
class _label_reference(ColumnElement):
+ """Wrap a column expression as it appears in a 'reference' context.
+
+ This expression is any expression that includes an _order_by_label_element,
+ which is a Label, or a DESC / ASC construct wrapping a Label.
+
+ The production of _label_reference() should occur when an expression
+ is added to this context; this includes the ORDER BY or GROUP BY of a
+ SELECT statement, as well as a few other places, such as the ORDER BY
+ within an OVER clause.
+
+ """
__visit_name__ = 'label_reference'
- def __init__(self, text):
- self.text = self.key = text
+ def __init__(self, element):
+ self.element = element
+
+ def _copy_internals(self, clone=_clone, **kw):
+ self.element = clone(self.element, **kw)
+
+ @property
+ def _from_objects(self):
+ return ()
+
+
+class _textual_label_reference(ColumnElement):
+ __visit_name__ = 'textual_label_reference'
+
+ def __init__(self, element):
+ self.element = element
@util.memoized_property
def _text_clause(self):
- return TextClause._create_text(self.text)
+ return TextClause._create_text(self.element)
class UnaryExpression(ColumnElement):
@@ -2863,6 +2885,120 @@ class Over(ColumnElement):
))
+class FunctionFilter(ColumnElement):
+ """Represent a function FILTER clause.
+
+ This is a special operator against aggregate and window functions,
+ which controls which rows are passed to it.
+ It's supported only by certain database backends.
+
+ Invocation of :class:`.FunctionFilter` is via
+ :meth:`.FunctionElement.filter`::
+
+ func.count(1).filter(True)
+
+ .. versionadded:: 1.0.0
+
+ .. seealso::
+
+ :meth:`.FunctionElement.filter`
+
+ """
+ __visit_name__ = 'funcfilter'
+
+ criterion = None
+
+ def __init__(self, func, *criterion):
+ """Produce a :class:`.FunctionFilter` object against a function.
+
+ Used against aggregate and window functions,
+ for database backends that support the "FILTER" clause.
+
+ E.g.::
+
+ from sqlalchemy import funcfilter
+ funcfilter(func.count(1), MyClass.name == 'some name')
+
+ Would produce "COUNT(1) FILTER (WHERE myclass.name = 'some name')".
+
+ This function is also available from the :data:`~.expression.func`
+ construct itself via the :meth:`.FunctionElement.filter` method.
+
+ .. versionadded:: 1.0.0
+
+ .. seealso::
+
+ :meth:`.FunctionElement.filter`
+
+
+ """
+ self.func = func
+ self.filter(*criterion)
+
+ def filter(self, *criterion):
+ """Produce an additional FILTER against the function.
+
+ This method adds additional criteria to the initial criteria
+ set up by :meth:`.FunctionElement.filter`.
+
+ Multiple criteria are joined together at SQL render time
+ via ``AND``.
+
+
+ """
+
+ for criterion in list(criterion):
+ criterion = _expression_literal_as_text(criterion)
+
+ if self.criterion is not None:
+ self.criterion = self.criterion & criterion
+ else:
+ self.criterion = criterion
+
+ return self
+
+ def over(self, partition_by=None, order_by=None):
+ """Produce an OVER clause against this filtered function.
+
+ Used against aggregate or so-called "window" functions,
+ for database backends that support window functions.
+
+ The expression::
+
+ func.rank().filter(MyClass.y > 5).over(order_by='x')
+
+ is shorthand for::
+
+ from sqlalchemy import over, funcfilter
+ over(funcfilter(func.rank(), MyClass.y > 5), order_by='x')
+
+ See :func:`~.expression.over` for a full description.
+
+ """
+ return Over(self, partition_by=partition_by, order_by=order_by)
+
+ @util.memoized_property
+ def type(self):
+ return self.func.type
+
+ def get_children(self, **kwargs):
+ return [c for c in
+ (self.func, self.criterion)
+ if c is not None]
+
+ def _copy_internals(self, clone=_clone, **kw):
+ self.func = clone(self.func, **kw)
+ if self.criterion is not None:
+ self.criterion = clone(self.criterion, **kw)
+
+ @property
+ def _from_objects(self):
+ return list(itertools.chain(
+ *[c._from_objects for c in (self.func, self.criterion)
+ if c is not None]
+ ))
+
+
class Label(ColumnElement):
"""Represents a column label (AS).
@@ -3466,7 +3602,7 @@ def _string_or_unprintable(element):
else:
try:
return str(element)
- except:
+ except Exception:
return "unprintable element %r" % element
@@ -3556,6 +3692,13 @@ def _clause_element_as_expr(element):
def _literal_as_label_reference(element):
if isinstance(element, util.string_types):
+ return _textual_label_reference(element)
+
+ elif hasattr(element, '__clause_element__'):
+ element = element.__clause_element__()
+
+ if isinstance(element, ColumnElement) and \
+ element._order_by_label_element is not None:
return _label_reference(element)
else:
return _literal_as_text(element)
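
A visible consequence of the ``_singleton`` to ``_instance`` change above,
together with the removal of the module-level NULL/FALSE/TRUE constants, is
that each invocation now produces a distinct construct, so clause traversal
and cloning can no longer mutate a shared object::

    from sqlalchemy import null, true, false

    assert null() is not null()
    assert true() is not true()
    assert false() is not false()
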
diff --git a/lib/sqlalchemy/sql/expression.py b/lib/sqlalchemy/sql/expression.py
index d96f048b9..2ffc5468c 100644
--- a/lib/sqlalchemy/sql/expression.py
+++ b/lib/sqlalchemy/sql/expression.py
@@ -36,7 +36,7 @@ from .elements import ClauseElement, ColumnElement,\
True_, False_, BinaryExpression, Tuple, TypeClause, Extract, \
Grouping, not_, \
collate, literal_column, between,\
- literal, outparam, type_coerce, ClauseList
+ literal, outparam, type_coerce, ClauseList, FunctionFilter
from .elements import SavepointClause, RollbackToSavepointClause, \
ReleaseSavepointClause
@@ -89,14 +89,16 @@ asc = public_factory(UnaryExpression._create_asc, ".expression.asc")
desc = public_factory(UnaryExpression._create_desc, ".expression.desc")
distinct = public_factory(
UnaryExpression._create_distinct, ".expression.distinct")
-true = public_factory(True_._singleton, ".expression.true")
-false = public_factory(False_._singleton, ".expression.false")
-null = public_factory(Null._singleton, ".expression.null")
+true = public_factory(True_._instance, ".expression.true")
+false = public_factory(False_._instance, ".expression.false")
+null = public_factory(Null._instance, ".expression.null")
join = public_factory(Join._create_join, ".expression.join")
outerjoin = public_factory(Join._create_outerjoin, ".expression.outerjoin")
insert = public_factory(Insert, ".expression.insert")
update = public_factory(Update, ".expression.update")
delete = public_factory(Delete, ".expression.delete")
+funcfilter = public_factory(
+ FunctionFilter, ".expression.funcfilter")
# internal functions still being called from tests and the ORM,
diff --git a/lib/sqlalchemy/sql/functions.py b/lib/sqlalchemy/sql/functions.py
index 7efb1e916..9280c7d60 100644
--- a/lib/sqlalchemy/sql/functions.py
+++ b/lib/sqlalchemy/sql/functions.py
@@ -12,7 +12,7 @@ from . import sqltypes, schema
from .base import Executable, ColumnCollection
from .elements import ClauseList, Cast, Extract, _literal_as_binds, \
literal_column, _type_from_args, ColumnElement, _clone,\
- Over, BindParameter
+ Over, BindParameter, FunctionFilter
from .selectable import FromClause, Select, Alias
from . import operators
@@ -116,6 +116,35 @@ class FunctionElement(Executable, ColumnElement, FromClause):
"""
return Over(self, partition_by=partition_by, order_by=order_by)
+ def filter(self, *criterion):
+ """Produce a FILTER clause against this function.
+
+ Used against aggregate and window functions,
+ for database backends that support the "FILTER" clause.
+
+ The expression::
+
+ func.count(1).filter(True)
+
+ is shorthand for::
+
+ from sqlalchemy import funcfilter
+ funcfilter(func.count(1), True)
+
+ .. versionadded:: 1.0.0
+
+ .. seealso::
+
+ :class:`.FunctionFilter`
+
+ :func:`.funcfilter`
+
+
+ """
+ if not criterion:
+ return self
+ return FunctionFilter(self, *criterion)
+
@property
def _from_objects(self):
return self.clauses._from_objects
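
Putting the new method together with the FunctionFilter construct from
elements.py, a short usage sketch against a hypothetical ``users`` table::

    from sqlalchemy import func, select

    stmt = select([
        func.count(users.c.id).filter(users.c.name != None)
    ])
    # on a supporting backend such as PostgreSQL 9.4, renders roughly:
    # SELECT count(users.id) FILTER (WHERE users.name IS NOT NULL)
    #     AS anon_1 FROM users
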
diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py
index d9fd37f92..96cabbf4f 100644
--- a/lib/sqlalchemy/sql/schema.py
+++ b/lib/sqlalchemy/sql/schema.py
@@ -412,8 +412,8 @@ class Table(DialectKWArgs, SchemaItem, TableClause):
table.dispatch.after_parent_attach(table, metadata)
return table
except:
- metadata._remove_table(name, schema)
- raise
+ with util.safe_reraise():
+ metadata._remove_table(name, schema)
@property
@util.deprecated('0.9', 'Use ``table.schema.quote``')
@@ -728,7 +728,7 @@ class Table(DialectKWArgs, SchemaItem, TableClause):
checkfirst=checkfirst)
def tometadata(self, metadata, schema=RETAIN_SCHEMA,
- referred_schema_fn=None):
+ referred_schema_fn=None, name=None):
"""Return a copy of this :class:`.Table` associated with a different
:class:`.MetaData`.
@@ -785,13 +785,21 @@ class Table(DialectKWArgs, SchemaItem, TableClause):
.. versionadded:: 0.9.2
- """
+ :param name: optional string name indicating the target table name.
+ If not specified or None, the table name is retained. This allows
+ a :class:`.Table` to be copied to the same :class:`.MetaData` target
+ with a new name.
+
+ .. versionadded:: 1.0.0
+ """
+ if name is None:
+ name = self.name
if schema is RETAIN_SCHEMA:
schema = self.schema
elif schema is None:
schema = metadata.schema
- key = _get_table_key(self.name, schema)
+ key = _get_table_key(name, schema)
if key in metadata.tables:
util.warn("Table '%s' already exists within the given "
"MetaData - not copying." % self.description)
@@ -801,7 +809,7 @@ class Table(DialectKWArgs, SchemaItem, TableClause):
for c in self.columns:
args.append(c.copy(schema=schema))
table = Table(
- self.name, metadata, schema=schema,
+ name, metadata, schema=schema,
*args, **self.kwargs
)
for c in self.constraints:
@@ -1061,8 +1069,8 @@ class Column(SchemaItem, ColumnClause):
conditionally rendered differently on different backends,
consider custom compilation rules for :class:`.CreateColumn`.
- ..versionadded:: 0.8.3 Added the ``system=True`` parameter to
- :class:`.Column`.
+ .. versionadded:: 0.8.3 Added the ``system=True`` parameter to
+ :class:`.Column`.
"""
@@ -1222,8 +1230,10 @@ class Column(SchemaItem, ColumnClause):
existing = getattr(self, 'table', None)
if existing is not None and existing is not table:
raise exc.ArgumentError(
- "Column object already assigned to Table '%s'" %
- existing.description)
+ "Column object '%s' already assigned to Table '%s'" % (
+ self.key,
+ existing.description
+ ))
if self.key in table._columns:
col = table._columns.get(self.key)
@@ -1547,7 +1557,7 @@ class ForeignKey(DialectKWArgs, SchemaItem):
)
return self._schema_item_copy(fk)
- def _get_colspec(self, schema=None):
+ def _get_colspec(self, schema=None, table_name=None):
"""Return a string based 'column specification' for this
:class:`.ForeignKey`.
@@ -1557,7 +1567,15 @@ class ForeignKey(DialectKWArgs, SchemaItem):
"""
if schema:
_schema, tname, colname = self._column_tokens
+ if table_name is not None:
+ tname = table_name
return "%s.%s.%s" % (schema, tname, colname)
+ elif table_name:
+ schema, tname, colname = self._column_tokens
+ if schema:
+ return "%s.%s.%s" % (schema, table_name, colname)
+ else:
+ return "%s.%s" % (table_name, colname)
elif self._table_column is not None:
return "%s.%s" % (
self._table_column.table.fullname, self._table_column.key)
@@ -2649,10 +2667,15 @@ class ForeignKeyConstraint(Constraint):
event.listen(table.metadata, "before_drop",
ddl.DropConstraint(self, on=supports_alter))
- def copy(self, schema=None, **kw):
+ def copy(self, schema=None, target_table=None, **kw):
fkc = ForeignKeyConstraint(
[x.parent.key for x in self._elements.values()],
- [x._get_colspec(schema=schema)
+ [x._get_colspec(
+ schema=schema,
+ table_name=target_table.name
+ if target_table is not None
+ and x._table_key() == x.parent.table.key
+ else None)
for x in self._elements.values()],
name=self.name,
onupdate=self.onupdate,
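
The new ``name`` parameter, combined with the ``target_table`` handling in
``ForeignKeyConstraint.copy()`` above, allows a table to be duplicated under
a new name within the same MetaData, with self-referential foreign keys
repointed to the copy; a sketch with a hypothetical table::

    # an FK such as users.c.parent_id -> users.c.id is rewritten to
    # reference 'users_archive' rather than the original 'users'
    users_archive = users.tometadata(metadata, name='users_archive')
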
diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py
index 57b16f45f..8198a6733 100644
--- a/lib/sqlalchemy/sql/selectable.py
+++ b/lib/sqlalchemy/sql/selectable.py
@@ -746,6 +746,33 @@ class Join(FromClause):
providing a "natural join".
"""
+ constraints = cls._joincond_scan_left_right(
+ a, a_subset, b, consider_as_foreign_keys)
+
+ if len(constraints) > 1:
+ cls._joincond_trim_constraints(
+ a, b, constraints, consider_as_foreign_keys)
+
+ if len(constraints) == 0:
+ if isinstance(b, FromGrouping):
+ hint = " Perhaps you meant to convert the right side to a "\
+ "subquery using alias()?"
+ else:
+ hint = ""
+ raise exc.NoForeignKeysError(
+ "Can't find any foreign key relationships "
+ "between '%s' and '%s'.%s" %
+ (a.description, b.description, hint))
+
+ crit = [(x == y) for x, y in list(constraints.values())[0]]
+ if len(crit) == 1:
+ return (crit[0])
+ else:
+ return and_(*crit)
+
+ @classmethod
+ def _joincond_scan_left_right(
+ cls, a, a_subset, b, consider_as_foreign_keys):
constraints = collections.defaultdict(list)
for left in (a_subset, a):
@@ -780,57 +807,41 @@ class Join(FromClause):
if nrte.table_name == b.name:
raise
else:
- # this is totally covered. can't get
- # coverage to mark it.
continue
if col is not None:
constraints[fk.constraint].append((col, fk.parent))
if constraints:
break
+ return constraints
+ @classmethod
+ def _joincond_trim_constraints(
+ cls, a, b, constraints, consider_as_foreign_keys):
+ # more than one constraint matched. narrow down the list
+ # to include just those FKCs that match exactly to
+ # "consider_as_foreign_keys".
+ if consider_as_foreign_keys:
+ for const in list(constraints):
+ if set(f.parent for f in const.elements) != set(
+ consider_as_foreign_keys):
+ del constraints[const]
+
+ # if still multiple constraints, but
+ # they all refer to the exact same end result, use it.
if len(constraints) > 1:
- # more than one constraint matched. narrow down the list
- # to include just those FKCs that match exactly to
- # "consider_as_foreign_keys".
- if consider_as_foreign_keys:
- for const in list(constraints):
- if set(f.parent for f in const.elements) != set(
- consider_as_foreign_keys):
- del constraints[const]
-
- # if still multiple constraints, but
- # they all refer to the exact same end result, use it.
- if len(constraints) > 1:
- dedupe = set(tuple(crit) for crit in constraints.values())
- if len(dedupe) == 1:
- key = list(constraints)[0]
- constraints = {key: constraints[key]}
-
- if len(constraints) != 1:
- raise exc.AmbiguousForeignKeysError(
- "Can't determine join between '%s' and '%s'; "
- "tables have more than one foreign key "
- "constraint relationship between them. "
- "Please specify the 'onclause' of this "
- "join explicitly." % (a.description, b.description))
-
- if len(constraints) == 0:
- if isinstance(b, FromGrouping):
- hint = " Perhaps you meant to convert the right side to a "\
- "subquery using alias()?"
- else:
- hint = ""
- raise exc.NoForeignKeysError(
- "Can't find any foreign key relationships "
- "between '%s' and '%s'.%s" %
- (a.description, b.description, hint))
-
- crit = [(x == y) for x, y in list(constraints.values())[0]]
- if len(crit) == 1:
- return (crit[0])
- else:
- return and_(*crit)
+ dedupe = set(tuple(crit) for crit in constraints.values())
+ if len(dedupe) == 1:
+ key = list(constraints)[0]
+ constraints = {key: constraints[key]}
+
+ if len(constraints) != 1:
+ raise exc.AmbiguousForeignKeysError(
+ "Can't determine join between '%s' and '%s'; "
+ "tables have more than one foreign key "
+ "constraint relationship between them. "
+ "Please specify the 'onclause' of this "
+ "join explicitly." % (a.description, b.description))
def select(self, whereclause=None, **kwargs):
"""Create a :class:`.Select` from this :class:`.Join`.
@@ -1742,14 +1753,42 @@ class GenerativeSelect(SelectBase):
@_generative
def limit(self, limit):
"""return a new selectable with the given LIMIT criterion
- applied."""
+ applied.
+
+ This is a numerical value which usually renders as a ``LIMIT``
+ expression in the resulting select. Backends that don't
+ support ``LIMIT`` will attempt to provide similar
+ functionality.
+
+ .. versionchanged:: 1.0.0 - :meth:`.Select.limit` can now
+ accept arbitrary SQL expressions as well as integer values.
+
+ :param limit: an integer LIMIT parameter, or a SQL expression
+ that provides an integer result.
+
+ """
self._limit_clause = _offset_or_limit_clause(limit)
@_generative
def offset(self, offset):
"""return a new selectable with the given OFFSET criterion
- applied."""
+ applied.
+
+
+ This is a numeric value which usually renders as an ``OFFSET``
+ expression in the resulting select. Backends that don't
+ support ``OFFSET`` will attempt to provide similar
+ functionality.
+
+
+ .. versionchanged:: 1.0.0 - :meth:`.Select.offset` can now
+ accept arbitrary SQL expressions as well as integer values.
+
+ :param offset: an integer OFFSET parameter, or a SQL expression
+ that provides an integer result.
+
+ """
self._offset_clause = _offset_or_limit_clause(offset)
@@ -1885,9 +1924,10 @@ class CompoundSelect(GenerativeSelect):
@property
def _label_resolve_dict(self):
- return dict(
+ d = dict(
(c.key, c) for c in self.c
)
+ return d, d
@classmethod
def _create_union(cls, *selects, **kwargs):
@@ -2124,6 +2164,7 @@ class Select(HasPrefixes, GenerativeSelect):
_prefixes = ()
_hints = util.immutabledict()
+ _statement_hints = ()
_distinct = False
_from_cloned = None
_correlate = ()
@@ -2155,38 +2196,57 @@ class Select(HasPrefixes, GenerativeSelect):
:func:`.select`.
:param columns:
- A list of :class:`.ClauseElement` objects, typically
- :class:`.ColumnElement` objects or subclasses, which will form the
- columns clause of the resulting statement. For all members which are
- instances of :class:`.Selectable`, the individual
- :class:`.ColumnElement` members of the :class:`.Selectable` will be
- added individually to the columns clause. For example, specifying a
- :class:`~sqlalchemy.schema.Table` instance will result in all the
- contained :class:`~sqlalchemy.schema.Column` objects within to be
- added to the columns clause.
-
- This argument is not present on the form of :func:`select()`
- available on :class:`~sqlalchemy.schema.Table`.
+ A list of :class:`.ColumnElement` or :class:`.FromClause`
+ objects which will form the columns clause of the resulting
+ statement. For those objects that are instances of
+ :class:`.FromClause` (typically :class:`.Table` or :class:`.Alias`
+ objects), the :attr:`.FromClause.c` collection is extracted
+ to form a collection of :class:`.ColumnElement` objects.
+
+ This parameter will also accept :class:`.Text` constructs as
+ given, as well as ORM-mapped classes.
+
+ .. note::
+
+ The :paramref:`.select.columns` parameter is not available
+ in the method form of :func:`.select`, e.g.
+ :meth:`.FromClause.select`.
+
+ .. seealso::
+
+ :meth:`.Select.column`
+
+ :meth:`.Select.with_only_columns`
:param whereclause:
A :class:`.ClauseElement` expression which will be used to form the
- ``WHERE`` clause.
+ ``WHERE`` clause. It is typically preferable to add WHERE
+ criterion to an existing :class:`.Select` using method chaining
+ with :meth:`.Select.where`.
+
+ .. seealso::
+
+ :meth:`.Select.where`
:param from_obj:
A list of :class:`.ClauseElement` objects which will be added to the
- ``FROM`` clause of the resulting statement. Note that "from" objects
- are automatically located within the columns and whereclause
- ClauseElements. Use this parameter to explicitly specify "from"
- objects which are not automatically locatable. This could include
- :class:`~sqlalchemy.schema.Table` objects that aren't otherwise
- present, or :class:`.Join` objects whose presence will supersede
- that of the :class:`~sqlalchemy.schema.Table` objects already
- located in the other clauses.
+ ``FROM`` clause of the resulting statement. This is equivalent
+ to calling :meth:`.Select.select_from` using method chaining on
+ an existing :class:`.Select` object.
+
+ .. seealso::
+
+ :meth:`.Select.select_from` - full description of explicit
+ FROM clause specification.
:param autocommit:
- Deprecated. Use .execution_options(autocommit=<True|False>)
+ Deprecated. Use ``.execution_options(autocommit=<True|False>)``
to set the autocommit option.
+ .. seealso::
+
+ :meth:`.Executable.execution_options`
+
:param bind=None:
an :class:`~.Engine` or :class:`~.Connection` instance
to which the
@@ -2198,11 +2258,13 @@ class Select(HasPrefixes, GenerativeSelect):
:param correlate=True:
indicates that this :class:`.Select` object should have its
contained :class:`.FromClause` elements "correlated" to an enclosing
- :class:`.Select` object. This means that any
- :class:`.ClauseElement` instance within the "froms" collection of
- this :class:`.Select` which is also present in the "froms"
- collection of an enclosing select will not be rendered in the
- ``FROM`` clause of this select statement.
+ :class:`.Select` object. It is typically preferable to specify
+ correlations on an existing :class:`.Select` construct using
+ :meth:`.Select.correlate`.
+
+ .. seealso::
+
+ :meth:`.Select.correlate` - full description of correlation.
:param distinct=False:
when ``True``, applies a ``DISTINCT`` qualifier to the columns
@@ -2213,15 +2275,19 @@ class Select(HasPrefixes, GenerativeSelect):
is understood by the Postgresql dialect to render the
``DISTINCT ON (<columns>)`` syntax.
- ``distinct`` is also available via the :meth:`~.Select.distinct`
- generative method.
+ ``distinct`` is also available on an existing :class:`.Select`
+ object via the :meth:`~.Select.distinct` method.
+
+ .. seealso::
+
+ :meth:`.Select.distinct`
:param for_update=False:
when ``True``, applies ``FOR UPDATE`` to the end of the
resulting statement.
.. deprecated:: 0.9.0 - use
- :meth:`.GenerativeSelect.with_for_update` to specify the
+ :meth:`.Select.with_for_update` to specify the
structure of the ``FOR UPDATE`` clause.
``for_update`` accepts various string values interpreted by
@@ -2236,32 +2302,62 @@ class Select(HasPrefixes, GenerativeSelect):
.. seealso::
- :meth:`.GenerativeSelect.with_for_update` - improved API for
+ :meth:`.Select.with_for_update` - improved API for
specifying the ``FOR UPDATE`` clause.
:param group_by:
a list of :class:`.ClauseElement` objects which will comprise the
- ``GROUP BY`` clause of the resulting select.
+ ``GROUP BY`` clause of the resulting select. This parameter
+ is typically specified more naturally using the
+ :meth:`.Select.group_by` method on an existing :class:`.Select`.
+
+ .. seealso::
+
+ :meth:`.Select.group_by`
:param having:
a :class:`.ClauseElement` that will comprise the ``HAVING`` clause
- of the resulting select when ``GROUP BY`` is used.
+ of the resulting select when ``GROUP BY`` is used. This parameter
+ is typically specified more naturally using the
+ :meth:`.Select.having` method on an existing :class:`.Select`.
+
+ .. seealso::
+
+ :meth:`.Select.having`
:param limit=None:
- a numerical value which usually compiles to a ``LIMIT``
- expression in the resulting select. Databases that don't
+ a numerical value which usually renders as a ``LIMIT``
+ expression in the resulting select. Backends that don't
support ``LIMIT`` will attempt to provide similar
- functionality.
+ functionality. This parameter is typically specified more naturally
+ using the :meth:`.Select.limit` method on an existing
+ :class:`.Select`.
+
+ .. seealso::
+
+ :meth:`.Select.limit`
:param offset=None:
- a numeric value which usually compiles to an ``OFFSET``
- expression in the resulting select. Databases that don't
+ a numeric value which usually renders as an ``OFFSET``
+ expression in the resulting select. Backends that don't
support ``OFFSET`` will attempt to provide similar
- functionality.
+ functionality. This parameter is typically specified more naturally
+ using the :meth:`.Select.offset` method on an existing
+ :class:`.Select`.
+
+ .. seealso::
+
+ :meth:`.Select.offset`
:param order_by:
a scalar or list of :class:`.ClauseElement` objects which will
comprise the ``ORDER BY`` clause of the resulting select.
+ This parameter is typically specified more naturally using the
+ :meth:`.Select.order_by` method on an existing :class:`.Select`.
+
+ .. seealso::
+
+ :meth:`.Select.order_by`
:param use_labels=False:
when ``True``, the statement will be generated using labels
@@ -2272,8 +2368,13 @@ class Select(HasPrefixes, GenerativeSelect):
collection of the resulting :class:`.Select` object will use these
names as well for targeting column members.
- use_labels is also available via the
- :meth:`~.GenerativeSelect.apply_labels` generative method.
+ This parameter can also be specified on an existing
+ :class:`.Select` object using the :meth:`.Select.apply_labels`
+ method.
+
+ .. seealso::
+
+ :meth:`.Select.apply_labels`
"""
self._auto_correlate = correlate
@@ -2436,10 +2537,30 @@ class Select(HasPrefixes, GenerativeSelect):
return self._get_display_froms()
+ def with_statement_hint(self, text, dialect_name='*'):
+ """add a statement hint to this :class:`.Select`.
+
+ This method is similar to :meth:`.Select.with_hint` except that
+ it does not require an individual table, and instead applies to the
+ statement as a whole.
+
+ Hints here are specific to the backend database and may include
+ directives such as isolation levels, file directives, fetch directives,
+ etc.
+
+ .. versionadded:: 1.0.0
+
+ .. seealso::
+
+ :meth:`.Select.with_hint`
+
+ """
+ return self.with_hint(None, text, dialect_name)
+
@_generative
def with_hint(self, selectable, text, dialect_name='*'):
- """Add an indexing hint for the given selectable to this
- :class:`.Select`.
+ """Add an indexing or other executional context hint for the given
+ selectable to this :class:`.Select`.
The text of the hint is rendered in the appropriate
location for the database backend in use, relative
@@ -2451,7 +2572,7 @@ class Select(HasPrefixes, GenerativeSelect):
following::
select([mytable]).\\
- with_hint(mytable, "+ index(%(name)s ix_mytable)")
+ with_hint(mytable, "index(%(name)s ix_mytable)")
Would render SQL as::
@@ -2462,13 +2583,19 @@ class Select(HasPrefixes, GenerativeSelect):
and Sybase simultaneously::
select([mytable]).\\
- with_hint(
- mytable, "+ index(%(name)s ix_mytable)", 'oracle').\\
+ with_hint(mytable, "index(%(name)s ix_mytable)", 'oracle').\\
with_hint(mytable, "WITH INDEX ix_mytable", 'sybase')
+ .. seealso::
+
+ :meth:`.Select.with_statement_hint`
+
"""
- self._hints = self._hints.union(
- {(selectable, dialect_name): text})
+ if selectable is None:
+ self._statement_hints += ((dialect_name, text), )
+ else:
+ self._hints = self._hints.union(
+ {(selectable, dialect_name): text})
@property
def type(self):
@@ -2499,15 +2626,16 @@ class Select(HasPrefixes, GenerativeSelect):
@_memoized_property
def _label_resolve_dict(self):
- d = dict(
+ with_cols = dict(
(c._resolve_label or c._label or c.key, c)
for c in _select_iterables(self._raw_columns)
if c._allow_label_resolve)
- d.update(
+ only_froms = dict(
(c.key, c) for c in
_select_iterables(self.froms) if c._allow_label_resolve)
+ with_cols.update(only_froms)
- return d
+ return with_cols, only_froms
def is_derived_from(self, fromclause):
if self in fromclause._cloned_set:
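
Two of the behavioral additions above in brief: ``limit()`` / ``offset()``
now accept SQL expressions, and ``with_statement_hint()`` attaches a hint to
the statement as a whole. A sketch, with the hint text purely hypothetical::

    from sqlalchemy import bindparam, select

    # LIMIT as a deferred bound parameter (new in 1.0.0)
    stmt = select([users]).limit(bindparam('my_limit'))

    # statement-level hint; the directive text here is made up
    stmt = stmt.with_statement_hint("SOME_DIRECTIVE", dialect_name='mysql')
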
diff --git a/lib/sqlalchemy/sql/util.py b/lib/sqlalchemy/sql/util.py
index d6f3b5915..fbbe15da3 100644
--- a/lib/sqlalchemy/sql/util.py
+++ b/lib/sqlalchemy/sql/util.py
@@ -16,7 +16,7 @@ from itertools import chain
from collections import deque
from .elements import BindParameter, ColumnClause, ColumnElement, \
- Null, UnaryExpression, literal_column, Label
+ Null, UnaryExpression, literal_column, Label, _label_reference
from .selectable import ScalarSelect, Join, FromClause, FromGrouping
from .schema import Column
@@ -161,6 +161,8 @@ def unwrap_order_by(clause):
not isinstance(t, UnaryExpression) or
not operators.is_ordering_modifier(t.modifier)
):
+ if isinstance(t, _label_reference):
+ t = t.element
cols.add(t)
else:
for c in t.get_children():
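
For context on the ``_label_reference`` unwrapping added here: when an ORDER
BY names a label, the expression is wrapped so it renders as the label name,
while utilities such as ``unwrap_order_by()`` still need the element
underneath. A rough sketch::

    name_label = users.c.name.label('n')
    stmt = select([name_label]).order_by(name_label)
    # the ORDER BY element is a _label_reference wrapping the Label;
    # unwrap_order_by(stmt._order_by_clause) now extracts the Label
    # itself rather than the opaque reference wrapper
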
diff --git a/lib/sqlalchemy/testing/engines.py b/lib/sqlalchemy/testing/engines.py
index 3a3f5be10..0f6f59401 100644
--- a/lib/sqlalchemy/testing/engines.py
+++ b/lib/sqlalchemy/testing/engines.py
@@ -37,8 +37,6 @@ class ConnectionKiller(object):
def _safe(self, fn):
try:
fn()
- except (SystemExit, KeyboardInterrupt):
- raise
except Exception as e:
warnings.warn(
"testing_reaper couldn't "
@@ -168,8 +166,6 @@ class ReconnectFixture(object):
def _safe(self, fn):
try:
fn()
- except (SystemExit, KeyboardInterrupt):
- raise
except Exception as e:
warnings.warn(
"ReconnectFixture couldn't "
diff --git a/lib/sqlalchemy/testing/exclusions.py b/lib/sqlalchemy/testing/exclusions.py
index c9f81c8b9..e3d91300d 100644
--- a/lib/sqlalchemy/testing/exclusions.py
+++ b/lib/sqlalchemy/testing/exclusions.py
@@ -135,7 +135,7 @@ class compound(object):
name, fail._as_string(config), str(exc_value))))
break
else:
- raise exc_type, exc_value, exc_traceback
+ util.raise_from_cause(ex)
def _expect_success(self, config, name='block'):
if not self.fails:
@@ -180,8 +180,7 @@ class Predicate(object):
@classmethod
def as_predicate(cls, predicate, description=None):
if isinstance(predicate, compound):
- return cls.as_predicate(predicate.fails.union(predicate.skips))
-
+ return cls.as_predicate(predicate.enabled_for_config, description)
elif isinstance(predicate, Predicate):
if description and predicate.description is None:
predicate.description = description
diff --git a/lib/sqlalchemy/testing/plugin/bootstrap.py b/lib/sqlalchemy/testing/plugin/bootstrap.py
new file mode 100644
index 000000000..497fcb7e5
--- /dev/null
+++ b/lib/sqlalchemy/testing/plugin/bootstrap.py
@@ -0,0 +1,44 @@
+"""
+Bootstrapper for nose/pytest plugins.
+
+The entire rationale for this system is to get the modules in plugin/
+imported without importing all of the supporting library, so that we can
+set up things for testing before coverage starts.
+
+The rationale for all of plugin/ being *in* the supporting library in the
+first place is so that the testing and plugin suite is available to other
+libraries, mainly external SQLAlchemy and Alembic dialects, to make use
+of the same test environment and standard suites available to
+SQLAlchemy/Alembic themselves without the need to ship/install a separate
+package outside of SQLAlchemy.
+
+NOTE: copied/adapted from SQLAlchemy master for backwards compatibility;
+this should be removable when Alembic targets SQLAlchemy 1.0.0.
+
+"""
+
+import os
+import sys
+
+bootstrap_file = locals()['bootstrap_file']
+to_bootstrap = locals()['to_bootstrap']
+
+
+def load_file_as_module(name):
+ path = os.path.join(os.path.dirname(bootstrap_file), "%s.py" % name)
+ if sys.version_info >= (3, 3):
+ from importlib import machinery
+ mod = machinery.SourceFileLoader(name, path).load_module()
+ else:
+ import imp
+ mod = imp.load_source(name, path)
+ return mod
+
+if to_bootstrap == "pytest":
+ sys.modules["sqla_plugin_base"] = load_file_as_module("plugin_base")
+ sys.modules["sqla_pytestplugin"] = load_file_as_module("pytestplugin")
+elif to_bootstrap == "nose":
+ sys.modules["sqla_plugin_base"] = load_file_as_module("plugin_base")
+ sys.modules["sqla_noseplugin"] = load_file_as_module("noseplugin")
+else:
+ raise Exception("unknown bootstrap: %s" % to_bootstrap) # noqa
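
bootstrap.py is not imported as a normal module: a runner reads the file,
injects bootstrap_file and to_bootstrap into the namespace, and exec()s it,
exactly as the sqla_nose.py and conftest.py hunks at the end of this patch
do. Condensed from those hunks:

    import os

    bootstrap_file = os.path.join(
        "lib", "sqlalchemy", "testing", "plugin", "bootstrap.py")

    with open(bootstrap_file) as f:
        code = compile(f.read(), "bootstrap.py", "exec")
        to_bootstrap = "nose"  # or "pytest"
        exec(code, globals(), locals())

    # load_module()/load_source() also register the plugin under its plain
    # name in sys.modules, so this import works without touching sqlalchemy
    from noseplugin import NoseSQLAlchemy  # noqa
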
diff --git a/lib/sqlalchemy/testing/plugin/noseplugin.py b/lib/sqlalchemy/testing/plugin/noseplugin.py
index 6ef539142..538087770 100644
--- a/lib/sqlalchemy/testing/plugin/noseplugin.py
+++ b/lib/sqlalchemy/testing/plugin/noseplugin.py
@@ -12,6 +12,14 @@ way (e.g. as a package-less import).
"""
+try:
+ # installed by bootstrap.py
+ import sqla_plugin_base as plugin_base
+except ImportError:
+ # assume we're a package, use traditional import
+ from . import plugin_base
+
+
import os
import sys
@@ -19,16 +27,6 @@ from nose.plugins import Plugin
fixtures = None
py3k = sys.version_info >= (3, 0)
-# no package imports yet! this prevents us from tripping coverage
-# too soon.
-path = os.path.join(os.path.dirname(__file__), "plugin_base.py")
-if sys.version_info >= (3, 3):
- from importlib import machinery
- plugin_base = machinery.SourceFileLoader(
- "plugin_base", path).load_module()
-else:
- import imp
- plugin_base = imp.load_source("plugin_base", path)
class NoseSQLAlchemy(Plugin):
@@ -58,10 +56,10 @@ class NoseSQLAlchemy(Plugin):
plugin_base.set_coverage_flag(options.enable_plugin_coverage)
+ def begin(self):
global fixtures
- from sqlalchemy.testing import fixtures
+ from sqlalchemy.testing import fixtures # noqa
- def begin(self):
plugin_base.post_begin()
def describeTest(self, test):
@@ -72,19 +70,23 @@ class NoseSQLAlchemy(Plugin):
def wantMethod(self, fn):
if py3k:
+ if not hasattr(fn.__self__, 'cls'):
+ return False
cls = fn.__self__.cls
else:
cls = fn.im_class
- print "METH:", fn, "CLS:", cls
return plugin_base.want_method(cls, fn)
def wantClass(self, cls):
return plugin_base.want_class(cls)
def beforeTest(self, test):
- plugin_base.before_test(test,
- test.test.cls.__module__,
- test.test.cls, test.test.method.__name__)
+ if not hasattr(test.test, 'cls'):
+ return
+ plugin_base.before_test(
+ test,
+ test.test.cls.__module__,
+ test.test.cls, test.test.method.__name__)
def afterTest(self, test):
plugin_base.after_test(test)
diff --git a/lib/sqlalchemy/testing/plugin/plugin_base.py b/lib/sqlalchemy/testing/plugin/plugin_base.py
index 7ba31d3e3..6696427dc 100644
--- a/lib/sqlalchemy/testing/plugin/plugin_base.py
+++ b/lib/sqlalchemy/testing/plugin/plugin_base.py
@@ -31,8 +31,6 @@ if py3k:
else:
import ConfigParser as configparser
-FOLLOWER_IDENT = None
-
# late imports
fixtures = None
engines = None
@@ -72,8 +70,6 @@ def setup_options(make_option):
help="Drop all tables in the target database first")
make_option("--backend-only", action="store_true", dest="backend_only",
help="Run only tests marked with __backend__")
- make_option("--mockpool", action="store_true", dest="mockpool",
- help="Use mock pool (asserts only one connection used)")
make_option("--low-connections", action="store_true",
dest="low_connections",
help="Use a low number of distinct connections - "
@@ -95,14 +91,6 @@ def setup_options(make_option):
make_option("--exclude-tag", action="callback", callback=_exclude_tag,
type="string",
help="Exclude tests with tag <tag>")
- make_option("--serverside", action="store_true",
- help="Turn on server side cursors for PG")
- make_option("--mysql-engine", action="store",
- dest="mysql_engine", default=None,
- help="Use the specified MySQL storage engine for all tables, "
- "default is a db-default/InnoDB combo.")
- make_option("--tableopts", action="append", dest="tableopts", default=[],
- help="Add a dialect-specific table option, key=value")
make_option("--write-profiles", action="store_true",
dest="write_profiles", default=False,
help="Write/update profiling data.")
@@ -115,8 +103,8 @@ def configure_follower(follower_ident):
database creation.
"""
- global FOLLOWER_IDENT
- FOLLOWER_IDENT = follower_ident
+ from sqlalchemy.testing import provision
+ provision.FOLLOWER_IDENT = follower_ident
def memoize_important_follower_config(dict_):
@@ -177,12 +165,14 @@ def post_begin():
global util, fixtures, engines, exclusions, \
assertions, warnings, profiling,\
config, testing
- from sqlalchemy import testing
- from sqlalchemy.testing import fixtures, engines, exclusions, \
- assertions, warnings, profiling, config
- from sqlalchemy import util
+ from sqlalchemy import testing # noqa
+ from sqlalchemy.testing import fixtures, engines, exclusions # noqa
+ from sqlalchemy.testing import assertions, warnings, profiling # noqa
+ from sqlalchemy.testing import config # noqa
+ from sqlalchemy import util # noqa
warnings.setup_filters()
+
def _log(opt_str, value, parser):
global logging
if not logging:
@@ -234,12 +224,6 @@ def _setup_options(opt, file_config):
@pre
-def _server_side_cursors(options, file_config):
- if options.serverside:
- db_opts['server_side_cursors'] = True
-
-
-@pre
def _monkeypatch_cdecimal(options, file_config):
if options.cdecimal:
import cdecimal
@@ -250,7 +234,7 @@ def _monkeypatch_cdecimal(options, file_config):
def _engine_uri(options, file_config):
from sqlalchemy.testing import config
from sqlalchemy import testing
- from sqlalchemy.testing.plugin import provision
+ from sqlalchemy.testing import provision
if options.dburi:
db_urls = list(options.dburi)
@@ -273,20 +257,13 @@ def _engine_uri(options, file_config):
for db_url in db_urls:
cfg = provision.setup_config(
- db_url, db_opts, options, file_config, FOLLOWER_IDENT)
+ db_url, db_opts, options, file_config, provision.FOLLOWER_IDENT)
if not config._current:
cfg.set_as_current(cfg, testing)
@post
-def _engine_pool(options, file_config):
- if options.mockpool:
- from sqlalchemy import pool
- db_opts['poolclass'] = pool.AssertionPool
-
-
-@post
def _requirements(options, file_config):
requirement_cls = file_config.get('sqla_testing', "requirement_cls")
@@ -369,19 +346,6 @@ def _prep_testing_database(options, file_config):
@post
-def _set_table_options(options, file_config):
- from sqlalchemy.testing import schema
-
- table_options = schema.table_options
- for spec in options.tableopts:
- key, value = spec.split('=')
- table_options[key] = value
-
- if options.mysql_engine:
- table_options['mysql_engine'] = options.mysql_engine
-
-
-@post
def _reverse_topological(options, file_config):
if options.reversetop:
from sqlalchemy.orm.util import randomize_unitofwork
diff --git a/lib/sqlalchemy/testing/plugin/pytestplugin.py b/lib/sqlalchemy/testing/plugin/pytestplugin.py
index 005942913..4bbc8ed9a 100644
--- a/lib/sqlalchemy/testing/plugin/pytestplugin.py
+++ b/lib/sqlalchemy/testing/plugin/pytestplugin.py
@@ -1,7 +1,13 @@
+try:
+ # installed by bootstrap.py
+ import sqla_plugin_base as plugin_base
+except ImportError:
+ # assume we're a package, use traditional import
+ from . import plugin_base
+
import pytest
import argparse
import inspect
-from . import plugin_base
import collections
import itertools
@@ -42,6 +48,8 @@ def pytest_configure(config):
plugin_base.set_coverage_flag(bool(getattr(config.option,
"cov_source", False)))
+
+def pytest_sessionstart(session):
plugin_base.post_begin()
if has_xdist:
@@ -54,11 +62,11 @@ if has_xdist:
plugin_base.memoize_important_follower_config(node.slaveinput)
node.slaveinput["follower_ident"] = "test_%s" % next(_follower_count)
- from . import provision
+ from sqlalchemy.testing import provision
provision.create_follower_db(node.slaveinput["follower_ident"])
def pytest_testnodedown(node, error):
- from . import provision
+ from sqlalchemy.testing import provision
provision.drop_follower_db(node.slaveinput["follower_ident"])
diff --git a/lib/sqlalchemy/testing/plugin/provision.py b/lib/sqlalchemy/testing/provision.py
index c6b9030f5..c8f7fdf30 100644
--- a/lib/sqlalchemy/testing/plugin/provision.py
+++ b/lib/sqlalchemy/testing/provision.py
@@ -1,8 +1,10 @@
from sqlalchemy.engine import url as sa_url
from sqlalchemy import text
from sqlalchemy.util import compat
-from .. import config, engines
-import os
+from . import config, engines
+
+
+FOLLOWER_IDENT = None
class register(object):
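
The register class seen in context here is the dispatch point for
per-backend provisioning: operations such as create_db are register()
instances, and backend-specific implementations like the _pg_create_db /
_mysql_create_db functions in the hunks below attach to them. A hedged
sketch of how a third-party dialect would plug in, assuming the for_db()
decorator API and a hypothetical backend name:

    from sqlalchemy.testing.provision import create_db

    @create_db.for_db("mydialect")  # hypothetical backend name
    def _mydialect_create_db(cfg, eng, ident):
        # backend-specific CREATE DATABASE logic for follower idents
        pass
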
@@ -118,7 +120,7 @@ def _pg_create_db(cfg, eng, ident):
isolation_level="AUTOCOMMIT") as conn:
try:
_pg_drop_db(cfg, conn, ident)
- except:
+ except Exception:
pass
currentdb = conn.scalar("select current_database()")
conn.execute("CREATE DATABASE %s TEMPLATE %s" % (ident, currentdb))
@@ -129,7 +131,7 @@ def _mysql_create_db(cfg, eng, ident):
with eng.connect() as conn:
try:
_mysql_drop_db(cfg, conn, ident)
- except:
+ except Exception:
pass
conn.execute("CREATE DATABASE %s" % ident)
conn.execute("CREATE DATABASE %s_test_schema" % ident)
@@ -171,15 +173,15 @@ def _mysql_drop_db(cfg, eng, ident):
with eng.connect() as conn:
try:
conn.execute("DROP DATABASE %s_test_schema" % ident)
- except:
+ except Exception:
pass
try:
conn.execute("DROP DATABASE %s_test_schema_2" % ident)
- except:
+ except Exception:
pass
try:
conn.execute("DROP DATABASE %s" % ident)
- except:
+ except Exception:
pass
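
The except: to except Exception: changes in this file, like the removal of
the explicit SystemExit/KeyboardInterrupt re-raise in engines.py above,
hinge on the same fact: Exception does not catch BaseException subclasses,
so a Ctrl-C during teardown is no longer swallowed. A sketch, not library
code:

    def drop_quietly(conn, ident):
        # drop a database that may not exist yet
        try:
            conn.execute("DROP DATABASE %s" % ident)
        except Exception:
            # a bare "except:" would also trap KeyboardInterrupt and
            # SystemExit here, hiding a Ctrl-C during test teardown
            pass
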
diff --git a/lib/sqlalchemy/testing/requirements.py b/lib/sqlalchemy/testing/requirements.py
index a04bcbbdd..da3e3128a 100644
--- a/lib/sqlalchemy/testing/requirements.py
+++ b/lib/sqlalchemy/testing/requirements.py
@@ -314,6 +314,20 @@ class SuiteRequirements(Requirements):
return exclusions.open()
@property
+ def temp_table_reflection(self):
+ return exclusions.open()
+
+ @property
+ def temp_table_names(self):
+ """target dialect supports listing of temporary table names"""
+ return exclusions.closed()
+
+ @property
+ def temporary_views(self):
+ """target database supports temporary views"""
+ return exclusions.closed()
+
+ @property
def index_reflection(self):
return exclusions.open()
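
The new requirements default to closed() so existing dialect test suites
keep their current behavior; a dialect that supports a feature opts in by
overriding the property in the requirements class named by its setup.cfg
requirement_cls setting. A hypothetical override following the suite
conventions:

    from sqlalchemy.testing import exclusions
    from sqlalchemy.testing.requirements import SuiteRequirements


    class Requirements(SuiteRequirements):
        @property
        def temp_table_names(self):
            """this dialect supports listing temporary table names"""
            return exclusions.open()
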
diff --git a/lib/sqlalchemy/testing/runner.py b/lib/sqlalchemy/testing/runner.py
index df254520b..23d7a0a91 100644
--- a/lib/sqlalchemy/testing/runner.py
+++ b/lib/sqlalchemy/testing/runner.py
@@ -30,7 +30,7 @@ SQLAlchemy itself is possible.
"""
-from sqlalchemy.testing.plugin.noseplugin import NoseSQLAlchemy
+from .plugin.noseplugin import NoseSQLAlchemy
import nose
diff --git a/lib/sqlalchemy/testing/suite/test_insert.py b/lib/sqlalchemy/testing/suite/test_insert.py
index 92d3d93e5..38519dfb9 100644
--- a/lib/sqlalchemy/testing/suite/test_insert.py
+++ b/lib/sqlalchemy/testing/suite/test_insert.py
@@ -4,7 +4,7 @@ from .. import exclusions
from ..assertions import eq_
from .. import engines
-from sqlalchemy import Integer, String, select, util
+from sqlalchemy import Integer, String, select, literal_column, literal
from ..schema import Table, Column
@@ -90,6 +90,13 @@ class InsertBehaviorTest(fixtures.TablesTest):
Column('id', Integer, primary_key=True, autoincrement=False),
Column('data', String(50))
)
+ Table('includes_defaults', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(50)),
+ Column('x', Integer, default=5),
+ Column('y', Integer,
+ default=literal_column("2", type_=Integer) + literal(2)))
def test_autoclose_on_insert(self):
if requirements.returning.enabled:
@@ -158,6 +165,34 @@ class InsertBehaviorTest(fixtures.TablesTest):
("data3", ), ("data3", )]
)
+ @requirements.insert_from_select
+ def test_insert_from_select_with_defaults(self):
+ table = self.tables.includes_defaults
+ config.db.execute(
+ table.insert(),
+ [
+ dict(id=1, data="data1"),
+ dict(id=2, data="data2"),
+ dict(id=3, data="data3"),
+ ]
+ )
+
+ config.db.execute(
+ table.insert(inline=True).
+ from_select(("id", "data",),
+ select([table.c.id + 5, table.c.data]).
+ where(table.c.data.in_(["data2", "data3"]))
+ ),
+ )
+
+ eq_(
+ config.db.execute(
+ select([table]).order_by(table.c.data, table.c.id)
+ ).fetchall(),
+ [(1, 'data1', 5, 4), (2, 'data2', 5, 4),
+ (7, 'data2', 5, 4), (3, 'data3', 5, 4), (8, 'data3', 5, 4)]
+ )
+
class ReturningTest(fixtures.TablesTest):
run_create_tables = 'each'
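
test_insert_from_select_with_defaults above exercises the 1.0 behavior
where an INSERT ... FROM SELECT renders column defaults omitted from the
column list into the SELECT itself, which is why x and y still come out as
5 and 4 on the copied rows. Approximately the statement emitted for that
from_select() call; parameter style and inlining vary by backend:

    # a sketch of the rendered SQL, not dialect-exact:
    # INSERT INTO includes_defaults (id, data, x, y)
    # SELECT includes_defaults.id + 5, includes_defaults.data, ?, 2 + ?
    # FROM includes_defaults
    # WHERE includes_defaults.data IN (?, ?)
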
diff --git a/lib/sqlalchemy/testing/suite/test_reflection.py b/lib/sqlalchemy/testing/suite/test_reflection.py
index 575a38db9..08b858b47 100644
--- a/lib/sqlalchemy/testing/suite/test_reflection.py
+++ b/lib/sqlalchemy/testing/suite/test_reflection.py
@@ -95,6 +95,39 @@ class ComponentReflectionTest(fixtures.TablesTest):
cls.define_index(metadata, users)
if testing.requires.view_column_reflection.enabled:
cls.define_views(metadata, schema)
+ if not schema and testing.requires.temp_table_reflection.enabled:
+ cls.define_temp_tables(metadata)
+
+ @classmethod
+ def define_temp_tables(cls, metadata):
+ # cheat a bit, we should fix this with some dialect-level
+ # temp table fixture
+ if testing.against("oracle"):
+ kw = {
+ 'prefixes': ["GLOBAL TEMPORARY"],
+ 'oracle_on_commit': 'PRESERVE ROWS'
+ }
+ else:
+ kw = {
+ 'prefixes': ["TEMPORARY"],
+ }
+
+ user_tmp = Table(
+ "user_tmp", metadata,
+ Column("id", sa.INT, primary_key=True),
+ Column('name', sa.VARCHAR(50)),
+ Column('foo', sa.INT),
+ sa.UniqueConstraint('name', name='user_tmp_uq'),
+ sa.Index("user_tmp_ix", "foo"),
+ **kw
+ )
+ if testing.requires.view_reflection.enabled and \
+ testing.requires.temporary_views.enabled:
+ event.listen(
+ user_tmp, "after_create",
+ DDL("create temporary view user_tmp_v as "
+ "select * from user_tmp")
+ )
@classmethod
def define_index(cls, metadata, users):
@@ -147,6 +180,7 @@ class ComponentReflectionTest(fixtures.TablesTest):
users, addresses, dingalings = self.tables.users, \
self.tables.email_addresses, self.tables.dingalings
insp = inspect(meta.bind)
+
if table_type == 'view':
table_names = insp.get_view_names(schema)
table_names.sort()
@@ -162,6 +196,20 @@ class ComponentReflectionTest(fixtures.TablesTest):
answer = ['dingalings', 'email_addresses', 'users']
eq_(sorted(table_names), answer)
+ @testing.requires.temp_table_names
+ def test_get_temp_table_names(self):
+ insp = inspect(testing.db)
+ temp_table_names = insp.get_temp_table_names()
+ eq_(sorted(temp_table_names), ['user_tmp'])
+
+ @testing.requires.view_reflection
+ @testing.requires.temp_table_names
+ @testing.requires.temporary_views
+ def test_get_temp_view_names(self):
+ insp = inspect(self.metadata.bind)
+ temp_table_names = insp.get_temp_view_names()
+ eq_(sorted(temp_table_names), ['user_tmp_v'])
+
@testing.requires.table_reflection
def test_get_table_names(self):
self._test_get_table_names()
@@ -294,6 +342,28 @@ class ComponentReflectionTest(fixtures.TablesTest):
def test_get_columns_with_schema(self):
self._test_get_columns(schema=testing.config.test_schema)
+ @testing.requires.temp_table_reflection
+ def test_get_temp_table_columns(self):
+ meta = MetaData(testing.db)
+ user_tmp = self.tables.user_tmp
+ insp = inspect(meta.bind)
+ cols = insp.get_columns('user_tmp')
+ self.assert_(len(cols) > 0, len(cols))
+
+ for i, col in enumerate(user_tmp.columns):
+ eq_(col.name, cols[i]['name'])
+
+ @testing.requires.temp_table_reflection
+ @testing.requires.view_column_reflection
+ @testing.requires.temporary_views
+ def test_get_temp_view_columns(self):
+ insp = inspect(self.metadata.bind)
+ cols = insp.get_columns('user_tmp_v')
+ eq_(
+ [col['name'] for col in cols],
+ ['id', 'name', 'foo']
+ )
+
@testing.requires.view_column_reflection
def test_get_view_columns(self):
self._test_get_columns(table_type='view')
@@ -426,6 +496,28 @@ class ComponentReflectionTest(fixtures.TablesTest):
def test_get_unique_constraints(self):
self._test_get_unique_constraints()
+ @testing.requires.temp_table_reflection
+ @testing.requires.unique_constraint_reflection
+ def test_get_temp_table_unique_constraints(self):
+ insp = inspect(self.metadata.bind)
+ reflected = insp.get_unique_constraints('user_tmp')
+ for refl in reflected:
+ # Different dialects handle duplicated indexes and constraints
+ # differently, so ignore this flag
+ refl.pop('duplicates_index', None)
+ eq_(reflected, [{'column_names': ['name'], 'name': 'user_tmp_uq'}])
+
+ @testing.requires.temp_table_reflection
+ def test_get_temp_table_indexes(self):
+ insp = inspect(self.metadata.bind)
+ indexes = insp.get_indexes('user_tmp')
+ eq_(
+ # TODO: we need to add better filtering for indexes/uq constraints
+ # that are doubled up
+ [idx for idx in indexes if idx['name'] == 'user_tmp_ix'],
+ [{'unique': False, 'column_names': ['foo'], 'name': 'user_tmp_ix'}]
+ )
+
@testing.requires.unique_constraint_reflection
@testing.requires.schemas
def test_get_unique_constraints_with_schema(self):
@@ -466,6 +558,9 @@ class ComponentReflectionTest(fixtures.TablesTest):
)
for orig, refl in zip(uniques, reflected):
+ # Different dialects handle duplicated indexes and constraints
+ # differently, so ignore this flag
+ refl.pop('duplicates_index', None)
eq_(orig, refl)
@testing.provide_metadata
diff --git a/lib/sqlalchemy/util/__init__.py b/lib/sqlalchemy/util/__init__.py
index c963b18c3..dfed5b90a 100644
--- a/lib/sqlalchemy/util/__init__.py
+++ b/lib/sqlalchemy/util/__init__.py
@@ -33,7 +33,8 @@ from .langhelpers import iterate_attributes, class_hierarchy, \
duck_type_collection, assert_arg_type, symbol, dictlike_iteritems,\
classproperty, set_creation_order, warn_exception, warn, NoneType,\
constructor_copy, methods_equivalent, chop_traceback, asint,\
- generic_repr, counter, PluginLoader, hybridmethod, safe_reraise,\
+ generic_repr, counter, PluginLoader, hybridproperty, hybridmethod, \
+ safe_reraise,\
get_callable_argspec, only_once, attrsetter, ellipses_string, \
warn_limited
diff --git a/lib/sqlalchemy/util/langhelpers.py b/lib/sqlalchemy/util/langhelpers.py
index 76f85f605..5c17bea88 100644
--- a/lib/sqlalchemy/util/langhelpers.py
+++ b/lib/sqlalchemy/util/langhelpers.py
@@ -134,7 +134,8 @@ def public_factory(target, location):
fn = target.__init__
callable_ = target
doc = "Construct a new :class:`.%s` object. \n\n"\
- "This constructor is mirrored as a public API function; see :func:`~%s` "\
+ "This constructor is mirrored as a public API function; "\
+ "see :func:`~%s` "\
"for a full usage and argument description." % (
target.__name__, location, )
else:
@@ -155,6 +156,7 @@ def %(name)s(%(args)s):
exec(code, env)
decorated = env[location_name]
decorated.__doc__ = fn.__doc__
+ decorated.__module__ = "sqlalchemy" + location.rsplit(".", 1)[0]
if compat.py2k or hasattr(fn, '__func__'):
fn.__func__.__doc__ = doc
else:
@@ -490,7 +492,7 @@ def generic_repr(obj, additional_kw=(), to_inspect=None, omit_kwarg=()):
val = getattr(obj, arg, missing)
if val is not missing and val != defval:
output.append('%s=%r' % (arg, val))
- except:
+ except Exception:
pass
if additional_kw:
@@ -499,7 +501,7 @@ def generic_repr(obj, additional_kw=(), to_inspect=None, omit_kwarg=()):
val = getattr(obj, arg, missing)
if val is not missing and val != defval:
output.append('%s=%r' % (arg, val))
- except:
+ except Exception:
pass
return "%s(%s)" % (obj.__class__.__name__, ", ".join(output))
@@ -1090,10 +1092,23 @@ class classproperty(property):
return desc.fget(cls)
+class hybridproperty(object):
+ def __init__(self, func):
+ self.func = func
+
+ def __get__(self, instance, owner):
+ if instance is None:
+ clsval = self.func(owner)
+ clsval.__doc__ = self.func.__doc__
+ return clsval
+ else:
+ return self.func(instance)
+
+
class hybridmethod(object):
"""Decorate a function as cls- or instance- level."""
- def __init__(self, func, expr=None):
+ def __init__(self, func):
self.func = func
def __get__(self, instance, owner):
@@ -1185,7 +1200,7 @@ def warn_exception(func, *args, **kwargs):
"""
try:
return func(*args, **kwargs)
- except:
+ except Exception:
warn("%s('%s') ignored" % sys.exc_info()[0:2])
diff --git a/setup.cfg b/setup.cfg
index 698c4b037..51a4e30bf 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -26,6 +26,13 @@ profile_file=test/profiles.txt
# create database link test_link connect to scott identified by tiger using 'xe';
oracle_db_link = test_link
+# host name of a postgres database that has the postgres_fdw extension.
+# to create this run:
+# CREATE EXTENSION postgres_fdw;
+# GRANT USAGE ON FOREIGN DATA WRAPPER postgres_fdw TO public;
+# this can be localhost to create a loopback foreign table
+# postgres_test_db_link = localhost
+
[db]
default=sqlite:///:memory:
diff --git a/sqla_nose.py b/sqla_nose.py
index f89a1dce0..b977f4bf5 100755
--- a/sqla_nose.py
+++ b/sqla_nose.py
@@ -8,22 +8,25 @@ installs SQLAlchemy's testing plugin into the local environment.
"""
import sys
import nose
-import warnings
+import os
-from os import path
for pth in ['./lib']:
- sys.path.insert(0, path.join(path.dirname(path.abspath(__file__)), pth))
+ sys.path.insert(
+ 0, os.path.join(os.path.dirname(os.path.abspath(__file__)), pth))
-# installing without importing SQLAlchemy, so that coverage includes
-# SQLAlchemy itself.
-path = "lib/sqlalchemy/testing/plugin/noseplugin.py"
-if sys.version_info >= (3, 3):
- from importlib import machinery
- noseplugin = machinery.SourceFileLoader("noseplugin", path).load_module()
-else:
- import imp
- noseplugin = imp.load_source("noseplugin", path)
+# use bootstrapping so that test plugins are loaded
+# without touching the main library before coverage starts
+bootstrap_file = os.path.join(
+ os.path.dirname(__file__), "lib", "sqlalchemy",
+ "testing", "plugin", "bootstrap.py"
+)
+with open(bootstrap_file) as f:
+ code = compile(f.read(), "bootstrap.py", 'exec')
+ to_bootstrap = "nose"
+ exec(code, globals(), locals())
-nose.main(addplugins=[noseplugin.NoseSQLAlchemy()])
+
+from noseplugin import NoseSQLAlchemy
+nose.main(addplugins=[NoseSQLAlchemy()])
diff --git a/test/base/test_events.py b/test/base/test_events.py
index 30b728cd3..89379961e 100644
--- a/test/base/test_events.py
+++ b/test/base/test_events.py
@@ -192,7 +192,7 @@ class EventsTest(fixtures.TestBase):
class NamedCallTest(fixtures.TestBase):
- def setUp(self):
+ def _fixture(self):
class TargetEventsOne(event.Events):
def event_one(self, x, y):
pass
@@ -205,48 +205,104 @@ class NamedCallTest(fixtures.TestBase):
class TargetOne(object):
dispatch = event.dispatcher(TargetEventsOne)
- self.TargetOne = TargetOne
+ return TargetOne
- def tearDown(self):
- event.base._remove_dispatcher(
- self.TargetOne.__dict__['dispatch'].events)
+ def _wrapped_fixture(self):
+ class TargetEvents(event.Events):
+ @classmethod
+ def _listen(cls, event_key):
+ fn = event_key._listen_fn
+
+ def adapt(*args):
+ fn(*["adapted %s" % arg for arg in args])
+ event_key = event_key.with_wrapper(adapt)
+
+ event_key.base_listen()
+
+ def event_one(self, x, y):
+ pass
+
+ def event_five(self, x, y, z, q):
+ pass
+
+ class Target(object):
+ dispatch = event.dispatcher(TargetEvents)
+ return Target
def test_kw_accept(self):
+ TargetOne = self._fixture()
+
canary = Mock()
- @event.listens_for(self.TargetOne, "event_one", named=True)
+ @event.listens_for(TargetOne, "event_one", named=True)
def handler1(**kw):
canary(kw)
- self.TargetOne().dispatch.event_one(4, 5)
+ TargetOne().dispatch.event_one(4, 5)
eq_(
canary.mock_calls,
[call({"x": 4, "y": 5})]
)
+ def test_kw_accept_wrapped(self):
+ TargetOne = self._wrapped_fixture()
+
+ canary = Mock()
+
+ @event.listens_for(TargetOne, "event_one", named=True)
+ def handler1(**kw):
+ canary(kw)
+
+ TargetOne().dispatch.event_one(4, 5)
+
+ eq_(
+ canary.mock_calls,
+ [call({'y': 'adapted 5', 'x': 'adapted 4'})]
+ )
+
def test_partial_kw_accept(self):
+ TargetOne = self._fixture()
+
canary = Mock()
- @event.listens_for(self.TargetOne, "event_five", named=True)
+ @event.listens_for(TargetOne, "event_five", named=True)
def handler1(z, y, **kw):
canary(z, y, kw)
- self.TargetOne().dispatch.event_five(4, 5, 6, 7)
+ TargetOne().dispatch.event_five(4, 5, 6, 7)
eq_(
canary.mock_calls,
[call(6, 5, {"x": 4, "q": 7})]
)
+ def test_partial_kw_accept_wrapped(self):
+ TargetOne = self._wrapped_fixture()
+
+ canary = Mock()
+
+ @event.listens_for(TargetOne, "event_five", named=True)
+ def handler1(z, y, **kw):
+ canary(z, y, kw)
+
+ TargetOne().dispatch.event_five(4, 5, 6, 7)
+
+ eq_(
+ canary.mock_calls,
+ [call('adapted 6', 'adapted 5',
+ {'q': 'adapted 7', 'x': 'adapted 4'})]
+ )
+
def test_kw_accept_plus_kw(self):
+ TargetOne = self._fixture()
canary = Mock()
- @event.listens_for(self.TargetOne, "event_two", named=True)
+ @event.listens_for(TargetOne, "event_two", named=True)
def handler1(**kw):
canary(kw)
- self.TargetOne().dispatch.event_two(4, 5, z=8, q=5)
+ TargetOne().dispatch.event_two(4, 5, z=8, q=5)
eq_(
canary.mock_calls,
@@ -996,6 +1052,25 @@ class RemovalTest(fixtures.TestBase):
dispatch = event.dispatcher(TargetEvents)
return Target
+ def _wrapped_fixture(self):
+ class TargetEvents(event.Events):
+ @classmethod
+ def _listen(cls, event_key):
+ fn = event_key._listen_fn
+
+ def adapt(value):
+ fn("adapted " + value)
+ event_key = event_key.with_wrapper(adapt)
+
+ event_key.base_listen()
+
+ def event_one(self, x):
+ pass
+
+ class Target(object):
+ dispatch = event.dispatcher(TargetEvents)
+ return Target
+
def test_clslevel(self):
Target = self._fixture()
@@ -1194,3 +1269,71 @@ class RemovalTest(fixtures.TestBase):
"deque mutated during iteration",
t1.dispatch.event_one
)
+
+ def test_remove_plain_named(self):
+ Target = self._fixture()
+
+ listen_one = Mock()
+ t1 = Target()
+ event.listen(t1, "event_one", listen_one, named=True)
+ t1.dispatch.event_one("t1")
+
+ eq_(listen_one.mock_calls, [call(x="t1")])
+ event.remove(t1, "event_one", listen_one)
+ t1.dispatch.event_one("t2")
+
+ eq_(listen_one.mock_calls, [call(x="t1")])
+
+ def test_remove_wrapped_named(self):
+ Target = self._wrapped_fixture()
+
+ listen_one = Mock()
+ t1 = Target()
+ event.listen(t1, "event_one", listen_one, named=True)
+ t1.dispatch.event_one("t1")
+
+ eq_(listen_one.mock_calls, [call(x="adapted t1")])
+ event.remove(t1, "event_one", listen_one)
+ t1.dispatch.event_one("t2")
+
+ eq_(listen_one.mock_calls, [call(x="adapted t1")])
+
+ def test_double_event_nonwrapped(self):
+ Target = self._fixture()
+
+ listen_one = Mock()
+ t1 = Target()
+ event.listen(t1, "event_one", listen_one)
+ event.listen(t1, "event_one", listen_one)
+
+ t1.dispatch.event_one("t1")
+
+ # doubles are eliminated
+ eq_(listen_one.mock_calls, [call("t1")])
+
+ # only one remove needed
+ event.remove(t1, "event_one", listen_one)
+ t1.dispatch.event_one("t2")
+
+ eq_(listen_one.mock_calls, [call("t1")])
+
+ def test_double_event_wrapped(self):
+ # this is issue #3199
+ Target = self._wrapped_fixture()
+
+ listen_one = Mock()
+ t1 = Target()
+
+ event.listen(t1, "event_one", listen_one)
+ event.listen(t1, "event_one", listen_one)
+
+ t1.dispatch.event_one("t1")
+
+ # doubles are eliminated
+ eq_(listen_one.mock_calls, [call("adapted t1")])
+
+ # only one remove needed
+ event.remove(t1, "event_one", listen_one)
+ t1.dispatch.event_one("t2")
+
+ eq_(listen_one.mock_calls, [call("adapted t1")])
diff --git a/test/base/test_except.py b/test/base/test_except.py
index a62382725..918e7a042 100644
--- a/test/base/test_except.py
+++ b/test/base/test_except.py
@@ -2,19 +2,12 @@
from sqlalchemy import exc as sa_exceptions
-from sqlalchemy import util
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import eq_
-if util.py2k:
- from exceptions import StandardError, KeyboardInterrupt, SystemExit
-else:
- Exception = BaseException
-
class Error(Exception):
- """This class will be old-style on <= 2.4 and new-style on >=
- 2.5."""
+ pass
class DatabaseError(Error):
@@ -26,6 +19,7 @@ class OperationalError(DatabaseError):
class ProgrammingError(DatabaseError):
+
def __str__(self):
return '<%s>' % self.bogus
@@ -38,89 +32,110 @@ class WrapTest(fixtures.TestBase):
def test_db_error_normal(self):
try:
- raise sa_exceptions.DBAPIError.instance('', [],
- OperationalError(), DatabaseError)
+ raise sa_exceptions.DBAPIError.instance(
+ '', [],
+ OperationalError(), DatabaseError)
except sa_exceptions.DBAPIError:
self.assert_(True)
def test_tostring(self):
try:
- raise sa_exceptions.DBAPIError.instance('this is a message'
- , None, OperationalError(), DatabaseError)
+ raise sa_exceptions.DBAPIError.instance(
+ 'this is a message',
+ None, OperationalError(), DatabaseError)
except sa_exceptions.DBAPIError as exc:
- assert str(exc) \
- == "(OperationalError) 'this is a message' None"
+ eq_(
+ str(exc),
+ "(test.base.test_except.OperationalError) "
+ "[SQL: 'this is a message']")
def test_tostring_large_dict(self):
try:
- raise sa_exceptions.DBAPIError.instance('this is a message'
- ,
- {'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5, 'f': 6, 'g': 7, 'h':
- 8, 'i': 9, 'j': 10, 'k': 11,
- }, OperationalError(), DatabaseError)
+ raise sa_exceptions.DBAPIError.instance(
+ 'this is a message',
+ {
+ 'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5, 'f': 6, 'g': 7,
+ 'h': 8, 'i': 9, 'j': 10, 'k': 11
+ },
+ OperationalError(), DatabaseError)
except sa_exceptions.DBAPIError as exc:
- assert str(exc).startswith("(OperationalError) 'this is a "
- "message' {")
+ assert str(exc).startswith(
+ "(test.base.test_except.OperationalError) "
+ "[SQL: 'this is a message'] [parameters: {")
def test_tostring_large_list(self):
try:
- raise sa_exceptions.DBAPIError.instance('this is a message',
- [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11,],
+ raise sa_exceptions.DBAPIError.instance(
+ 'this is a message',
+ [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11],
OperationalError(), DatabaseError)
except sa_exceptions.DBAPIError as exc:
- assert str(exc).startswith("(OperationalError) 'this is a "
- "message' [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]")
+ assert str(exc).startswith(
+ "(test.base.test_except.OperationalError) "
+ "[SQL: 'this is a message'] [parameters: "
+ "[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]]")
def test_tostring_large_executemany(self):
try:
- raise sa_exceptions.DBAPIError.instance('this is a message',
+ raise sa_exceptions.DBAPIError.instance(
+ 'this is a message',
[{1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1},
- {1: 1}, {1:1}, {1: 1}, {1: 1},],
+ {1: 1}, {1: 1}, {1: 1}, {1: 1}, ],
OperationalError(), DatabaseError)
except sa_exceptions.DBAPIError as exc:
- eq_(str(exc) ,
- "(OperationalError) 'this is a message' [{1: 1}, "\
- "{1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: "\
- "1}, {1: 1}, {1: 1}]")
+ eq_(
+ str(exc),
+ "(test.base.test_except.OperationalError) "
+ "[SQL: 'this is a message'] [parameters: [{1: 1}, "
+ "{1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: "
+ "1}, {1: 1}, {1: 1}]]"
+ )
try:
raise sa_exceptions.DBAPIError.instance('this is a message', [
{1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1},
- {1:1}, {1: 1}, {1: 1}, {1: 1},
- ], OperationalError(), DatabaseError)
+ {1: 1}, {1: 1}, {1: 1}, {1: 1},
+ ], OperationalError(), DatabaseError)
except sa_exceptions.DBAPIError as exc:
- eq_(str(exc) ,
- "(OperationalError) 'this is a message' [{1: 1}, "
+ eq_(str(exc),
+ "(test.base.test_except.OperationalError) "
+ "[SQL: 'this is a message'] [parameters: [{1: 1}, "
"{1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, "
"{1: 1}, {1: 1} ... displaying 10 of 11 total "
- "bound parameter sets ... {1: 1}, {1: 1}]"
- )
+ "bound parameter sets ... {1: 1}, {1: 1}]]"
+ )
try:
- raise sa_exceptions.DBAPIError.instance('this is a message',
+ raise sa_exceptions.DBAPIError.instance(
+ 'this is a message',
[
- (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ),
- (1, ),
+ (1, ), (1, ), (1, ), (1, ), (1, ), (1, ),
+ (1, ), (1, ), (1, ), (1, ),
], OperationalError(), DatabaseError)
+
except sa_exceptions.DBAPIError as exc:
- eq_(str(exc),
- "(OperationalError) 'this is a message' [(1,), "\
- "(1,), (1,), (1,), (1,), (1,), (1,), (1,), (1,), (1,)]")
+ eq_(
+ str(exc),
+ "(test.base.test_except.OperationalError) "
+ "[SQL: 'this is a message'] [parameters: [(1,), "
+ "(1,), (1,), (1,), (1,), (1,), (1,), (1,), (1,), (1,)]]")
try:
raise sa_exceptions.DBAPIError.instance('this is a message', [
(1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ),
(1, ), (1, ),
- ], OperationalError(), DatabaseError)
+ ], OperationalError(), DatabaseError)
except sa_exceptions.DBAPIError as exc:
eq_(str(exc),
- "(OperationalError) 'this is a message' [(1,), "
+ "(test.base.test_except.OperationalError) "
+ "[SQL: 'this is a message'] [parameters: [(1,), "
"(1,), (1,), (1,), (1,), (1,), (1,), (1,) "
"... displaying 10 of 11 total bound "
- "parameter sets ... (1,), (1,)]"
- )
+ "parameter sets ... (1,), (1,)]]"
+ )
def test_db_error_busted_dbapi(self):
try:
- raise sa_exceptions.DBAPIError.instance('', [],
- ProgrammingError(), DatabaseError)
+ raise sa_exceptions.DBAPIError.instance(
+ '', [],
+ ProgrammingError(), DatabaseError)
except sa_exceptions.DBAPIError as e:
self.assert_(True)
self.assert_('Error in str() of DB-API' in e.args[0])
@@ -147,8 +162,9 @@ class WrapTest(fixtures.TestBase):
def test_db_error_keyboard_interrupt(self):
try:
- raise sa_exceptions.DBAPIError.instance('', [],
- KeyboardInterrupt(), DatabaseError)
+ raise sa_exceptions.DBAPIError.instance(
+ '', [],
+ KeyboardInterrupt(), DatabaseError)
except sa_exceptions.DBAPIError:
self.assert_(False)
except KeyboardInterrupt:
@@ -156,8 +172,9 @@ class WrapTest(fixtures.TestBase):
def test_db_error_system_exit(self):
try:
- raise sa_exceptions.DBAPIError.instance('', [],
- SystemExit(), DatabaseError)
+ raise sa_exceptions.DBAPIError.instance(
+ '', [],
+ SystemExit(), DatabaseError)
except sa_exceptions.DBAPIError:
self.assert_(False)
except SystemExit:
diff --git a/test/base/test_utils.py b/test/base/test_utils.py
index a378b0160..f75c5cbe9 100644
--- a/test/base/test_utils.py
+++ b/test/base/test_utils.py
@@ -6,7 +6,7 @@ from sqlalchemy.testing import eq_, is_, ne_, fails_if
from sqlalchemy.testing.util import picklers, gc_collect
from sqlalchemy.util import classproperty, WeakSequence, get_callable_argspec
from sqlalchemy.sql import column
-
+from sqlalchemy.util import langhelpers
class _KeyedTupleTest(object):
@@ -1274,6 +1274,43 @@ class DuckTypeCollectionTest(fixtures.TestBase):
is_(util.duck_type_collection(instance), None)
+class PublicFactoryTest(fixtures.TestBase):
+
+ def _fixture(self):
+ class Thingy(object):
+ def __init__(self, value):
+ "make a thingy"
+ self.value = value
+
+ @classmethod
+ def foobar(cls, x, y):
+ "do the foobar"
+ return Thingy(x + y)
+
+ return Thingy
+
+ def test_classmethod(self):
+ Thingy = self._fixture()
+ foob = langhelpers.public_factory(
+ Thingy.foobar, ".sql.elements.foob")
+ eq_(foob(3, 4).value, 7)
+ eq_(foob(x=3, y=4).value, 7)
+ eq_(foob.__doc__, "do the foobar")
+ eq_(foob.__module__, "sqlalchemy.sql.elements")
+ assert Thingy.foobar.__doc__.startswith("This function is mirrored;")
+
+ def test_constructor(self):
+ Thingy = self._fixture()
+ foob = langhelpers.public_factory(
+ Thingy, ".sql.elements.foob")
+ eq_(foob(7).value, 7)
+ eq_(foob(value=7).value, 7)
+ eq_(foob.__doc__, "make a thingy")
+ eq_(foob.__module__, "sqlalchemy.sql.elements")
+ assert Thingy.__init__.__doc__.startswith(
+ "Construct a new :class:`.Thingy` object.")
+
+
class ArgInspectionTest(fixtures.TestBase):
def test_get_cls_kwargs(self):
diff --git a/test/conftest.py b/test/conftest.py
index 1dd442309..c697085ee 100755
--- a/test/conftest.py
+++ b/test/conftest.py
@@ -7,9 +7,23 @@ installs SQLAlchemy's testing plugin into the local environment.
"""
import sys
+import os
-from os import path
for pth in ['../lib']:
- sys.path.insert(0, path.join(path.dirname(path.abspath(__file__)), pth))
+ sys.path.insert(
+ 0,
+ os.path.join(os.path.dirname(os.path.abspath(__file__)), pth))
-from sqlalchemy.testing.plugin.pytestplugin import *
+
+# use bootstrapping so that test plugins are loaded
+# without touching the main library before coverage starts
+bootstrap_file = os.path.join(
+ os.path.dirname(__file__), "..", "lib", "sqlalchemy",
+ "testing", "plugin", "bootstrap.py"
+)
+
+with open(bootstrap_file) as f:
+ code = compile(f.read(), "bootstrap.py", 'exec')
+ to_bootstrap = "pytest"
+ exec(code, globals(), locals())
+ from pytestplugin import * # noqa
diff --git a/test/dialect/mssql/test_engine.py b/test/dialect/mssql/test_engine.py
index 8ac9c6c16..4b4780d43 100644
--- a/test/dialect/mssql/test_engine.py
+++ b/test/dialect/mssql/test_engine.py
@@ -7,6 +7,8 @@ from sqlalchemy.engine import url
from sqlalchemy.testing import fixtures
from sqlalchemy import testing
from sqlalchemy.testing import assert_raises_message, assert_warnings
+from sqlalchemy.testing.mock import Mock
+
class ParseConnectTest(fixtures.TestBase):
@@ -167,3 +169,21 @@ class ParseConnectTest(fixtures.TestBase):
assert_raises_message(exc.SAWarning,
'Unrecognized server version info',
engine.connect)
+
+
+class VersionDetectionTest(fixtures.TestBase):
+ def test_pymssql_version(self):
+ dialect = pymssql.MSDialect_pymssql()
+
+ for vers in [
+ "Microsoft SQL Server Blah - 11.0.9216.62",
+ "Microsoft SQL Server (XYZ) - 11.0.9216.62 \n"
+ "Jul 18 2014 22:00:21 \nCopyright (c) Microsoft Corporation",
+ "Microsoft SQL Azure (RTM) - 11.0.9216.62 \n"
+ "Jul 18 2014 22:00:21 \nCopyright (c) Microsoft Corporation"
+ ]:
+ conn = Mock(scalar=Mock(return_value=vers))
+ eq_(
+ dialect._get_server_version_info(conn),
+ (11, 0, 9216, 62)
+ ) \ No newline at end of file
diff --git a/test/dialect/mysql/test_reflection.py b/test/dialect/mysql/test_reflection.py
index bf35a2c6b..99733e397 100644
--- a/test/dialect/mysql/test_reflection.py
+++ b/test/dialect/mysql/test_reflection.py
@@ -283,6 +283,38 @@ class ReflectionTest(fixtures.TestBase, AssertsExecutionResults):
view_names = dialect.get_view_names(connection, "information_schema")
self.assert_('TABLES' in view_names)
+ @testing.provide_metadata
+ def test_reflection_with_unique_constraint(self):
+ insp = inspect(testing.db)
+
+ meta = self.metadata
+ uc_table = Table('mysql_uc', meta,
+ Column('a', String(10)),
+ UniqueConstraint('a', name='uc_a'))
+
+ uc_table.create()
+
+ # MySQL converts unique constraints into unique indexes.
+ # Separately we get both
+ indexes = dict((i['name'], i) for i in insp.get_indexes('mysql_uc'))
+ constraints = set(i['name']
+ for i in insp.get_unique_constraints('mysql_uc'))
+
+ self.assert_('uc_a' in indexes)
+ self.assert_(indexes['uc_a']['unique'])
+ self.assert_('uc_a' in constraints)
+
+ # reflection here favors the unique index, as that's the
+ # more "official" MySQL construct
+ reflected = Table('mysql_uc', MetaData(testing.db), autoload=True)
+
+ indexes = dict((i.name, i) for i in reflected.indexes)
+ constraints = set(uc.name for uc in reflected.constraints)
+
+ self.assert_('uc_a' in indexes)
+ self.assert_(indexes['uc_a'].unique)
+ self.assert_('uc_a' not in constraints)
+
class RawReflectionTest(fixtures.TestBase):
def setup(self):
diff --git a/test/dialect/mysql/test_types.py b/test/dialect/mysql/test_types.py
index 75dbe15e0..e65acc6db 100644
--- a/test/dialect/mysql/test_types.py
+++ b/test/dialect/mysql/test_types.py
@@ -154,10 +154,8 @@ class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
res
)
- @testing.fails_if(
- lambda: testing.against("mysql+mysqlconnector")
- and not util.py3k,
- "bug in mysqlconnector; http://bugs.mysql.com/bug.php?id=73266")
+ # fixed in mysql-connector as of 2.0.1,
+ # see http://bugs.mysql.com/bug.php?id=73266
@testing.provide_metadata
def test_precision_float_roundtrip(self):
t = Table('t', self.metadata,
diff --git a/test/dialect/postgresql/test_reflection.py b/test/dialect/postgresql/test_reflection.py
index bab41b0f7..8de71216e 100644
--- a/test/dialect/postgresql/test_reflection.py
+++ b/test/dialect/postgresql/test_reflection.py
@@ -7,14 +7,130 @@ from sqlalchemy.testing import fixtures
from sqlalchemy import testing
from sqlalchemy import inspect
from sqlalchemy import Table, Column, MetaData, Integer, String, \
- PrimaryKeyConstraint, ForeignKey, join, Sequence
+ PrimaryKeyConstraint, ForeignKey, join, Sequence, UniqueConstraint, \
+ Index
from sqlalchemy import exc
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import base as postgresql
-class DomainReflectionTest(fixtures.TestBase, AssertsExecutionResults):
+class ForeignTableReflectionTest(fixtures.TablesTest, AssertsExecutionResults):
+ """Test reflection on foreign tables"""
+
+ __requires__ = 'postgresql_test_dblink',
+ __only_on__ = 'postgresql >= 9.3'
+ __backend__ = True
+
+ @classmethod
+ def define_tables(cls, metadata):
+ from sqlalchemy.testing import config
+ dblink = config.file_config.get(
+ 'sqla_testing', 'postgres_test_db_link')
+
+ testtable = Table(
+ 'testtable', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', String(30)))
+
+ for ddl in [
+ "CREATE SERVER test_server FOREIGN DATA WRAPPER postgres_fdw "
+ "OPTIONS (dbname 'test', host '%s')" % dblink,
+ "CREATE USER MAPPING FOR public \
+ SERVER test_server options (user 'scott', password 'tiger')",
+ "CREATE FOREIGN TABLE test_foreigntable ( "
+ " id INT, "
+ " data VARCHAR(30) "
+ ") SERVER test_server OPTIONS (table_name 'testtable')",
+ ]:
+ sa.event.listen(metadata, "after_create", sa.DDL(ddl))
+
+ for ddl in [
+ 'DROP FOREIGN TABLE test_foreigntable',
+ 'DROP USER MAPPING FOR public SERVER test_server',
+ "DROP SERVER test_server"
+ ]:
+ sa.event.listen(metadata, "before_drop", sa.DDL(ddl))
+
+ def test_foreign_table_is_reflected(self):
+ metadata = MetaData(testing.db)
+ table = Table('test_foreigntable', metadata, autoload=True)
+ eq_(set(table.columns.keys()), set(['id', 'data']),
+ "Columns of reflected foreign table didn't equal expected columns")
+
+ def test_get_foreign_table_names(self):
+ inspector = inspect(testing.db)
+ with testing.db.connect() as conn:
+ ft_names = inspector.get_foreign_table_names()
+ eq_(ft_names, ['test_foreigntable'])
+
+ def test_get_table_names_no_foreign(self):
+ inspector = inspect(testing.db)
+ with testing.db.connect() as conn:
+ names = inspector.get_table_names()
+ eq_(names, ['testtable'])
+
+
+class MaterializedViewReflectionTest(
+ fixtures.TablesTest, AssertsExecutionResults):
+ """Test reflection on materialized views"""
+
+ __only_on__ = 'postgresql >= 9.3'
+ __backend__ = True
+
+ @classmethod
+ def define_tables(cls, metadata):
+ testtable = Table(
+ 'testtable', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', String(30)))
+
+ # insert data before we create the view
+ @sa.event.listens_for(testtable, "after_create")
+ def insert_data(target, connection, **kw):
+ connection.execute(
+ target.insert(),
+ {"id": 89, "data": 'd1'}
+ )
+
+ materialized_view = sa.DDL(
+ "CREATE MATERIALIZED VIEW test_mview AS "
+ "SELECT * FROM testtable")
+
+ plain_view = sa.DDL(
+ "CREATE VIEW test_regview AS "
+ "SELECT * FROM testtable")
+
+ sa.event.listen(testtable, 'after_create', plain_view)
+ sa.event.listen(testtable, 'after_create', materialized_view)
+ sa.event.listen(
+ testtable, 'before_drop',
+ sa.DDL("DROP MATERIALIZED VIEW test_mview")
+ )
+ sa.event.listen(
+ testtable, 'before_drop',
+ sa.DDL("DROP VIEW test_regview")
+ )
+ def test_mview_is_reflected(self):
+ metadata = MetaData(testing.db)
+ table = Table('test_mview', metadata, autoload=True)
+ eq_(set(table.columns.keys()), set(['id', 'data']),
+ "Columns of reflected mview didn't equal expected columns")
+
+ def test_mview_select(self):
+ metadata = MetaData(testing.db)
+ table = Table('test_mview', metadata, autoload=True)
+ eq_(
+ table.select().execute().fetchall(),
+ [(89, 'd1',)]
+ )
+
+ def test_get_view_names(self):
+ insp = inspect(testing.db)
+ eq_(set(insp.get_view_names()), set(['test_mview', 'test_regview']))
+
+
+class DomainReflectionTest(fixtures.TestBase, AssertsExecutionResults):
"""Test PostgreSQL domains"""
__only_on__ = 'postgresql > 8.3'
@@ -688,6 +804,66 @@ class ReflectionTest(fixtures.TestBase):
'labels': ['sad', 'ok', 'happy']
}])
+ @testing.provide_metadata
+ def test_reflection_with_unique_constraint(self):
+ insp = inspect(testing.db)
+
+ meta = self.metadata
+ uc_table = Table('pgsql_uc', meta,
+ Column('a', String(10)),
+ UniqueConstraint('a', name='uc_a'))
+
+ uc_table.create()
+
+ # PostgreSQL will create an implicit index for a unique
+ # constraint. Separately we get both
+ indexes = set(i['name'] for i in insp.get_indexes('pgsql_uc'))
+ constraints = set(i['name']
+ for i in insp.get_unique_constraints('pgsql_uc'))
+
+ self.assert_('uc_a' in indexes)
+ self.assert_('uc_a' in constraints)
+
+ # reflection corrects for the dupe
+ reflected = Table('pgsql_uc', MetaData(testing.db), autoload=True)
+
+ indexes = set(i.name for i in reflected.indexes)
+ constraints = set(uc.name for uc in reflected.constraints)
+
+ self.assert_('uc_a' not in indexes)
+ self.assert_('uc_a' in constraints)
+
+ @testing.provide_metadata
+ def test_reflect_unique_index(self):
+ insp = inspect(testing.db)
+
+ meta = self.metadata
+
+ # a unique index OTOH we are able to detect is an index
+ # and not a unique constraint
+ uc_table = Table('pgsql_uc', meta,
+ Column('a', String(10)),
+ Index('ix_a', 'a', unique=True))
+
+ uc_table.create()
+
+ indexes = dict((i['name'], i) for i in insp.get_indexes('pgsql_uc'))
+ constraints = set(i['name']
+ for i in insp.get_unique_constraints('pgsql_uc'))
+
+ self.assert_('ix_a' in indexes)
+ assert indexes['ix_a']['unique']
+ self.assert_('ix_a' not in constraints)
+
+ reflected = Table('pgsql_uc', MetaData(testing.db), autoload=True)
+
+ indexes = dict((i.name, i) for i in reflected.indexes)
+ constraints = set(uc.name for uc in reflected.constraints)
+
+ self.assert_('ix_a' in indexes)
+ assert indexes['ix_a'].unique
+ self.assert_('ix_a' not in constraints)
+
class CustomTypeReflectionTest(fixtures.TestBase):
diff --git a/test/dialect/test_oracle.py b/test/dialect/test_oracle.py
index 187042036..a771c5d80 100644
--- a/test/dialect/test_oracle.py
+++ b/test/dialect/test_oracle.py
@@ -104,6 +104,28 @@ class QuotedBindRoundTripTest(fixtures.TestBase):
(2, 2, 2)
)
+ def test_numeric_bind_round_trip(self):
+ eq_(
+ testing.db.scalar(
+ select([
+ literal_column("2", type_=Integer()) +
+ bindparam("2_1", value=2)])
+ ),
+ 4
+ )
+
+ @testing.provide_metadata
+ def test_numeric_bind_in_crud(self):
+ t = Table(
+ "asfd", self.metadata,
+ Column("100K", Integer)
+ )
+ t.create()
+
+ testing.db.execute(t.insert(), {"100K": 10})
+ eq_(
+ testing.db.scalar(t.select()), 10
+ )
class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = "oracle" #oracle.dialect()
@@ -648,6 +670,23 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
"CREATE INDEX bar ON foo (x > 5)"
)
+ def test_table_options(self):
+ m = MetaData()
+
+ t = Table(
+ 'foo', m,
+ Column('x', Integer),
+ prefixes=["GLOBAL TEMPORARY"],
+ oracle_on_commit="PRESERVE ROWS"
+ )
+
+ self.assert_compile(
+ schema.CreateTable(t),
+ "CREATE GLOBAL TEMPORARY TABLE "
+ "foo (x INTEGER) ON COMMIT PRESERVE ROWS"
+ )
+
+
class CompatFlagsTest(fixtures.TestBase, AssertsCompiledSQL):
def _dialect(self, server_version, **kw):
diff --git a/test/dialect/test_sqlite.py b/test/dialect/test_sqlite.py
index e77a03980..124208dbe 100644
--- a/test/dialect/test_sqlite.py
+++ b/test/dialect/test_sqlite.py
@@ -11,7 +11,7 @@ from sqlalchemy import Table, select, bindparam, Column,\
UniqueConstraint
from sqlalchemy.types import Integer, String, Boolean, DateTime, Date, Time
from sqlalchemy import types as sqltypes
-from sqlalchemy import event
+from sqlalchemy import event, inspect
from sqlalchemy.util import u, ue
from sqlalchemy import exc, sql, schema, pool, util
from sqlalchemy.dialects.sqlite import base as sqlite, \
@@ -480,57 +480,6 @@ class DialectTest(fixtures.TestBase, AssertsExecutionResults):
assert u('méil') in result.keys()
assert ue('\u6e2c\u8a66') in result.keys()
- def test_attached_as_schema(self):
- cx = testing.db.connect()
- try:
- cx.execute('ATTACH DATABASE ":memory:" AS test_schema')
- dialect = cx.dialect
- assert dialect.get_table_names(cx, 'test_schema') == []
- meta = MetaData(cx)
- Table('created', meta, Column('id', Integer),
- schema='test_schema')
- alt_master = Table('sqlite_master', meta, autoload=True,
- schema='test_schema')
- meta.create_all(cx)
- eq_(dialect.get_table_names(cx, 'test_schema'), ['created'])
- assert len(alt_master.c) > 0
- meta.clear()
- reflected = Table('created', meta, autoload=True,
- schema='test_schema')
- assert len(reflected.c) == 1
- cx.execute(reflected.insert(), dict(id=1))
- r = cx.execute(reflected.select()).fetchall()
- assert list(r) == [(1, )]
- cx.execute(reflected.update(), dict(id=2))
- r = cx.execute(reflected.select()).fetchall()
- assert list(r) == [(2, )]
- cx.execute(reflected.delete(reflected.c.id == 2))
- r = cx.execute(reflected.select()).fetchall()
- assert list(r) == []
-
- # note that sqlite_master is cleared, above
-
- meta.drop_all()
- assert dialect.get_table_names(cx, 'test_schema') == []
- finally:
- cx.execute('DETACH DATABASE test_schema')
-
- @testing.exclude('sqlite', '<', (2, 6), 'no database support')
- def test_temp_table_reflection(self):
- cx = testing.db.connect()
- try:
- cx.execute('CREATE TEMPORARY TABLE tempy (id INT)')
- assert 'tempy' in cx.dialect.get_table_names(cx, None)
- meta = MetaData(cx)
- tempy = Table('tempy', meta, autoload=True)
- assert len(tempy.c) == 1
- meta.drop_all()
- except:
- try:
- cx.execute('DROP TABLE tempy')
- except exc.DBAPIError:
- pass
- raise
def test_file_path_is_absolute(self):
d = pysqlite_dialect.dialect()
@@ -549,7 +498,6 @@ class DialectTest(fixtures.TestBase, AssertsExecutionResults):
e = create_engine('sqlite+pysqlite:///foo.db')
assert e.pool.__class__ is pool.NullPool
-
def test_dont_reflect_autoindex(self):
meta = MetaData(testing.db)
t = Table('foo', meta, Column('bar', String, primary_key=True))
@@ -575,6 +523,125 @@ class DialectTest(fixtures.TestBase, AssertsExecutionResults):
finally:
meta.drop_all()
+ def test_get_unique_constraints(self):
+ meta = MetaData(testing.db)
+ t1 = Table('foo', meta, Column('f', Integer),
+ UniqueConstraint('f', name='foo_f'))
+ t2 = Table('bar', meta, Column('b', Integer),
+ UniqueConstraint('b', name='bar_b'),
+ prefixes=['TEMPORARY'])
+ meta.create_all()
+ from sqlalchemy.engine.reflection import Inspector
+ try:
+ inspector = Inspector(testing.db)
+ eq_(inspector.get_unique_constraints('foo'),
+ [{'column_names': [u'f'], 'name': u'foo_f'}])
+ eq_(inspector.get_unique_constraints('bar'),
+ [{'column_names': [u'b'], 'name': u'bar_b'}])
+ finally:
+ meta.drop_all()
+
+
+class AttachedMemoryDBTest(fixtures.TestBase):
+ __only_on__ = 'sqlite'
+
+ dbname = None
+
+ def setUp(self):
+ self.conn = conn = testing.db.connect()
+ if self.dbname is None:
+ dbname = ':memory:'
+ else:
+ dbname = self.dbname
+ conn.execute('ATTACH DATABASE "%s" AS test_schema' % dbname)
+ self.metadata = MetaData()
+
+ def tearDown(self):
+ self.metadata.drop_all(self.conn)
+ self.conn.execute('DETACH DATABASE test_schema')
+ if self.dbname:
+ os.remove(self.dbname)
+
+ def _fixture(self):
+ meta = self.metadata
+ ct = Table(
+ 'created', meta,
+ Column('id', Integer),
+ Column('name', String),
+ schema='test_schema')
+
+ meta.create_all(self.conn)
+ return ct
+
+ def test_no_tables(self):
+ insp = inspect(self.conn)
+ eq_(insp.get_table_names("test_schema"), [])
+
+ def test_table_names_present(self):
+ self._fixture()
+ insp = inspect(self.conn)
+ eq_(insp.get_table_names("test_schema"), ["created"])
+
+ def test_table_names_system(self):
+ self._fixture()
+ insp = inspect(self.conn)
+ eq_(insp.get_table_names("test_schema"), ["created"])
+
+ def test_reflect_system_table(self):
+ meta = MetaData(self.conn)
+ alt_master = Table(
+ 'sqlite_master', meta, autoload=True,
+ autoload_with=self.conn,
+ schema='test_schema')
+ assert len(alt_master.c) > 0
+
+ def test_reflect_user_table(self):
+ self._fixture()
+
+ m2 = MetaData()
+ c2 = Table('created', m2, autoload=True, autoload_with=self.conn)
+ eq_(len(c2.c), 2)
+
+ def test_crud(self):
+ ct = self._fixture()
+
+ self.conn.execute(ct.insert(), {'id': 1, 'name': 'foo'})
+ eq_(
+ self.conn.execute(ct.select()).fetchall(),
+ [(1, 'foo')]
+ )
+
+ self.conn.execute(ct.update(), {'id': 2, 'name': 'bar'})
+ eq_(
+ self.conn.execute(ct.select()).fetchall(),
+ [(2, 'bar')]
+ )
+ self.conn.execute(ct.delete())
+ eq_(
+ self.conn.execute(ct.select()).fetchall(),
+ []
+ )
+
+ def test_col_targeting(self):
+ ct = self._fixture()
+
+ self.conn.execute(ct.insert(), {'id': 1, 'name': 'foo'})
+ row = self.conn.execute(ct.select()).first()
+ eq_(row['id'], 1)
+ eq_(row['name'], 'foo')
+
+ def test_col_targeting_union(self):
+ ct = self._fixture()
+
+ self.conn.execute(ct.insert(), {'id': 1, 'name': 'foo'})
+ row = self.conn.execute(ct.select().union(ct.select())).first()
+ eq_(row['id'], 1)
+ eq_(row['name'], 'foo')
+
+
+class AttachedFileDBTest(AttachedMemoryDBTest):
+ dbname = 'attached_db.db'
+
class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
diff --git a/test/engine/test_execute.py b/test/engine/test_execute.py
index d8e1c655e..5c3279ba9 100644
--- a/test/engine/test_execute.py
+++ b/test/engine/test_execute.py
@@ -25,6 +25,10 @@ from sqlalchemy.util import nested
users, metadata, users_autoinc = None, None, None
+class SomeException(Exception):
+ pass
+
+
class ExecuteTest(fixtures.TestBase):
__backend__ = True
@@ -280,12 +284,13 @@ class ExecuteTest(fixtures.TestBase):
impl = Integer
def process_bind_param(self, value, dialect):
- raise Exception("nope")
+ raise SomeException("nope")
def _go(conn):
assert_raises_message(
tsa.exc.StatementError,
- r"nope \(original cause: Exception: nope\) u?'SELECT 1 ",
+ r"\(test.engine.test_execute.SomeException\) "
+ "nope \[SQL\: u?'SELECT 1 ",
conn.execute,
select([1]).
where(
@@ -479,6 +484,26 @@ class ExecuteTest(fixtures.TestBase):
eq_(canary, ["l1", "l2", "l3", "l1", "l2"])
@testing.requires.ad_hoc_engines
+ def test_autocommit_option_no_issue_first_connect(self):
+ eng = create_engine(testing.db.url)
+ eng.update_execution_options(autocommit=True)
+ conn = eng.connect()
+ eq_(conn._execution_options, {"autocommit": True})
+ conn.close()
+
+ @testing.requires.ad_hoc_engines
+ def test_dialect_init_uses_options(self):
+ eng = create_engine(testing.db.url)
+
+ def my_init(connection):
+ connection.execution_options(foo='bar').execute(select([1]))
+
+ with patch.object(eng.dialect, "initialize", my_init):
+ conn = eng.connect()
+ eq_(conn._execution_options, {})
+ conn.close()
+
+ @testing.requires.ad_hoc_engines
def test_generative_engine_event_dispatch_hasevents(self):
def l1(*arg, **kw):
pass
@@ -541,7 +566,7 @@ class ConvenienceExecuteTest(fixtures.TablesTest):
if is_transaction:
conn = conn.connection
conn.execute(self.table.insert().values(a=x, b=value))
- raise Exception("breakage")
+ raise SomeException("breakage")
return go
def _assert_no_data(self):
@@ -982,6 +1007,17 @@ class ExecutionOptionsTest(fixtures.TestBase):
eq_(c1._execution_options, {"foo": "bar"})
eq_(c2._execution_options, {"foo": "bar", "bat": "hoho"})
+ def test_branched_connection_execution_options(self):
+ engine = testing_engine("sqlite://")
+
+ conn = engine.connect()
+ c2 = conn.execution_options(foo="bar")
+ c2_branch = c2.connect()
+ eq_(
+ c2_branch._execution_options,
+ {"foo": "bar"}
+ )
+
class AlternateResultProxyTest(fixtures.TestBase):
__requires__ = ('sqlite', )
@@ -1440,6 +1476,48 @@ class EngineEventsTest(fixtures.TestBase):
'begin', 'execute', 'cursor_execute', 'commit',
])
+ def test_transactional_named(self):
+ canary = []
+
+ def tracker(name):
+ def go(*args, **kw):
+ canary.append((name, set(kw)))
+ return go
+
+ engine = engines.testing_engine()
+ event.listen(engine, 'before_execute', tracker('execute'), named=True)
+ event.listen(
+ engine, 'before_cursor_execute',
+ tracker('cursor_execute'), named=True)
+ event.listen(engine, 'begin', tracker('begin'), named=True)
+ event.listen(engine, 'commit', tracker('commit'), named=True)
+ event.listen(engine, 'rollback', tracker('rollback'), named=True)
+
+ conn = engine.connect()
+ trans = conn.begin()
+ conn.execute(select([1]))
+ trans.rollback()
+ trans = conn.begin()
+ conn.execute(select([1]))
+ trans.commit()
+
+ eq_(
+ canary, [
+ ('begin', set(['conn', ])),
+ ('execute', set([
+ 'conn', 'clauseelement', 'multiparams', 'params'])),
+ ('cursor_execute', set([
+ 'conn', 'cursor', 'executemany',
+ 'statement', 'parameters', 'context'])),
+ ('rollback', set(['conn', ])), ('begin', set(['conn', ])),
+ ('execute', set([
+ 'conn', 'clauseelement', 'multiparams', 'params'])),
+ ('cursor_execute', set([
+ 'conn', 'cursor', 'executemany', 'statement',
+ 'parameters', 'context'])),
+ ('commit', set(['conn', ]))]
+ )
+
@testing.requires.savepoints
@testing.requires.two_phase_transactions
def test_transactional_advanced(self):
@@ -1524,7 +1602,7 @@ class HandleErrorTest(fixtures.TestBase):
listener = Mock(return_value=None)
event.listen(engine, 'dbapi_error', listener)
- nope = Exception("nope")
+ nope = SomeException("nope")
class MyType(TypeDecorator):
impl = Integer
@@ -1535,7 +1613,8 @@ class HandleErrorTest(fixtures.TestBase):
with engine.connect() as conn:
assert_raises_message(
tsa.exc.StatementError,
- r"nope \(original cause: Exception: nope\) u?'SELECT 1 ",
+ r"\(test.engine.test_execute.SomeException\) "
+ "nope \[SQL\: u?'SELECT 1 ",
conn.execute,
select([1]).where(
column('foo') == literal('bar', MyType()))
@@ -1715,7 +1794,7 @@ class HandleErrorTest(fixtures.TestBase):
listener = Mock(return_value=None)
event.listen(engine, 'handle_error', listener)
- nope = Exception("nope")
+ nope = SomeException("nope")
class MyType(TypeDecorator):
impl = Integer
@@ -1726,7 +1805,8 @@ class HandleErrorTest(fixtures.TestBase):
with engine.connect() as conn:
assert_raises_message(
tsa.exc.StatementError,
- r"nope \(original cause: Exception: nope\) u?'SELECT 1 ",
+ r"\(test.engine.test_execute.SomeException\) "
+ "nope \[SQL\: u?'SELECT 1 ",
conn.execute,
select([1]).where(
column('foo') == literal('bar', MyType()))
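
Note on the revised assertions above: the StatementError display now leads with the qualified class name of the wrapped exception, followed by the SQL (and, where present, the parameters). A rough sketch of that layout, using a hypothetical helper name, is:

# Hypothetical sketch of the "(module.ExceptionName) message [SQL: ...]"
# layout matched by the assertions above; the helper name and exact
# spacing are illustrative assumptions, not SQLAlchemy internals.
def format_statement_error(orig_exc, sql):
    qualname = "%s.%s" % (
        type(orig_exc).__module__, type(orig_exc).__name__)
    return "(%s) %s [SQL: %r]" % (qualname, orig_exc, sql)

class SomeException(Exception):
    pass

# e.g. "(__main__.SomeException) nope [SQL: 'SELECT 1']"
print(format_statement_error(SomeException("nope"), "SELECT 1"))
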
diff --git a/test/engine/test_logging.py b/test/engine/test_logging.py
index 1432a0f7b..180ea9388 100644
--- a/test/engine/test_logging.py
+++ b/test/engine/test_logging.py
@@ -56,7 +56,8 @@ class LogParamsTest(fixtures.TestBase):
def test_error_large_dict(self):
assert_raises_message(
tsa.exc.DBAPIError,
- r".*'INSERT INTO nonexistent \(data\) values \(:data\)' "
+ r".*'INSERT INTO nonexistent \(data\) values \(:data\)'\] "
+ "\[parameters: "
"\[{'data': '0'}, {'data': '1'}, {'data': '2'}, "
"{'data': '3'}, {'data': '4'}, {'data': '5'}, "
"{'data': '6'}, {'data': '7'} ... displaying 10 of "
@@ -71,8 +72,9 @@ class LogParamsTest(fixtures.TestBase):
assert_raises_message(
tsa.exc.DBAPIError,
r".*INSERT INTO nonexistent \(data\) values "
- "\(\?\)' \[\('0',\), \('1',\), \('2',\), \('3',\), "
- "\('4',\), \('5',\), \('6',\), \('7',\) ... displaying "
+ "\(\?\)'\] \[parameters: \[\('0',\), \('1',\), \('2',\), \('3',\), "
+ "\('4',\), \('5',\), \('6',\), \('7',\) "
+ "... displaying "
"10 of 100 total bound parameter sets ... "
"\('98',\), \('99',\)\]",
lambda: self.eng.execute(
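
The expected messages above also gained a "[parameters: ...]" section whose large executemany lists are truncated. Roughly, the trimming behaves like the following sketch; the function name and cutoffs are assumptions, chosen to reproduce the "displaying 10 of 100 total bound parameter sets" text being matched:

# Sketch of the parameter-list truncation seen in the expected messages;
# head=8 plus tail=2 yields the "displaying 10 of 100" wording above.
def trim_parameter_sets(params, head=8, tail=2, threshold=10):
    if len(params) <= threshold:
        return repr(params)
    shown = ", ".join(repr(p) for p in params[:head])
    trailing = ", ".join(repr(p) for p in params[-tail:])
    return "[%s ... displaying %d of %d total bound parameter sets ... %s]" % (
        shown, head + tail, len(params), trailing)

print(trim_parameter_sets([(str(i),) for i in range(100)]))
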
diff --git a/test/engine/test_reconnect.py b/test/engine/test_reconnect.py
index c82cca5a1..4500ada6a 100644
--- a/test/engine/test_reconnect.py
+++ b/test/engine/test_reconnect.py
@@ -8,7 +8,7 @@ from sqlalchemy import testing
from sqlalchemy.testing import engines
from sqlalchemy.testing import fixtures
from sqlalchemy.testing.engines import testing_engine
-from sqlalchemy.testing.mock import Mock, call
+from sqlalchemy.testing.mock import Mock, call, patch
class MockError(Exception):
@@ -504,6 +504,54 @@ class RealReconnectTest(fixtures.TestBase):
# pool isn't replaced
assert self.engine.pool is p2
+ def test_branched_invalidate_branch_to_parent(self):
+ c1 = self.engine.connect()
+
+ with patch.object(self.engine.pool, "logger") as logger:
+ c1_branch = c1.connect()
+ eq_(c1_branch.execute(select([1])).scalar(), 1)
+
+ self.engine.test_shutdown()
+
+ _assert_invalidated(c1_branch.execute, select([1]))
+ assert c1.invalidated
+ assert c1_branch.invalidated
+
+ c1_branch._revalidate_connection()
+ assert not c1.invalidated
+ assert not c1_branch.invalidated
+
+ assert "Invalidate connection" in logger.mock_calls[0][1][0]
+
+ def test_branched_invalidate_parent_to_branch(self):
+ c1 = self.engine.connect()
+
+ c1_branch = c1.connect()
+ eq_(c1_branch.execute(select([1])).scalar(), 1)
+
+ self.engine.test_shutdown()
+
+ _assert_invalidated(c1.execute, select([1]))
+ assert c1.invalidated
+ assert c1_branch.invalidated
+
+ c1._revalidate_connection()
+ assert not c1.invalidated
+ assert not c1_branch.invalidated
+
+ def test_branch_invalidate_state(self):
+ c1 = self.engine.connect()
+
+ c1_branch = c1.connect()
+
+ eq_(c1_branch.execute(select([1])).scalar(), 1)
+
+ self.engine.test_shutdown()
+
+ _assert_invalidated(c1_branch.execute, select([1]))
+ assert not c1_branch.closed
+ assert not c1_branch._connection_is_valid
+
def test_ensure_is_disconnect_gets_connection(self):
def is_disconnect(e, conn, cursor):
# connection is still present
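
For orientation, the branched-connection invalidation behavior these new tests assert can be used roughly as below. This is an illustrative sketch against a placeholder in-memory engine, not part of the patch; invalidation propagates between a branch and its parent because both wrap the same DBAPI connection:

from sqlalchemy import create_engine

engine = create_engine("sqlite://")   # placeholder URL
parent = engine.connect()
branch = parent.connect()             # branch shares parent's DBAPI conn

parent.invalidate()                   # invalidate through the parent...
assert parent.invalidated
assert branch.invalidated             # ...and the branch reflects it, per
                                      # the parent-to-branch test above
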
diff --git a/test/engine/test_transaction.py b/test/engine/test_transaction.py
index 8a5303642..b3b17e75a 100644
--- a/test/engine/test_transaction.py
+++ b/test/engine/test_transaction.py
@@ -133,6 +133,91 @@ class TransactionTest(fixtures.TestBase):
finally:
connection.close()
+ def test_branch_nested_rollback(self):
+ connection = testing.db.connect()
+ try:
+ connection.begin()
+ branched = connection.connect()
+ assert branched.in_transaction()
+ branched.execute(users.insert(), user_id=1, user_name='user1')
+ nested = branched.begin()
+ branched.execute(users.insert(), user_id=2, user_name='user2')
+ nested.rollback()
+ assert not connection.in_transaction()
+ eq_(connection.scalar("select count(*) from query_users"), 0)
+
+ finally:
+ connection.close()
+
+ def test_branch_autorollback(self):
+ connection = testing.db.connect()
+ try:
+ branched = connection.connect()
+ branched.execute(users.insert(), user_id=1, user_name='user1')
+ try:
+ branched.execute(users.insert(), user_id=1, user_name='user1')
+ except exc.DBAPIError:
+ pass
+ finally:
+ connection.close()
+
+ def test_branch_orig_rollback(self):
+ connection = testing.db.connect()
+ try:
+ branched = connection.connect()
+ branched.execute(users.insert(), user_id=1, user_name='user1')
+ nested = branched.begin()
+ assert branched.in_transaction()
+ branched.execute(users.insert(), user_id=2, user_name='user2')
+ nested.rollback()
+ eq_(connection.scalar("select count(*) from query_users"), 1)
+
+ finally:
+ connection.close()
+
+ def test_branch_autocommit(self):
+ connection = testing.db.connect()
+ try:
+ branched = connection.connect()
+ branched.execute(users.insert(), user_id=1, user_name='user1')
+ finally:
+ connection.close()
+ eq_(testing.db.scalar("select count(*) from query_users"), 1)
+
+ @testing.requires.savepoints
+ def test_branch_savepoint_rollback(self):
+ connection = testing.db.connect()
+ try:
+ trans = connection.begin()
+ branched = connection.connect()
+ assert branched.in_transaction()
+ branched.execute(users.insert(), user_id=1, user_name='user1')
+ nested = branched.begin_nested()
+ branched.execute(users.insert(), user_id=2, user_name='user2')
+ nested.rollback()
+ assert connection.in_transaction()
+ trans.commit()
+ eq_(connection.scalar("select count(*) from query_users"), 1)
+
+ finally:
+ connection.close()
+
+ @testing.requires.two_phase_transactions
+ def test_branch_twophase_rollback(self):
+ connection = testing.db.connect()
+ try:
+ branched = connection.connect()
+ assert not branched.in_transaction()
+ branched.execute(users.insert(), user_id=1, user_name='user1')
+ nested = branched.begin_twophase()
+ branched.execute(users.insert(), user_id=2, user_name='user2')
+ nested.rollback()
+ assert not connection.in_transaction()
+ eq_(connection.scalar("select count(*) from query_users"), 1)
+
+ finally:
+ connection.close()
+
def test_retains_through_options(self):
connection = testing.db.connect()
try:
@@ -1126,139 +1211,6 @@ class TLTransactionTest(fixtures.TestBase):
order_by(users.c.user_id)).fetchall(),
[(1, ), (2, )])
-counters = None
-
-
-class ForUpdateTest(fixtures.TestBase):
- __requires__ = 'ad_hoc_engines',
- __backend__ = True
-
- @classmethod
- def setup_class(cls):
- global counters, metadata
- metadata = MetaData()
- counters = Table('forupdate_counters', metadata,
- Column('counter_id', INT, primary_key=True),
- Column('counter_value', INT),
- test_needs_acid=True)
- counters.create(testing.db)
-
- def teardown(self):
- testing.db.execute(counters.delete()).close()
-
- @classmethod
- def teardown_class(cls):
- counters.drop(testing.db)
-
- def increment(self, count, errors, update_style=True, delay=0.005):
- con = testing.db.connect()
- sel = counters.select(for_update=update_style,
- whereclause=counters.c.counter_id == 1)
- for i in range(count):
- trans = con.begin()
- try:
- existing = con.execute(sel).first()
- incr = existing['counter_value'] + 1
- time.sleep(delay)
- con.execute(counters.update(counters.c.counter_id == 1,
- values={'counter_value': incr}))
- time.sleep(delay)
- readback = con.execute(sel).first()
- if readback['counter_value'] != incr:
- raise AssertionError('Got %s post-update, expected '
- '%s' % (readback['counter_value'], incr))
- trans.commit()
- except Exception as e:
- trans.rollback()
- errors.append(e)
- break
- con.close()
-
- @testing.crashes('mssql', 'FIXME: unknown')
- @testing.crashes('firebird', 'FIXME: unknown')
- @testing.crashes('sybase', 'FIXME: unknown')
- @testing.requires.independent_connections
- def test_queued_update(self):
- """Test SELECT FOR UPDATE with concurrent modifications.
-
- Runs concurrent modifications on a single row in the users
- table, with each mutator trying to increment a value stored in
- user_name.
-
- """
-
- db = testing.db
- db.execute(counters.insert(), counter_id=1, counter_value=0)
- iterations, thread_count = 10, 5
- threads, errors = [], []
- for i in range(thread_count):
- thrd = threading.Thread(target=self.increment,
- args=(iterations, ),
- kwargs={'errors': errors,
- 'update_style': True})
- thrd.start()
- threads.append(thrd)
- for thrd in threads:
- thrd.join()
- assert not errors
- sel = counters.select(whereclause=counters.c.counter_id == 1)
- final = db.execute(sel).first()
- eq_(final['counter_value'], iterations * thread_count)
-
- def overlap(self, ids, errors, update_style):
-
- sel = counters.select(for_update=update_style,
- whereclause=counters.c.counter_id.in_(ids))
- con = testing.db.connect()
- trans = con.begin()
- try:
- rows = con.execute(sel).fetchall()
- time.sleep(0.50)
- trans.commit()
- except Exception as e:
- trans.rollback()
- errors.append(e)
- con.close()
-
- def _threaded_overlap(self, thread_count, groups, update_style=True, pool=5):
- db = testing.db
- for cid in range(pool - 1):
- db.execute(counters.insert(), counter_id=cid + 1,
- counter_value=0)
- errors, threads = [], []
- for i in range(thread_count):
- thrd = threading.Thread(target=self.overlap,
- args=(groups.pop(0), errors,
- update_style))
- time.sleep(0.20) # give the previous thread a chance to start
- # to ensure it gets a lock
- thrd.start()
- threads.append(thrd)
- for thrd in threads:
- thrd.join()
- return errors
-
- @testing.crashes('mssql', 'FIXME: unknown')
- @testing.crashes('firebird', 'FIXME: unknown')
- @testing.crashes('sybase', 'FIXME: unknown')
- @testing.requires.independent_connections
- def test_queued_select(self):
- """Simple SELECT FOR UPDATE conflict test"""
-
- errors = self._threaded_overlap(2, [(1, 2, 3), (3, 4, 5)])
- assert not errors
-
- @testing.crashes('mssql', 'FIXME: unknown')
- @testing.fails_on('mysql', 'No support for NOWAIT')
- @testing.crashes('firebird', 'FIXME: unknown')
- @testing.crashes('sybase', 'FIXME: unknown')
- @testing.requires.independent_connections
- def test_nowait_select(self):
- """Simple SELECT FOR UPDATE NOWAIT conflict test"""
-
- errors = self._threaded_overlap(2, [(1, 2, 3), (3, 4, 5)],
- update_style='nowait')
- assert errors
class IsolationLevelTest(fixtures.TestBase):
__requires__ = ('isolation_level', 'ad_hoc_engines')
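
As a usage sketch of what test_branch_nested_rollback and the other branch tests above assert: a branch created from a connection participates in the parent's transaction rather than opening its own, so a rollback discards work done on either handle. Illustrative only; the engine URL and table name here are placeholders:

from sqlalchemy import create_engine

engine = create_engine("sqlite://")
conn = engine.connect()
conn.execute("create table t (x integer)")

trans = conn.begin()
branch = conn.connect()               # joins the already-open transaction
assert branch.in_transaction()
branch.execute("insert into t (x) values (1)")
trans.rollback()                      # parent rollback discards the
                                      # branch's insert as well
assert conn.scalar("select count(*) from t") == 0
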
diff --git a/test/ext/declarative/test_basic.py b/test/ext/declarative/test_basic.py
index e2c2af679..3fac39cac 100644
--- a/test/ext/declarative/test_basic.py
+++ b/test/ext/declarative/test_basic.py
@@ -1,6 +1,6 @@
from sqlalchemy.testing import eq_, assert_raises, \
- assert_raises_message, is_
+ assert_raises_message
from sqlalchemy.ext import declarative as decl
from sqlalchemy import exc
import sqlalchemy as sa
@@ -10,21 +10,21 @@ from sqlalchemy import MetaData, Integer, String, ForeignKey, \
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.orm import relationship, create_session, class_mapper, \
joinedload, configure_mappers, backref, clear_mappers, \
- deferred, column_property, composite,\
- Session, properties
-from sqlalchemy.testing import eq_
-from sqlalchemy.util import classproperty, with_metaclass
-from sqlalchemy.ext.declarative import declared_attr, AbstractConcreteBase, \
- ConcreteBase, synonym_for
+ column_property, composite, Session, properties
+from sqlalchemy.util import with_metaclass
+from sqlalchemy.ext.declarative import declared_attr, synonym_for
from sqlalchemy.testing import fixtures
-from sqlalchemy.testing.util import gc_collect
Base = None
+User = Address = None
+
+
class DeclarativeTestBase(fixtures.TestBase,
- testing.AssertsExecutionResults,
- testing.AssertsCompiledSQL):
+ testing.AssertsExecutionResults,
+ testing.AssertsCompiledSQL):
__dialect__ = 'default'
+
def setup(self):
global Base
Base = decl.declarative_base(testing.db)
@@ -34,13 +34,15 @@ class DeclarativeTestBase(fixtures.TestBase,
clear_mappers()
Base.metadata.drop_all()
+
class DeclarativeTest(DeclarativeTestBase):
+
def test_basic(self):
class User(Base, fixtures.ComparableEntity):
__tablename__ = 'users'
id = Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
name = Column('name', String(50))
addresses = relationship("Address", backref="user")
@@ -48,7 +50,7 @@ class DeclarativeTest(DeclarativeTestBase):
__tablename__ = 'addresses'
id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
email = Column(String(50), key='_email')
user_id = Column('user_id', Integer, ForeignKey('users.id'),
key='_user_id')
@@ -82,7 +84,7 @@ class DeclarativeTest(DeclarativeTestBase):
__tablename__ = 'users'
id = Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
name = Column('name', String(50))
addresses = relationship(util.u("Address"), backref="user")
@@ -90,7 +92,7 @@ class DeclarativeTest(DeclarativeTestBase):
__tablename__ = 'addresses'
id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
email = Column(String(50), key='_email')
user_id = Column('user_id', Integer, ForeignKey('users.id'),
key='_user_id')
@@ -120,8 +122,10 @@ class DeclarativeTest(DeclarativeTestBase):
__table_args__ = ()
def test_cant_add_columns(self):
- t = Table('t', Base.metadata, Column('id', Integer,
- primary_key=True), Column('data', String))
+ t = Table(
+ 't', Base.metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', String))
def go():
class User(Base):
@@ -158,7 +162,6 @@ class DeclarativeTest(DeclarativeTestBase):
go
)
-
def test_column_repeated_under_prop(self):
def go():
class Foo(Base):
@@ -180,6 +183,7 @@ class DeclarativeTest(DeclarativeTestBase):
class A(Base):
__tablename__ = 'a'
id = Column(Integer, primary_key=True)
+
class B(Base):
__tablename__ = 'b'
id = Column(Integer, primary_key=True)
@@ -196,6 +200,7 @@ class DeclarativeTest(DeclarativeTestBase):
class A(Base):
__tablename__ = 'a'
id = Column(Integer, primary_key=True)
+
class B(Base):
__tablename__ = 'b'
id = Column(Integer, primary_key=True)
@@ -213,11 +218,12 @@ class DeclarativeTest(DeclarativeTestBase):
# metaclass to mock the way zope.interface breaks getattr()
class BrokenMeta(type):
+
def __getattribute__(self, attr):
if attr == 'xyzzy':
raise AttributeError('xyzzy')
else:
- return object.__getattribute__(self,attr)
+ return object.__getattribute__(self, attr)
# even though this class has an xyzzy attribute, getattr(cls,"xyzzy")
# fails
@@ -225,13 +231,13 @@ class DeclarativeTest(DeclarativeTestBase):
xyzzy = "magic"
# _as_declarative() inspects obj.__class__.__bases__
- class User(BrokenParent,fixtures.ComparableEntity):
+ class User(BrokenParent, fixtures.ComparableEntity):
__tablename__ = 'users'
id = Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
name = Column('name', String(50))
- decl.instrument_declarative(User,{},Base.metadata)
+ decl.instrument_declarative(User, {}, Base.metadata)
def test_reserved_identifiers(self):
def go1():
@@ -285,29 +291,28 @@ class DeclarativeTest(DeclarativeTestBase):
email = Column('email', String(50))
user_id = Column('user_id', Integer, ForeignKey('users.id'))
user = relationship("User", primaryjoin=user_id == User.id,
- backref="addresses")
+ backref="addresses")
assert mapperlib.Mapper._new_mappers is True
- u = User()
+ u = User() # noqa
assert User.addresses
assert mapperlib.Mapper._new_mappers is False
def test_string_dependency_resolution(self):
- from sqlalchemy.sql import desc
-
class User(Base, fixtures.ComparableEntity):
__tablename__ = 'users'
id = Column(Integer, primary_key=True,
test_needs_autoincrement=True)
name = Column(String(50))
- addresses = relationship('Address',
- order_by='desc(Address.email)',
- primaryjoin='User.id==Address.user_id',
- foreign_keys='[Address.user_id]',
- backref=backref('user',
- primaryjoin='User.id==Address.user_id',
- foreign_keys='[Address.user_id]'))
+ addresses = relationship(
+ 'Address',
+ order_by='desc(Address.email)',
+ primaryjoin='User.id==Address.user_id',
+ foreign_keys='[Address.user_id]',
+ backref=backref('user',
+ primaryjoin='User.id==Address.user_id',
+ foreign_keys='[Address.user_id]'))
class Address(Base, fixtures.ComparableEntity):
@@ -319,14 +324,17 @@ class DeclarativeTest(DeclarativeTestBase):
Base.metadata.create_all()
sess = create_session()
- u1 = User(name='ed', addresses=[Address(email='abc'),
- Address(email='def'), Address(email='xyz')])
+ u1 = User(
+ name='ed', addresses=[
+ Address(email='abc'),
+ Address(email='def'), Address(email='xyz')])
sess.add(u1)
sess.flush()
sess.expunge_all()
eq_(sess.query(User).filter(User.name == 'ed').one(),
- User(name='ed', addresses=[Address(email='xyz'),
- Address(email='def'), Address(email='abc')]))
+ User(name='ed', addresses=[
+ Address(email='xyz'),
+ Address(email='def'), Address(email='abc')]))
class Foo(Base, fixtures.ComparableEntity):
@@ -340,7 +348,6 @@ class DeclarativeTest(DeclarativeTestBase):
"ColumnProperty", configure_mappers)
def test_string_dependency_resolution_synonym(self):
- from sqlalchemy.sql import desc
class User(Base, fixtures.ComparableEntity):
@@ -416,12 +423,13 @@ class DeclarativeTest(DeclarativeTestBase):
id = Column(Integer, primary_key=True)
b_id = Column(ForeignKey('b.id'))
- d = relationship("D",
- secondary="join(B, D, B.d_id == D.id)."
- "join(C, C.d_id == D.id)",
- primaryjoin="and_(A.b_id == B.id, A.id == C.a_id)",
- secondaryjoin="D.id == B.d_id",
- )
+ d = relationship(
+ "D",
+ secondary="join(B, D, B.d_id == D.id)."
+ "join(C, C.d_id == D.id)",
+ primaryjoin="and_(A.b_id == B.id, A.id == C.a_id)",
+ secondaryjoin="D.id == B.d_id",
+ )
class B(Base):
__tablename__ = 'b'
@@ -444,9 +452,9 @@ class DeclarativeTest(DeclarativeTestBase):
self.assert_compile(
s.query(A).join(A.d),
"SELECT a.id AS a_id, a.b_id AS a_b_id FROM a JOIN "
- "(b AS b_1 JOIN d AS d_1 ON b_1.d_id = d_1.id "
- "JOIN c AS c_1 ON c_1.d_id = d_1.id) ON a.b_id = b_1.id "
- "AND a.id = c_1.a_id JOIN d ON d.id = b_1.d_id",
+ "(b AS b_1 JOIN d AS d_1 ON b_1.d_id = d_1.id "
+ "JOIN c AS c_1 ON c_1.d_id = d_1.id) ON a.b_id = b_1.id "
+ "AND a.id = c_1.a_id JOIN d ON d.id = b_1.d_id",
)
def test_string_dependency_resolution_no_table(self):
@@ -474,6 +482,7 @@ class DeclarativeTest(DeclarativeTestBase):
id = Column(Integer, primary_key=True,
test_needs_autoincrement=True)
name = Column(String(50))
+
class Address(Base, fixtures.ComparableEntity):
__tablename__ = 'addresses'
@@ -481,7 +490,8 @@ class DeclarativeTest(DeclarativeTestBase):
test_needs_autoincrement=True)
email = Column(String(50))
user_id = Column(Integer)
- user = relationship("User",
+ user = relationship(
+ "User",
primaryjoin="remote(User.id)==foreign(Address.user_id)"
)
@@ -497,9 +507,9 @@ class DeclarativeTest(DeclarativeTestBase):
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
- addresses = relationship('Address',
- primaryjoin='User.id==Address.user_id.prop.columns['
- '0]')
+ addresses = relationship(
+ 'Address',
+ primaryjoin='User.id==Address.user_id.prop.columns[0]')
class Address(Base, fixtures.ComparableEntity):
@@ -516,9 +526,10 @@ class DeclarativeTest(DeclarativeTestBase):
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
- addresses = relationship('%s.Address' % __name__,
- primaryjoin='%s.User.id==%s.Address.user_id.prop.columns['
- '0]' % (__name__, __name__))
+ addresses = relationship(
+ '%s.Address' % __name__,
+ primaryjoin='%s.User.id==%s.Address.user_id.prop.columns[0]'
+ % (__name__, __name__))
class Address(Base, fixtures.ComparableEntity):
@@ -538,8 +549,8 @@ class DeclarativeTest(DeclarativeTestBase):
id = Column(Integer, primary_key=True)
name = Column(String(50))
addresses = relationship('Address',
- primaryjoin='User.id==Address.user_id',
- backref='user')
+ primaryjoin='User.id==Address.user_id',
+ backref='user')
class Address(Base, fixtures.ComparableEntity):
@@ -571,10 +582,11 @@ class DeclarativeTest(DeclarativeTestBase):
id = Column(Integer, primary_key=True)
name = Column(String(50))
- user_to_prop = Table('user_to_prop', Base.metadata,
- Column('user_id', Integer,
- ForeignKey('users.id')), Column('prop_id',
- Integer, ForeignKey('props.id')))
+ user_to_prop = Table(
+ 'user_to_prop', Base.metadata,
+ Column('user_id', Integer, ForeignKey('users.id')),
+ Column('prop_id', Integer, ForeignKey('props.id')))
+
configure_mappers()
assert class_mapper(User).get_property('props').secondary \
is user_to_prop
@@ -585,27 +597,29 @@ class DeclarativeTest(DeclarativeTestBase):
class User(Base):
__tablename__ = 'users'
- __table_args__ = {'schema':'fooschema'}
+ __table_args__ = {'schema': 'fooschema'}
id = Column(Integer, primary_key=True)
name = Column(String(50))
- props = relationship('Prop', secondary='fooschema.user_to_prop',
- primaryjoin='User.id==fooschema.user_to_prop.c.user_id',
- secondaryjoin='fooschema.user_to_prop.c.prop_id==Prop.id',
- backref='users')
+ props = relationship(
+ 'Prop', secondary='fooschema.user_to_prop',
+ primaryjoin='User.id==fooschema.user_to_prop.c.user_id',
+ secondaryjoin='fooschema.user_to_prop.c.prop_id==Prop.id',
+ backref='users')
class Prop(Base):
__tablename__ = 'props'
- __table_args__ = {'schema':'fooschema'}
+ __table_args__ = {'schema': 'fooschema'}
id = Column(Integer, primary_key=True)
name = Column(String(50))
- user_to_prop = Table('user_to_prop', Base.metadata,
- Column('user_id', Integer, ForeignKey('fooschema.users.id')),
- Column('prop_id',Integer, ForeignKey('fooschema.props.id')),
- schema='fooschema')
+ user_to_prop = Table(
+ 'user_to_prop', Base.metadata,
+ Column('user_id', Integer, ForeignKey('fooschema.users.id')),
+ Column('prop_id', Integer, ForeignKey('fooschema.props.id')),
+ schema='fooschema')
configure_mappers()
assert class_mapper(User).get_property('props').secondary \
@@ -618,9 +632,11 @@ class DeclarativeTest(DeclarativeTestBase):
__tablename__ = 'parent'
id = Column(Integer, primary_key=True)
name = Column(String)
- children = relationship("Child",
- primaryjoin="Parent.name==remote(foreign(func.lower(Child.name_upper)))"
- )
+ children = relationship(
+ "Child",
+ primaryjoin="Parent.name=="
+ "remote(foreign(func.lower(Child.name_upper)))"
+ )
class Child(Base):
__tablename__ = 'child'
@@ -667,8 +683,8 @@ class DeclarativeTest(DeclarativeTestBase):
test_needs_autoincrement=True)
name = Column(String(50))
addresses = relationship('Address', order_by=Address.email,
- foreign_keys=Address.user_id,
- remote_side=Address.user_id)
+ foreign_keys=Address.user_id,
+ remote_side=Address.user_id)
# get the mapper for User. User mapper will compile,
# "addresses" relationship will call upon Address.user_id for
@@ -681,14 +697,16 @@ class DeclarativeTest(DeclarativeTestBase):
class_mapper(User)
Base.metadata.create_all()
sess = create_session()
- u1 = User(name='ed', addresses=[Address(email='abc'),
- Address(email='xyz'), Address(email='def')])
+ u1 = User(name='ed', addresses=[
+ Address(email='abc'),
+ Address(email='xyz'), Address(email='def')])
sess.add(u1)
sess.flush()
sess.expunge_all()
eq_(sess.query(User).filter(User.name == 'ed').one(),
- User(name='ed', addresses=[Address(email='abc'),
- Address(email='def'), Address(email='xyz')]))
+ User(name='ed', addresses=[
+ Address(email='abc'),
+ Address(email='def'), Address(email='xyz')]))
def test_nice_dependency_error(self):
@@ -726,14 +744,16 @@ class DeclarativeTest(DeclarativeTestBase):
# the exception is preserved. Remains the
# same through repeated calls.
for i in range(3):
- assert_raises_message(sa.exc.InvalidRequestError,
- "^One or more mappers failed to initialize - "
- "can't proceed with initialization of other "
- "mappers. Original exception was: When initializing.*",
- configure_mappers)
+ assert_raises_message(
+ sa.exc.InvalidRequestError,
+ "^One or more mappers failed to initialize - "
+ "can't proceed with initialization of other "
+ "mappers. Original exception was: When initializing.*",
+ configure_mappers)
def test_custom_base(self):
class MyBase(object):
+
def foobar(self):
return "foobar"
Base = decl.declarative_base(cls=MyBase)
@@ -761,7 +781,7 @@ class DeclarativeTest(DeclarativeTestBase):
Base.metadata.create_all()
configure_mappers()
assert class_mapper(Detail).get_property('master'
- ).strategy.use_get
+ ).strategy.use_get
m1 = Master()
d1 = Detail(master=m1)
sess = create_session()
@@ -821,13 +841,15 @@ class DeclarativeTest(DeclarativeTestBase):
eq_(Address.__table__.c['_email'].name, 'email')
eq_(Address.__table__.c['_user_id'].name, 'user_id')
u1 = User(name='u1', addresses=[Address(email='one'),
- Address(email='two')])
+ Address(email='two')])
sess = create_session()
sess.add(u1)
sess.flush()
sess.expunge_all()
- eq_(sess.query(User).all(), [User(name='u1',
- addresses=[Address(email='one'), Address(email='two')])])
+ eq_(sess.query(User).all(), [
+ User(
+ name='u1',
+ addresses=[Address(email='one'), Address(email='two')])])
a1 = sess.query(Address).filter(Address.email == 'two').one()
eq_(a1, Address(email='two'))
eq_(a1.user, User(name='u1'))
@@ -842,7 +864,8 @@ class DeclarativeTest(DeclarativeTestBase):
class ASub(A):
brap = A.data
assert ASub.brap.property is A.data.property
- assert isinstance(ASub.brap.original_property, properties.SynonymProperty)
+ assert isinstance(
+ ASub.brap.original_property, properties.SynonymProperty)
def test_alt_name_attr_subclass_relationship_inline(self):
# [ticket:2900]
@@ -857,10 +880,12 @@ class DeclarativeTest(DeclarativeTestBase):
id = Column('id', Integer, primary_key=True)
configure_mappers()
+
class ASub(A):
brap = A.b
assert ASub.brap.property is A.b.property
- assert isinstance(ASub.brap.original_property, properties.SynonymProperty)
+ assert isinstance(
+ ASub.brap.original_property, properties.SynonymProperty)
ASub(brap=B())
def test_alt_name_attr_subclass_column_attrset(self):
@@ -881,6 +906,7 @@ class DeclarativeTest(DeclarativeTestBase):
b_id = Column(Integer, ForeignKey('b.id'))
b = relationship("B", backref="as_")
A.brap = A.b
+
class B(Base):
__tablename__ = 'b'
id = Column('id', Integer, primary_key=True)
@@ -889,7 +915,6 @@ class DeclarativeTest(DeclarativeTestBase):
assert isinstance(A.brap.original_property, properties.SynonymProperty)
A(brap=B())
-
def test_eager_order_by(self):
class Address(Base, fixtures.ComparableEntity):
@@ -910,14 +935,14 @@ class DeclarativeTest(DeclarativeTestBase):
Base.metadata.create_all()
u1 = User(name='u1', addresses=[Address(email='two'),
- Address(email='one')])
+ Address(email='one')])
sess = create_session()
sess.add(u1)
sess.flush()
sess.expunge_all()
eq_(sess.query(User).options(joinedload(User.addresses)).all(),
[User(name='u1', addresses=[Address(email='one'),
- Address(email='two')])])
+ Address(email='two')])])
def test_order_by_multi(self):
@@ -936,17 +961,17 @@ class DeclarativeTest(DeclarativeTestBase):
test_needs_autoincrement=True)
name = Column('name', String(50))
addresses = relationship('Address',
- order_by=(Address.email, Address.id))
+ order_by=(Address.email, Address.id))
Base.metadata.create_all()
u1 = User(name='u1', addresses=[Address(email='two'),
- Address(email='one')])
+ Address(email='one')])
sess = create_session()
sess.add(u1)
sess.flush()
sess.expunge_all()
u = sess.query(User).filter(User.name == 'u1').one()
- a = u.addresses
+ u.addresses
def test_as_declarative(self):
@@ -971,13 +996,15 @@ class DeclarativeTest(DeclarativeTestBase):
decl.instrument_declarative(Address, reg, Base.metadata)
Base.metadata.create_all()
u1 = User(name='u1', addresses=[Address(email='one'),
- Address(email='two')])
+ Address(email='two')])
sess = create_session()
sess.add(u1)
sess.flush()
sess.expunge_all()
- eq_(sess.query(User).all(), [User(name='u1',
- addresses=[Address(email='one'), Address(email='two')])])
+ eq_(sess.query(User).all(), [
+ User(
+ name='u1',
+ addresses=[Address(email='one'), Address(email='two')])])
def test_custom_mapper_attribute(self):
@@ -1045,7 +1072,7 @@ class DeclarativeTest(DeclarativeTestBase):
__tablename__ = 'foo'
__table_args__ = ForeignKeyConstraint(['id'], ['foo.id'
- ])
+ ])
id = Column('id', Integer, primary_key=True)
assert_raises_message(sa.exc.ArgumentError,
'__table_args__ value must be a tuple, ', err)
@@ -1107,17 +1134,18 @@ class DeclarativeTest(DeclarativeTestBase):
User.address_count = \
sa.orm.column_property(sa.select([sa.func.count(Address.id)]).
- where(Address.user_id
- == User.id).as_scalar())
+ where(Address.user_id
+ == User.id).as_scalar())
Base.metadata.create_all()
u1 = User(name='u1', addresses=[Address(email='one'),
- Address(email='two')])
+ Address(email='two')])
sess = create_session()
sess.add(u1)
sess.flush()
sess.expunge_all()
- eq_(sess.query(User).all(), [User(name='u1', address_count=2,
- addresses=[Address(email='one'), Address(email='two')])])
+ eq_(sess.query(User).all(), [
+ User(name='u1', address_count=2,
+ addresses=[Address(email='one'), Address(email='two')])])
def test_useless_declared_attr(self):
class Address(Base, fixtures.ComparableEntity):
@@ -1140,23 +1168,26 @@ class DeclarativeTest(DeclarativeTestBase):
def address_count(cls):
# this doesn't really gain us anything. but if
# one is used, let's have it function as expected...
- return sa.orm.column_property(sa.select([sa.func.count(Address.id)]).
- where(Address.user_id == cls.id))
+ return sa.orm.column_property(
+ sa.select([sa.func.count(Address.id)]).
+ where(Address.user_id == cls.id))
Base.metadata.create_all()
u1 = User(name='u1', addresses=[Address(email='one'),
- Address(email='two')])
+ Address(email='two')])
sess = create_session()
sess.add(u1)
sess.flush()
sess.expunge_all()
- eq_(sess.query(User).all(), [User(name='u1', address_count=2,
- addresses=[Address(email='one'), Address(email='two')])])
+ eq_(sess.query(User).all(), [
+ User(name='u1', address_count=2,
+ addresses=[Address(email='one'), Address(email='two')])])
def test_declared_on_base_class(self):
class MyBase(Base):
__tablename__ = 'foo'
id = Column(Integer, primary_key=True)
+
@declared_attr
def somecol(cls):
return Column(Integer)
@@ -1213,18 +1244,19 @@ class DeclarativeTest(DeclarativeTestBase):
adr_count = \
sa.orm.column_property(
sa.select([sa.func.count(Address.id)],
- Address.user_id == id).as_scalar())
+ Address.user_id == id).as_scalar())
addresses = relationship(Address)
Base.metadata.create_all()
u1 = User(name='u1', addresses=[Address(email='one'),
- Address(email='two')])
+ Address(email='two')])
sess = create_session()
sess.add(u1)
sess.flush()
sess.expunge_all()
- eq_(sess.query(User).all(), [User(name='u1', adr_count=2,
- addresses=[Address(email='one'), Address(email='two')])])
+ eq_(sess.query(User).all(), [
+ User(name='u1', adr_count=2,
+ addresses=[Address(email='one'), Address(email='two')])])
def test_column_properties_2(self):
@@ -1248,7 +1280,7 @@ class DeclarativeTest(DeclarativeTestBase):
eq_(set(User.__table__.c.keys()), set(['id', 'name']))
eq_(set(Address.__table__.c.keys()), set(['id', 'email',
- 'user_id']))
+ 'user_id']))
def test_deferred(self):
@@ -1274,86 +1306,91 @@ class DeclarativeTest(DeclarativeTestBase):
def test_composite_inline(self):
class AddressComposite(fixtures.ComparableEntity):
+
def __init__(self, street, state):
self.street = street
self.state = state
+
def __composite_values__(self):
return [self.street, self.state]
class User(Base, fixtures.ComparableEntity):
__tablename__ = 'user'
id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
address = composite(AddressComposite,
- Column('street', String(50)),
- Column('state', String(2)),
- )
+ Column('street', String(50)),
+ Column('state', String(2)),
+ )
Base.metadata.create_all()
sess = Session()
sess.add(User(
- address=AddressComposite('123 anywhere street',
- 'MD')
- ))
+ address=AddressComposite('123 anywhere street',
+ 'MD')
+ ))
sess.commit()
eq_(
sess.query(User).all(),
[User(address=AddressComposite('123 anywhere street',
- 'MD'))]
+ 'MD'))]
)
def test_composite_separate(self):
class AddressComposite(fixtures.ComparableEntity):
+
def __init__(self, street, state):
self.street = street
self.state = state
+
def __composite_values__(self):
return [self.street, self.state]
class User(Base, fixtures.ComparableEntity):
__tablename__ = 'user'
id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
street = Column(String(50))
state = Column(String(2))
address = composite(AddressComposite,
- street, state)
+ street, state)
Base.metadata.create_all()
sess = Session()
sess.add(User(
- address=AddressComposite('123 anywhere street',
- 'MD')
- ))
+ address=AddressComposite('123 anywhere street',
+ 'MD')
+ ))
sess.commit()
eq_(
sess.query(User).all(),
[User(address=AddressComposite('123 anywhere street',
- 'MD'))]
+ 'MD'))]
)
def test_mapping_to_join(self):
users = Table('users', Base.metadata,
- Column('id', Integer, primary_key=True)
- )
+ Column('id', Integer, primary_key=True)
+ )
addresses = Table('addresses', Base.metadata,
- Column('id', Integer, primary_key=True),
- Column('user_id', Integer, ForeignKey('users.id'))
- )
+ Column('id', Integer, primary_key=True),
+ Column('user_id', Integer, ForeignKey('users.id'))
+ )
usersaddresses = sa.join(users, addresses, users.c.id
== addresses.c.user_id)
+
class User(Base):
__table__ = usersaddresses
- __table_args__ = {'primary_key':[users.c.id]}
+ __table_args__ = {'primary_key': [users.c.id]}
# need to use column_property for now
user_id = column_property(users.c.id, addresses.c.user_id)
address_id = addresses.c.id
assert User.__mapper__.get_property('user_id').columns[0] \
- is users.c.id
+ is users.c.id
assert User.__mapper__.get_property('user_id').columns[1] \
- is addresses.c.user_id
+ is addresses.c.user_id
def test_synonym_inline(self):
@@ -1372,7 +1409,7 @@ class DeclarativeTest(DeclarativeTestBase):
name = sa.orm.synonym('_name',
descriptor=property(_get_name,
- _set_name))
+ _set_name))
Base.metadata.create_all()
sess = create_session()
@@ -1381,7 +1418,7 @@ class DeclarativeTest(DeclarativeTestBase):
sess.add(u1)
sess.flush()
eq_(sess.query(User).filter(User.name == 'SOMENAME someuser'
- ).one(), u1)
+ ).one(), u1)
def test_synonym_no_descriptor(self):
from sqlalchemy.orm.properties import ColumnProperty
@@ -1434,7 +1471,7 @@ class DeclarativeTest(DeclarativeTestBase):
sess.add(u1)
sess.flush()
eq_(sess.query(User).filter(User.name == 'SOMENAME someuser'
- ).one(), u1)
+ ).one(), u1)
def test_reentrant_compile_via_foreignkey(self):
@@ -1465,13 +1502,14 @@ class DeclarativeTest(DeclarativeTestBase):
)
Base.metadata.create_all()
u1 = User(name='u1', addresses=[Address(email='one'),
- Address(email='two')])
+ Address(email='two')])
sess = create_session()
sess.add(u1)
sess.flush()
sess.expunge_all()
- eq_(sess.query(User).all(), [User(name='u1',
- addresses=[Address(email='one'), Address(email='two')])])
+ eq_(sess.query(User).all(), [
+ User(name='u1',
+ addresses=[Address(email='one'), Address(email='two')])])
def test_relationship_reference(self):
@@ -1490,21 +1528,22 @@ class DeclarativeTest(DeclarativeTestBase):
test_needs_autoincrement=True)
name = Column('name', String(50))
addresses = relationship('Address', backref='user',
- primaryjoin=id == Address.user_id)
+ primaryjoin=id == Address.user_id)
User.address_count = \
sa.orm.column_property(sa.select([sa.func.count(Address.id)]).
- where(Address.user_id
- == User.id).as_scalar())
+ where(Address.user_id
+ == User.id).as_scalar())
Base.metadata.create_all()
u1 = User(name='u1', addresses=[Address(email='one'),
- Address(email='two')])
+ Address(email='two')])
sess = create_session()
sess.add(u1)
sess.flush()
sess.expunge_all()
- eq_(sess.query(User).all(), [User(name='u1', address_count=2,
- addresses=[Address(email='one'), Address(email='two')])])
+ eq_(sess.query(User).all(), [
+ User(name='u1', address_count=2,
+ addresses=[Address(email='one'), Address(email='two')])])
def test_pk_with_fk_init(self):
@@ -1526,9 +1565,11 @@ class DeclarativeTest(DeclarativeTestBase):
def test_with_explicit_autoloaded(self):
meta = MetaData(testing.db)
- t1 = Table('t1', meta, Column('id', String(50),
+ t1 = Table(
+ 't1', meta,
+ Column('id', String(50),
primary_key=True, test_needs_autoincrement=True),
- Column('data', String(50)))
+ Column('data', String(50)))
meta.create_all()
try:
@@ -1541,7 +1582,7 @@ class DeclarativeTest(DeclarativeTestBase):
sess.add(m)
sess.flush()
eq_(t1.select().execute().fetchall(), [('someid', 'somedata'
- )])
+ )])
finally:
meta.drop_all()
@@ -1584,7 +1625,7 @@ class DeclarativeTest(DeclarativeTestBase):
op,
other,
**kw
- ):
+ ):
return op(self.upperself, other, **kw)
class User(Base, fixtures.ComparableEntity):
@@ -1612,7 +1653,7 @@ class DeclarativeTest(DeclarativeTestBase):
eq_(rt, u1)
sess.expunge_all()
rt = sess.query(User).filter(User.uc_name.startswith('SOMEUSE'
- )).one()
+ )).one()
eq_(rt, u1)
def test_duplicate_classes_in_base(self):
@@ -1631,7 +1672,6 @@ class DeclarativeTest(DeclarativeTestBase):
)
-
def _produce_test(inline, stringbased):
class ExplicitJoinTest(fixtures.MappedTest):
@@ -1657,35 +1697,43 @@ def _produce_test(inline, stringbased):
user_id = Column(Integer, ForeignKey('users.id'))
if inline:
if stringbased:
- user = relationship('User',
- primaryjoin='User.id==Address.user_id',
- backref='addresses')
+ user = relationship(
+ 'User',
+ primaryjoin='User.id==Address.user_id',
+ backref='addresses')
else:
user = relationship(User, primaryjoin=User.id
- == user_id, backref='addresses')
+ == user_id, backref='addresses')
if not inline:
configure_mappers()
if stringbased:
- Address.user = relationship('User',
- primaryjoin='User.id==Address.user_id',
- backref='addresses')
+ Address.user = relationship(
+ 'User',
+ primaryjoin='User.id==Address.user_id',
+ backref='addresses')
else:
- Address.user = relationship(User,
- primaryjoin=User.id == Address.user_id,
- backref='addresses')
+ Address.user = relationship(
+ User,
+ primaryjoin=User.id == Address.user_id,
+ backref='addresses')
@classmethod
def insert_data(cls):
- params = [dict(list(zip(('id', 'name'), column_values)))
- for column_values in [(7, 'jack'), (8, 'ed'), (9,
- 'fred'), (10, 'chuck')]]
+ params = [
+ dict(list(zip(('id', 'name'), column_values)))
+ for column_values in [
+ (7, 'jack'), (8, 'ed'),
+ (9, 'fred'), (10, 'chuck')]]
+
User.__table__.insert().execute(params)
- Address.__table__.insert().execute([dict(list(zip(('id',
- 'user_id', 'email'), column_values)))
- for column_values in [(1, 7, 'jack@bean.com'), (2,
- 8, 'ed@wood.com'), (3, 8, 'ed@bettyboop.com'), (4,
- 8, 'ed@lala.com'), (5, 9, 'fred@fred.com')]])
+ Address.__table__.insert().execute([
+ dict(list(zip(('id', 'user_id', 'email'), column_values)))
+ for column_values in [
+ (1, 7, 'jack@bean.com'),
+ (2, 8, 'ed@wood.com'),
+ (3, 8, 'ed@bettyboop.com'),
+ (4, 8, 'ed@lala.com'), (5, 9, 'fred@fred.com')]])
def test_aliased_join(self):
@@ -1699,13 +1747,14 @@ def _produce_test(inline, stringbased):
sess = create_session()
eq_(sess.query(User).join(User.addresses,
- aliased=True).filter(Address.email == 'ed@wood.com'
- ).filter(User.addresses.any(Address.email
- == 'jack@bean.com')).all(), [])
-
- ExplicitJoinTest.__name__ = 'ExplicitJoinTest%s%s' % (inline
- and 'Inline' or 'Separate', stringbased and 'String'
- or 'Literal')
+ aliased=True).filter(
+ Address.email == 'ed@wood.com').filter(
+ User.addresses.any(Address.email == 'jack@bean.com')).all(),
+ [])
+
+ ExplicitJoinTest.__name__ = 'ExplicitJoinTest%s%s' % (
+ inline and 'Inline' or 'Separate',
+ stringbased and 'String' or 'Literal')
return ExplicitJoinTest
for inline in True, False:
@@ -1713,4 +1762,3 @@ for inline in True, False:
testclass = _produce_test(inline, stringbased)
exec('%s = testclass' % testclass.__name__)
del testclass
-
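
The declarative tests reformatted above lean heavily on string-based configuration. For context, the pattern they exercise looks like this minimal sketch, where class names and expressions such as 'desc(Address.email)' are resolved against the declarative class registry when mappers are configured:

from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import configure_mappers, relationship

Base = declarative_base()

class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    addresses = relationship(
        'Address',                           # resolved by class name
        order_by='desc(Address.email)',      # evaluated at configure time
        primaryjoin='User.id == Address.user_id')

class Address(Base):
    __tablename__ = 'addresses'
    id = Column(Integer, primary_key=True)
    email = Column(String(50))
    user_id = Column(Integer, ForeignKey('users.id'))

configure_mappers()   # the string arguments above are resolved here
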
diff --git a/test/ext/declarative/test_clsregistry.py b/test/ext/declarative/test_clsregistry.py
index e78a1abbe..535fd00b3 100644
--- a/test/ext/declarative/test_clsregistry.py
+++ b/test/ext/declarative/test_clsregistry.py
@@ -5,7 +5,9 @@ from sqlalchemy import exc, MetaData
from sqlalchemy.ext.declarative import clsregistry
import weakref
+
class MockClass(object):
+
def __init__(self, base, name):
self._decl_class_registry = base
tokens = name.split(".")
@@ -183,7 +185,7 @@ class ClsRegistryTest(fixtures.TestBase):
f1 = MockClass(base, "foo.bar.Foo")
clsregistry.add_class("Foo", f1)
reg = base['_sa_module_registry']
- mod_entry = reg['foo']['bar']
+ mod_entry = reg['foo']['bar'] # noqa
resolver = clsregistry._resolver(f1, MockProp())
resolver = resolver("foo")
assert_raises_message(
@@ -232,4 +234,3 @@ class ClsRegistryTest(fixtures.TestBase):
del f4
gc_collect()
assert 'single' not in reg
-
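
test_clsregistry.py above verifies that the declarative class registry holds entries weakly, so mapped classes can be garbage collected once unreferenced. The mechanism is in the spirit of this simplified sketch; the names are illustrative, not the clsregistry internals:

import weakref

class WeakClassRegistry(object):
    def __init__(self):
        self._classes = {}

    def add(self, name, cls):
        # the callback evicts the entry once the class is collected
        self._classes[name] = weakref.ref(
            cls, lambda ref, name=name: self._classes.pop(name, None))

    def resolve(self, name):
        ref = self._classes.get(name)
        cls = ref() if ref is not None else None
        if cls is None:
            raise KeyError("class %r is not registered" % name)
        return cls
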
diff --git a/test/ext/declarative/test_inheritance.py b/test/ext/declarative/test_inheritance.py
index edff4421e..6ea37e4d3 100644
--- a/test/ext/declarative/test_inheritance.py
+++ b/test/ext/declarative/test_inheritance.py
@@ -10,12 +10,14 @@ from sqlalchemy.orm import relationship, create_session, class_mapper, \
configure_mappers, clear_mappers, \
polymorphic_union, deferred, Session
from sqlalchemy.ext.declarative import declared_attr, AbstractConcreteBase, \
- ConcreteBase, has_inherited_table
-from sqlalchemy.testing import fixtures
+ ConcreteBase, has_inherited_table
+from sqlalchemy.testing import fixtures, mock
Base = None
+
class DeclarativeTestBase(fixtures.TestBase, testing.AssertsExecutionResults):
+
def setup(self):
global Base
Base = decl.declarative_base(testing.db)
@@ -25,6 +27,7 @@ class DeclarativeTestBase(fixtures.TestBase, testing.AssertsExecutionResults):
clear_mappers()
Base.metadata.drop_all()
+
class DeclarativeInheritanceTest(DeclarativeTestBase):
def test_we_must_copy_mapper_args(self):
@@ -65,7 +68,6 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
assert class_mapper(Person).version_id_col == 'a'
assert class_mapper(Person).include_properties == set(['id', 'a', 'b'])
-
def test_custom_join_condition(self):
class Foo(Base):
@@ -123,21 +125,23 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
Base.metadata.create_all()
sess = create_session()
- c1 = Company(name='MegaCorp, Inc.',
- employees=[Engineer(name='dilbert',
- primary_language='java'), Engineer(name='wally',
- primary_language='c++'), Manager(name='dogbert',
- golf_swing='fore!')])
+ c1 = Company(
+ name='MegaCorp, Inc.',
+ employees=[
+ Engineer(name='dilbert', primary_language='java'),
+ Engineer(name='wally', primary_language='c++'),
+ Manager(name='dogbert', golf_swing='fore!')])
+
c2 = Company(name='Elbonia, Inc.',
employees=[Engineer(name='vlad',
- primary_language='cobol')])
+ primary_language='cobol')])
sess.add(c1)
sess.add(c2)
sess.flush()
sess.expunge_all()
eq_(sess.query(Company).filter(Company.employees.of_type(Engineer).
- any(Engineer.primary_language
- == 'cobol')).first(), c2)
+ any(Engineer.primary_language
+ == 'cobol')).first(), c2)
# ensure that the Manager mapper was compiled with the Manager id
# column as higher priority. this ensures that "Manager.id"
@@ -145,8 +149,8 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
# table (reversed from 0.6's behavior.)
eq_(
- Manager.id.property.columns,
- [Manager.__table__.c.id, Person.__table__.c.id]
+ Manager.id.property.columns,
+ [Manager.__table__.c.id, Person.__table__.c.id]
)
# assert that the "id" column is available without a second
@@ -157,13 +161,13 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
def go():
assert sess.query(Manager).filter(Manager.name == 'dogbert'
- ).one().id
+ ).one().id
self.assert_sql_count(testing.db, go, 1)
sess.expunge_all()
def go():
assert sess.query(Person).filter(Manager.name == 'dogbert'
- ).one().id
+ ).one().id
self.assert_sql_count(testing.db, go, 1)
@@ -186,7 +190,7 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
primary_key=True)
Engineer.primary_language = Column('primary_language',
- String(50))
+ String(50))
Base.metadata.create_all()
sess = create_session()
e1 = Engineer(primary_language='java', name='dilbert')
@@ -194,7 +198,7 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
sess.flush()
sess.expunge_all()
eq_(sess.query(Person).first(),
- Engineer(primary_language='java', name='dilbert'))
+ Engineer(primary_language='java', name='dilbert'))
def test_add_parentcol_after_the_fact(self):
@@ -258,8 +262,8 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
sess.add(e1)
sess.flush()
sess.expunge_all()
- eq_(sess.query(Person).first(), Admin(primary_language='java',
- name='dilbert', workstation='foo'))
+ eq_(sess.query(Person).first(),
+ Admin(primary_language='java', name='dilbert', workstation='foo'))
def test_subclass_mixin(self):
@@ -331,26 +335,25 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
class PlanBooking(Booking):
__tablename__ = 'plan_booking'
id = Column(Integer, ForeignKey(Booking.id),
- primary_key=True)
+ primary_key=True)
# referencing PlanBooking.id gives us the column
# on plan_booking, not booking
class FeatureBooking(Booking):
__tablename__ = 'feature_booking'
id = Column(Integer, ForeignKey(Booking.id),
- primary_key=True)
+ primary_key=True)
plan_booking_id = Column(Integer,
- ForeignKey(PlanBooking.id))
+ ForeignKey(PlanBooking.id))
plan_booking = relationship(PlanBooking,
- backref='feature_bookings')
+ backref='feature_bookings')
assert FeatureBooking.__table__.c.plan_booking_id.\
- references(PlanBooking.__table__.c.id)
+ references(PlanBooking.__table__.c.id)
assert FeatureBooking.__table__.c.id.\
- references(Booking.__table__.c.id)
-
+ references(Booking.__table__.c.id)
def test_single_colsonbase(self):
"""test single inheritance where all the columns are on the base
@@ -387,23 +390,26 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
Base.metadata.create_all()
sess = create_session()
- c1 = Company(name='MegaCorp, Inc.',
- employees=[Engineer(name='dilbert',
- primary_language='java'), Engineer(name='wally',
- primary_language='c++'), Manager(name='dogbert',
- golf_swing='fore!')])
+ c1 = Company(
+ name='MegaCorp, Inc.',
+ employees=[
+ Engineer(name='dilbert', primary_language='java'),
+ Engineer(name='wally', primary_language='c++'),
+ Manager(name='dogbert', golf_swing='fore!')])
+
c2 = Company(name='Elbonia, Inc.',
employees=[Engineer(name='vlad',
- primary_language='cobol')])
+ primary_language='cobol')])
sess.add(c1)
sess.add(c2)
sess.flush()
sess.expunge_all()
eq_(sess.query(Person).filter(Engineer.primary_language
- == 'cobol').first(), Engineer(name='vlad'))
+ == 'cobol').first(),
+ Engineer(name='vlad'))
eq_(sess.query(Company).filter(Company.employees.of_type(Engineer).
- any(Engineer.primary_language
- == 'cobol')).first(), c2)
+ any(Engineer.primary_language
+ == 'cobol')).first(), c2)
def test_single_colsonsub(self):
"""test single inheritance where the columns are local to their
@@ -470,15 +476,17 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
sess.flush()
sess.expunge_all()
eq_(sess.query(Person).filter(Engineer.primary_language
- == 'cobol').first(), Engineer(name='vlad'))
+ == 'cobol').first(),
+ Engineer(name='vlad'))
eq_(sess.query(Company).filter(Company.employees.of_type(Engineer).
- any(Engineer.primary_language
- == 'cobol')).first(), c2)
+ any(Engineer.primary_language
+ == 'cobol')).first(), c2)
eq_(sess.query(Engineer).filter_by(primary_language='cobol'
- ).one(), Engineer(name='vlad', primary_language='cobol'))
+ ).one(),
+ Engineer(name='vlad', primary_language='cobol'))
@testing.skip_if(lambda: testing.against('oracle'),
- "Test has an empty insert in it at the moment")
+ "Test has an empty insert in it at the moment")
def test_columns_single_inheritance_conflict_resolution(self):
"""Test that a declared_attr can return the existing column and it will
be ignored. This allows conditional columns to be added.
@@ -491,25 +499,29 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
id = Column(Integer, primary_key=True)
class Engineer(Person):
+
"""single table inheritance"""
@declared_attr
def target_id(cls):
- return cls.__table__.c.get('target_id',
- Column(Integer, ForeignKey('other.id'))
- )
+ return cls.__table__.c.get(
+ 'target_id',
+ Column(Integer, ForeignKey('other.id')))
+
@declared_attr
def target(cls):
return relationship("Other")
class Manager(Person):
+
"""single table inheritance"""
@declared_attr
def target_id(cls):
- return cls.__table__.c.get('target_id',
- Column(Integer, ForeignKey('other.id'))
- )
+ return cls.__table__.c.get(
+ 'target_id',
+ Column(Integer, ForeignKey('other.id')))
+
@declared_attr
def target(cls):
return relationship("Other")
@@ -534,11 +546,10 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
Engineer(target=o1),
Manager(target=o2),
Manager(target=o1)
- ])
+ ])
session.commit()
eq_(session.query(Engineer).first().target, o1)
-
def test_joined_from_single(self):
class Company(Base, fixtures.ComparableEntity):
@@ -595,12 +606,13 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
sess.expunge_all()
eq_(sess.query(Person).with_polymorphic(Engineer).
filter(Engineer.primary_language
- == 'cobol').first(), Engineer(name='vlad'))
+ == 'cobol').first(), Engineer(name='vlad'))
eq_(sess.query(Company).filter(Company.employees.of_type(Engineer).
- any(Engineer.primary_language
- == 'cobol')).first(), c2)
+ any(Engineer.primary_language
+ == 'cobol')).first(), c2)
eq_(sess.query(Engineer).filter_by(primary_language='cobol'
- ).one(), Engineer(name='vlad', primary_language='cobol'))
+ ).one(),
+ Engineer(name='vlad', primary_language='cobol'))
def test_single_from_joined_colsonsub(self):
class Person(Base, fixtures.ComparableEntity):
@@ -661,7 +673,7 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
eq_(sess.query(Person).all(), [Person(name='ratbert')])
sess.expunge_all()
person = sess.query(Person).filter(Person.name == 'ratbert'
- ).one()
+ ).one()
assert 'name' not in person.__dict__
def test_single_fksonsub(self):
@@ -683,7 +695,7 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
__mapper_args__ = {'polymorphic_identity': 'engineer'}
primary_language_id = Column(Integer,
- ForeignKey('languages.id'))
+ ForeignKey('languages.id'))
primary_language = relationship('Language')
class Language(Base, fixtures.ComparableEntity):
@@ -706,19 +718,19 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
sess.expunge_all()
eq_(sess.query(Person).filter(Engineer.primary_language.has(
Language.name
- == 'cobol')).first(), Engineer(name='vlad',
- primary_language=Language(name='cobol')))
+ == 'cobol')).first(),
+ Engineer(name='vlad', primary_language=Language(name='cobol')))
eq_(sess.query(Engineer).filter(Engineer.primary_language.has(
Language.name
- == 'cobol')).one(), Engineer(name='vlad',
- primary_language=Language(name='cobol')))
+ == 'cobol')).one(),
+ Engineer(name='vlad', primary_language=Language(name='cobol')))
eq_(sess.query(Person).join(Engineer.primary_language).order_by(
Language.name).all(),
[Engineer(name='vlad',
- primary_language=Language(name='cobol')),
- Engineer(name='wally', primary_language=Language(name='cpp'
- )), Engineer(name='dilbert',
- primary_language=Language(name='java'))])
+ primary_language=Language(name='cobol')),
+ Engineer(name='wally', primary_language=Language(name='cpp'
+ )),
+ Engineer(name='dilbert', primary_language=Language(name='java'))])
def test_single_three_levels(self):
@@ -810,11 +822,11 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
__mapper_args__ = {'polymorphic_identity': 'engineer'}
primary_language = Column('primary_language',
- String(50))
+ String(50))
foo_bar = Column(Integer, primary_key=True)
assert_raises_message(sa.exc.ArgumentError,
- 'place primary key', go)
+ 'place primary key', go)
def test_single_no_table_args(self):
@@ -832,7 +844,7 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
__mapper_args__ = {'polymorphic_identity': 'engineer'}
primary_language = Column('primary_language',
- String(50))
+ String(50))
# this should be on the Person class, as this is single
# table inheritance, which is why we test that this
@@ -849,6 +861,7 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
__tablename__ = "a"
id = Column(Integer, primary_key=True)
a_1 = A
+
class A(a_1):
__tablename__ = 'b'
id = Column(Integer(), ForeignKey(a_1.id), primary_key=True)
@@ -857,6 +870,7 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
class OverlapColPrecedenceTest(DeclarativeTestBase):
+
"""test #1892 cases when declarative does column precedence."""
def _run_test(self, Engineer, e_id, p_id):
@@ -895,7 +909,7 @@ class OverlapColPrecedenceTest(DeclarativeTestBase):
class Engineer(Person):
__tablename__ = 'engineer'
id = Column("eid", Integer, ForeignKey('person.id'),
- primary_key=True)
+ primary_key=True)
self._run_test(Engineer, "eid", "id")
@@ -907,15 +921,18 @@ class OverlapColPrecedenceTest(DeclarativeTestBase):
class Engineer(Person):
__tablename__ = 'engineer'
id = Column("eid", Integer, ForeignKey('person.pid'),
- primary_key=True)
+ primary_key=True)
self._run_test(Engineer, "eid", "pid")
from test.orm.test_events import _RemoveListeners
+
+
class ConcreteInhTest(_RemoveListeners, DeclarativeTestBase):
+
def _roundtrip(self, Employee, Manager, Engineer, Boss,
- polymorphic=True, explicit_type=False):
+ polymorphic=True, explicit_type=False):
Base.metadata.create_all()
sess = create_session()
e1 = Engineer(name='dilbert', primary_language='java')
@@ -932,7 +949,7 @@ class ConcreteInhTest(_RemoveListeners, DeclarativeTestBase):
assert_raises_message(
AttributeError,
"does not implement attribute .?'type' "
- "at the instance level.",
+ "at the instance level.",
getattr, obj, "type"
)
else:
@@ -946,37 +963,38 @@ class ConcreteInhTest(_RemoveListeners, DeclarativeTestBase):
if polymorphic:
eq_(sess.query(Employee).order_by(Employee.name).all(),
[Engineer(name='dilbert'), Manager(name='dogbert'),
- Boss(name='pointy haired'), Engineer(name='vlad'), Engineer(name='wally')])
+ Boss(name='pointy haired'),
+ Engineer(name='vlad'), Engineer(name='wally')])
else:
eq_(sess.query(Engineer).order_by(Engineer.name).all(),
[Engineer(name='dilbert'), Engineer(name='vlad'),
- Engineer(name='wally')])
+ Engineer(name='wally')])
eq_(sess.query(Manager).all(), [Manager(name='dogbert')])
eq_(sess.query(Boss).all(), [Boss(name='pointy haired')])
-
def test_explicit(self):
- engineers = Table('engineers', Base.metadata, Column('id',
- Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(50)),
- Column('primary_language', String(50)))
+ engineers = Table(
+ 'engineers', Base.metadata,
+ Column('id',
+ Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('name', String(50)),
+ Column('primary_language', String(50)))
managers = Table('managers', Base.metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(50)),
- Column('golf_swing', String(50))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)),
+ Column('golf_swing', String(50))
+ )
boss = Table('boss', Base.metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(50)),
- Column('golf_swing', String(50))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)),
+ Column('golf_swing', String(50))
+ )
punion = polymorphic_union({
- 'engineer': engineers,
- 'manager': managers,
- 'boss': boss}, 'type', 'punion')
+ 'engineer': engineers,
+ 'manager': managers,
+ 'boss': boss}, 'type', 'punion')
class Employee(Base, fixtures.ComparableEntity):
@@ -1047,31 +1065,31 @@ class ConcreteInhTest(_RemoveListeners, DeclarativeTestBase):
class Manager(Employee):
__tablename__ = 'manager'
employee_id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
name = Column(String(50))
golf_swing = Column(String(40))
__mapper_args__ = {
- 'polymorphic_identity': 'manager',
- 'concrete': True}
+ 'polymorphic_identity': 'manager',
+ 'concrete': True}
class Boss(Manager):
__tablename__ = 'boss'
employee_id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
name = Column(String(50))
golf_swing = Column(String(40))
__mapper_args__ = {
- 'polymorphic_identity': 'boss',
- 'concrete': True}
+ 'polymorphic_identity': 'boss',
+ 'concrete': True}
class Engineer(Employee):
__tablename__ = 'engineer'
employee_id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
name = Column(String(50))
primary_language = Column(String(40))
__mapper_args__ = {'polymorphic_identity': 'engineer',
- 'concrete': True}
+ 'concrete': True}
self._roundtrip(Employee, Manager, Engineer, Boss)
@@ -1079,42 +1097,42 @@ class ConcreteInhTest(_RemoveListeners, DeclarativeTestBase):
class Employee(ConcreteBase, Base, fixtures.ComparableEntity):
__tablename__ = 'employee'
employee_id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
name = Column(String(50))
__mapper_args__ = {
- 'polymorphic_identity': 'employee',
- 'concrete': True}
+ 'polymorphic_identity': 'employee',
+ 'concrete': True}
+
class Manager(Employee):
__tablename__ = 'manager'
employee_id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
name = Column(String(50))
golf_swing = Column(String(40))
__mapper_args__ = {
- 'polymorphic_identity': 'manager',
- 'concrete': True}
+ 'polymorphic_identity': 'manager',
+ 'concrete': True}
class Boss(Manager):
__tablename__ = 'boss'
employee_id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
name = Column(String(50))
golf_swing = Column(String(40))
__mapper_args__ = {
- 'polymorphic_identity': 'boss',
- 'concrete': True}
+ 'polymorphic_identity': 'boss',
+ 'concrete': True}
class Engineer(Employee):
__tablename__ = 'engineer'
employee_id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
name = Column(String(50))
primary_language = Column(String(40))
__mapper_args__ = {'polymorphic_identity': 'engineer',
- 'concrete': True}
+ 'concrete': True}
self._roundtrip(Employee, Manager, Engineer, Boss)
-
def test_has_inherited_table_doesnt_consider_base(self):
class A(Base):
__tablename__ = 'a'
@@ -1140,7 +1158,7 @@ class ConcreteInhTest(_RemoveListeners, DeclarativeTestBase):
ret = {
'polymorphic_identity': 'default',
'polymorphic_on': cls.type,
- }
+ }
else:
ret = {'polymorphic_identity': cls.__name__}
return ret
@@ -1161,7 +1179,7 @@ class ConcreteInhTest(_RemoveListeners, DeclarativeTestBase):
class Manager(Employee):
__tablename__ = 'manager'
employee_id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
name = Column(String(50))
golf_swing = Column(String(40))
@@ -1170,13 +1188,13 @@ class ConcreteInhTest(_RemoveListeners, DeclarativeTestBase):
return "manager"
__mapper_args__ = {
- 'polymorphic_identity': "manager",
- 'concrete': True}
+ 'polymorphic_identity': "manager",
+ 'concrete': True}
class Boss(Manager):
__tablename__ = 'boss'
employee_id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
name = Column(String(50))
golf_swing = Column(String(40))
@@ -1185,13 +1203,13 @@ class ConcreteInhTest(_RemoveListeners, DeclarativeTestBase):
return "boss"
__mapper_args__ = {
- 'polymorphic_identity': "boss",
- 'concrete': True}
+ 'polymorphic_identity': "boss",
+ 'concrete': True}
class Engineer(Employee):
__tablename__ = 'engineer'
employee_id = Column(Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
name = Column(String(50))
primary_language = Column(String(40))
@@ -1199,26 +1217,30 @@ class ConcreteInhTest(_RemoveListeners, DeclarativeTestBase):
def type(self):
return "engineer"
__mapper_args__ = {'polymorphic_identity': "engineer",
- 'concrete': True}
+ 'concrete': True}
self._roundtrip(Employee, Manager, Engineer, Boss, explicit_type=True)
-class ConcreteExtensionConfigTest(_RemoveListeners, testing.AssertsCompiledSQL, DeclarativeTestBase):
+
+class ConcreteExtensionConfigTest(
+ _RemoveListeners, testing.AssertsCompiledSQL, DeclarativeTestBase):
__dialect__ = 'default'
def test_classreg_setup(self):
class A(Base, fixtures.ComparableEntity):
__tablename__ = 'a'
- id = Column(Integer, primary_key=True, test_needs_autoincrement=True)
+ id = Column(Integer,
+ primary_key=True, test_needs_autoincrement=True)
data = Column(String(50))
collection = relationship("BC", primaryjoin="BC.a_id == A.id",
- collection_class=set)
+ collection_class=set)
class BC(AbstractConcreteBase, Base, fixtures.ComparableEntity):
pass
class B(BC):
__tablename__ = 'b'
- id = Column(Integer, primary_key=True, test_needs_autoincrement=True)
+ id = Column(Integer,
+ primary_key=True, test_needs_autoincrement=True)
a_id = Column(Integer, ForeignKey('a.id'))
data = Column(String(50))
@@ -1230,7 +1252,8 @@ class ConcreteExtensionConfigTest(_RemoveListeners, testing.AssertsCompiledSQL,
class C(BC):
__tablename__ = 'c'
- id = Column(Integer, primary_key=True, test_needs_autoincrement=True)
+ id = Column(Integer,
+ primary_key=True, test_needs_autoincrement=True)
a_id = Column(Integer, ForeignKey('a.id'))
data = Column(String(50))
c_data = Column(String(50))
@@ -1274,8 +1297,123 @@ class ConcreteExtensionConfigTest(_RemoveListeners, testing.AssertsCompiledSQL,
sess.query(A).join(A.collection),
"SELECT a.id AS a_id, a.data AS a_data FROM a JOIN "
"(SELECT c.id AS id, c.a_id AS a_id, c.data AS data, "
- "c.c_data AS c_data, CAST(NULL AS VARCHAR(50)) AS b_data, "
- "'c' AS type FROM c UNION ALL SELECT b.id AS id, b.a_id AS a_id, "
- "b.data AS data, CAST(NULL AS VARCHAR(50)) AS c_data, "
- "b.b_data AS b_data, 'b' AS type FROM b) AS pjoin ON pjoin.a_id = a.id"
+ "c.c_data AS c_data, CAST(NULL AS VARCHAR(50)) AS b_data, "
+ "'c' AS type FROM c UNION ALL SELECT b.id AS id, b.a_id AS a_id, "
+ "b.data AS data, CAST(NULL AS VARCHAR(50)) AS c_data, "
+ "b.b_data AS b_data, 'b' AS type FROM b) AS pjoin "
+ "ON pjoin.a_id = a.id"
)
+
+ def test_prop_on_base(self):
+        """test [ticket:2670]"""
+
+ counter = mock.Mock()
+
+ class Something(Base):
+ __tablename__ = 'something'
+ id = Column(Integer, primary_key=True)
+
+ class AbstractConcreteAbstraction(AbstractConcreteBase, Base):
+ id = Column(Integer, primary_key=True)
+ x = Column(Integer)
+ y = Column(Integer)
+
+ @declared_attr
+ def something_id(cls):
+ return Column(ForeignKey(Something.id))
+
+ @declared_attr
+ def something(cls):
+ counter(cls, "something")
+ return relationship("Something")
+
+ @declared_attr
+ def something_else(cls):
+ counter(cls, "something_else")
+ return relationship("Something")
+
+ class ConcreteConcreteAbstraction(AbstractConcreteAbstraction):
+ __tablename__ = 'cca'
+ __mapper_args__ = {
+ 'polymorphic_identity': 'ccb',
+ 'concrete': True}
+
+ # concrete is mapped, the abstract base is not (yet)
+ assert ConcreteConcreteAbstraction.__mapper__
+ assert not hasattr(AbstractConcreteAbstraction, '__mapper__')
+
+ session = Session()
+ self.assert_compile(
+ session.query(ConcreteConcreteAbstraction).filter(
+ ConcreteConcreteAbstraction.something.has(id=1)),
+ "SELECT cca.id AS cca_id, cca.x AS cca_x, cca.y AS cca_y, "
+ "cca.something_id AS cca_something_id FROM cca WHERE EXISTS "
+ "(SELECT 1 FROM something WHERE something.id = cca.something_id "
+ "AND something.id = :id_1)"
+ )
+
+ # now it is
+ assert AbstractConcreteAbstraction.__mapper__
+
+ self.assert_compile(
+ session.query(ConcreteConcreteAbstraction).filter(
+ ConcreteConcreteAbstraction.something_else.has(id=1)),
+ "SELECT cca.id AS cca_id, cca.x AS cca_x, cca.y AS cca_y, "
+ "cca.something_id AS cca_something_id FROM cca WHERE EXISTS "
+ "(SELECT 1 FROM something WHERE something.id = cca.something_id "
+ "AND something.id = :id_1)"
+ )
+
+ self.assert_compile(
+ session.query(AbstractConcreteAbstraction).filter(
+ AbstractConcreteAbstraction.something.has(id=1)),
+ "SELECT pjoin.id AS pjoin_id, pjoin.x AS pjoin_x, "
+ "pjoin.y AS pjoin_y, pjoin.something_id AS pjoin_something_id, "
+ "pjoin.type AS pjoin_type FROM "
+ "(SELECT cca.id AS id, cca.x AS x, cca.y AS y, "
+ "cca.something_id AS something_id, 'ccb' AS type FROM cca) "
+ "AS pjoin WHERE EXISTS (SELECT 1 FROM something "
+ "WHERE something.id = pjoin.something_id AND something.id = :id_1)"
+ )
+
+ self.assert_compile(
+ session.query(AbstractConcreteAbstraction).filter(
+ AbstractConcreteAbstraction.something_else.has(id=1)),
+ "SELECT pjoin.id AS pjoin_id, pjoin.x AS pjoin_x, "
+ "pjoin.y AS pjoin_y, pjoin.something_id AS pjoin_something_id, "
+ "pjoin.type AS pjoin_type FROM "
+ "(SELECT cca.id AS id, cca.x AS x, cca.y AS y, "
+ "cca.something_id AS something_id, 'ccb' AS type FROM cca) "
+ "AS pjoin WHERE EXISTS (SELECT 1 FROM something "
+ "WHERE something.id = pjoin.something_id AND something.id = :id_1)"
+ )
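+
+        # A minimal sketch (hypothetical names) of the deferred mapping
+        # behavior exercised above: a class deriving from
+        # AbstractConcreteBase is not mapped until concrete descendants
+        # exist and mappers are configured:
+        #
+        #     class AbstractThing(AbstractConcreteBase, Base):
+        #         id = Column(Integer, primary_key=True)
+        #
+        #     class Thing(AbstractThing):
+        #         __tablename__ = 'thing'
+        #         __mapper_args__ = {'polymorphic_identity': 'thing',
+        #                            'concrete': True}
+        #
+        #     configure_mappers()
+        #     assert AbstractThing.__mapper__  # now mapped against pjoin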
+
+ def test_abstract_in_hierarchy(self):
+ class Document(Base, AbstractConcreteBase):
+ doctype = Column(String)
+
+ class ContactDocument(Document):
+ __abstract__ = True
+
+ send_method = Column(String)
+
+ class ActualDocument(ContactDocument):
+ __tablename__ = 'actual_documents'
+ __mapper_args__ = {
+ 'concrete': True,
+ 'polymorphic_identity': 'actual'}
+
+ id = Column(Integer, primary_key=True)
+
+ configure_mappers()
+ session = Session()
+ self.assert_compile(
+ session.query(Document),
+ "SELECT pjoin.doctype AS pjoin_doctype, "
+ "pjoin.send_method AS pjoin_send_method, "
+ "pjoin.id AS pjoin_id, pjoin.type AS pjoin_type "
+ "FROM (SELECT actual_documents.doctype AS doctype, "
+ "actual_documents.send_method AS send_method, "
+ "actual_documents.id AS id, 'actual' AS type "
+ "FROM actual_documents) AS pjoin"
+        )
\ No newline at end of file
diff --git a/test/ext/declarative/test_mixin.py b/test/ext/declarative/test_mixin.py
index d3c2ff982..db86927a1 100644
--- a/test/ext/declarative/test_mixin.py
+++ b/test/ext/declarative/test_mixin.py
@@ -3,19 +3,21 @@ from sqlalchemy.testing import eq_, assert_raises, \
from sqlalchemy.ext import declarative as decl
import sqlalchemy as sa
from sqlalchemy import testing
-from sqlalchemy import Integer, String, ForeignKey
+from sqlalchemy import Integer, String, ForeignKey, select, func
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.orm import relationship, create_session, class_mapper, \
configure_mappers, clear_mappers, \
- deferred, column_property, \
- Session
+ deferred, column_property, Session, base as orm_base
from sqlalchemy.util import classproperty
from sqlalchemy.ext.declarative import declared_attr
-from sqlalchemy.testing import fixtures
+from sqlalchemy.testing import fixtures, mock
+from sqlalchemy.testing.util import gc_collect
Base = None
+
class DeclarativeTestBase(fixtures.TestBase, testing.AssertsExecutionResults):
+
def setup(self):
global Base
Base = decl.declarative_base(testing.db)
@@ -25,6 +27,7 @@ class DeclarativeTestBase(fixtures.TestBase, testing.AssertsExecutionResults):
clear_mappers()
Base.metadata.drop_all()
+
class DeclarativeMixinTest(DeclarativeTestBase):
def test_simple(self):
@@ -157,6 +160,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
def test_table_name_inherited(self):
class MyMixin:
+
@declared_attr
def __tablename__(cls):
return cls.__name__.lower()
@@ -169,6 +173,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
def test_classproperty_still_works(self):
class MyMixin(object):
+
@classproperty
def __tablename__(cls):
return cls.__name__.lower()
@@ -182,6 +187,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
def test_table_name_not_inherited(self):
class MyMixin:
+
@declared_attr
def __tablename__(cls):
return cls.__name__.lower()
@@ -195,11 +201,13 @@ class DeclarativeMixinTest(DeclarativeTestBase):
def test_table_name_inheritance_order(self):
class MyMixin1:
+
@declared_attr
def __tablename__(cls):
return cls.__name__.lower() + '1'
class MyMixin2:
+
@declared_attr
def __tablename__(cls):
return cls.__name__.lower() + '2'
@@ -212,6 +220,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
def test_table_name_dependent_on_subclass(self):
class MyHistoryMixin:
+
@declared_attr
def __tablename__(cls):
return cls.parent_name + '_changelog'
@@ -236,6 +245,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
def test_table_args_inherited_descriptor(self):
class MyMixin:
+
@declared_attr
def __table_args__(cls):
return {'info': cls.__name__}
@@ -289,7 +299,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
assert Specific.bar.prop is General.bar.prop
@testing.skip_if(lambda: testing.against('oracle'),
- "Test has an empty insert in it at the moment")
+ "Test has an empty insert in it at the moment")
def test_columns_single_inheritance_conflict_resolution(self):
"""Test that a declared_attr can return the existing column and it will
        be ignored. This allows conditional columns to be added.
@@ -302,20 +312,24 @@ class DeclarativeMixinTest(DeclarativeTestBase):
id = Column(Integer, primary_key=True)
class Mixin(object):
+
@declared_attr
def target_id(cls):
- return cls.__table__.c.get('target_id',
- Column(Integer, ForeignKey('other.id'))
- )
+ return cls.__table__.c.get(
+ 'target_id',
+ Column(Integer, ForeignKey('other.id'))
+ )
@declared_attr
def target(cls):
return relationship("Other")
class Engineer(Mixin, Person):
+
"""single table inheritance"""
class Manager(Mixin, Person):
+
"""single table inheritance"""
class Other(Base):
@@ -338,11 +352,10 @@ class DeclarativeMixinTest(DeclarativeTestBase):
Engineer(target=o1),
Manager(target=o2),
Manager(target=o1)
- ])
+ ])
session.commit()
eq_(session.query(Engineer).first().target, o1)
-
def test_columns_joined_table_inheritance(self):
"""Test a column on a mixin with an alternate attribute name,
mapped to a superclass and joined-table inheritance subclass.
@@ -428,6 +441,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
def test_mapper_args_declared_attr(self):
class ComputedMapperArgs:
+
@declared_attr
def __mapper_args__(cls):
if cls.__name__ == 'Person':
@@ -454,6 +468,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
# ComputedMapperArgs on both classes for no apparent reason.
class ComputedMapperArgs:
+
@declared_attr
def __mapper_args__(cls):
if cls.__name__ == 'Person':
@@ -612,7 +627,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
@declared_attr
def __table_args__(cls):
- return {'mysql_engine':'InnoDB'}
+ return {'mysql_engine': 'InnoDB'}
@declared_attr
def __mapper_args__(cls):
@@ -640,13 +655,14 @@ class DeclarativeMixinTest(DeclarativeTestBase):
"""test the @declared_attr approach from a custom base."""
class Base(object):
+
@declared_attr
def __tablename__(cls):
return cls.__name__.lower()
@declared_attr
def __table_args__(cls):
- return {'mysql_engine':'InnoDB'}
+ return {'mysql_engine': 'InnoDB'}
@declared_attr
def id(self):
@@ -714,7 +730,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
eq_(Generic.__table__.name, 'generic')
eq_(Specific.__table__.name, 'specific')
eq_(list(Generic.__table__.c.keys()), ['timestamp', 'id',
- 'python_type'])
+ 'python_type'])
eq_(list(Specific.__table__.c.keys()), ['id'])
eq_(Generic.__table__.kwargs, {'mysql_engine': 'InnoDB'})
eq_(Specific.__table__.kwargs, {'mysql_engine': 'InnoDB'})
@@ -749,7 +765,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
eq_(BaseType.__table__.name, 'basetype')
eq_(list(BaseType.__table__.c.keys()), ['timestamp', 'type', 'id',
- 'value'])
+ 'value'])
eq_(BaseType.__table__.kwargs, {'mysql_engine': 'InnoDB'})
assert Single.__table__ is BaseType.__table__
eq_(Joined.__table__.name, 'joined')
@@ -851,7 +867,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
@declared_attr
def __tablename__(cls):
if decl.has_inherited_table(cls) and TableNameMixin \
- not in cls.__bases__:
+ not in cls.__bases__:
return None
return cls.__name__.lower()
@@ -900,9 +916,9 @@ class DeclarativeMixinTest(DeclarativeTestBase):
class Model(Base, ColumnMixin):
- __table__ = Table('foo', Base.metadata, Column('data',
- Integer), Column('id', Integer,
- primary_key=True))
+ __table__ = Table('foo', Base.metadata,
+ Column('data', Integer),
+ Column('id', Integer, primary_key=True))
model_col = Model.__table__.c.data
mixin_col = ColumnMixin.data
@@ -920,8 +936,8 @@ class DeclarativeMixinTest(DeclarativeTestBase):
class Model(Base, ColumnMixin):
__table__ = Table('foo', Base.metadata,
- Column('data',Integer),
- Column('id', Integer,primary_key=True))
+ Column('data', Integer),
+ Column('id', Integer, primary_key=True))
foo = relationship("Dest")
assert_raises_message(sa.exc.ArgumentError,
@@ -942,9 +958,9 @@ class DeclarativeMixinTest(DeclarativeTestBase):
class Model(Base, ColumnMixin):
__table__ = Table('foo', Base.metadata,
- Column('data',Integer),
- Column('tada', Integer),
- Column('id', Integer,primary_key=True))
+ Column('data', Integer),
+ Column('tada', Integer),
+ Column('id', Integer, primary_key=True))
foo = relationship("Dest")
assert_raises_message(sa.exc.ArgumentError,
@@ -959,9 +975,9 @@ class DeclarativeMixinTest(DeclarativeTestBase):
class Model(Base, ColumnMixin):
- __table__ = Table('foo', Base.metadata, Column('data',
- Integer), Column('id', Integer,
- primary_key=True))
+ __table__ = Table('foo', Base.metadata,
+ Column('data', Integer),
+ Column('id', Integer, primary_key=True))
model_col = Model.__table__.c.data
mixin_col = ColumnMixin.data
@@ -987,10 +1003,11 @@ class DeclarativeMixinTest(DeclarativeTestBase):
__tablename__ = 'model'
eq_(list(Model.__table__.c.keys()), ['col1', 'col3', 'col2', 'col4',
- 'id'])
+ 'id'])
def test_honor_class_mro_one(self):
class HasXMixin(object):
+
@declared_attr
def x(self):
return Column(Integer)
@@ -1007,6 +1024,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
def test_honor_class_mro_two(self):
class HasXMixin(object):
+
@declared_attr
def x(self):
return Column(Integer)
@@ -1014,6 +1032,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
class Parent(HasXMixin, Base):
__tablename__ = 'parent'
id = Column(Integer, primary_key=True)
+
def x(self):
return "hi"
@@ -1025,6 +1044,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
def test_arbitrary_attrs_one(self):
class HasMixin(object):
+
@declared_attr
def some_attr(cls):
return cls.__name__ + "SOME ATTR"
@@ -1043,8 +1063,9 @@ class DeclarativeMixinTest(DeclarativeTestBase):
__tablename__ = 'filter_a'
id = Column(Integer(), primary_key=True)
parent_id = Column(Integer(),
- ForeignKey('type_a.id'))
+ ForeignKey('type_a.id'))
filter = Column(String())
+
def __init__(self, filter_, **kw):
self.filter = filter_
@@ -1052,16 +1073,18 @@ class DeclarativeMixinTest(DeclarativeTestBase):
__tablename__ = 'filter_b'
id = Column(Integer(), primary_key=True)
parent_id = Column(Integer(),
- ForeignKey('type_b.id'))
+ ForeignKey('type_b.id'))
filter = Column(String())
+
def __init__(self, filter_, **kw):
self.filter = filter_
class FilterMixin(object):
+
@declared_attr
def _filters(cls):
return relationship(cls.filter_class,
- cascade='all,delete,delete-orphan')
+ cascade='all,delete,delete-orphan')
@declared_attr
def filters(cls):
@@ -1080,6 +1103,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
TypeA(filters=['foo'])
TypeB(filters=['foo'])
+
class DeclarativeMixinPropertyTest(DeclarativeTestBase):
def test_column_property(self):
@@ -1118,9 +1142,9 @@ class DeclarativeMixinPropertyTest(DeclarativeTestBase):
sess.add_all([m1, m2])
sess.flush()
eq_(sess.query(MyModel).filter(MyModel.prop_hoho == 'foo'
- ).one(), m1)
+ ).one(), m1)
eq_(sess.query(MyOtherModel).filter(MyOtherModel.prop_hoho
- == 'bar').one(), m2)
+ == 'bar').one(), m2)
def test_doc(self):
"""test documentation transfer.
@@ -1198,7 +1222,6 @@ class DeclarativeMixinPropertyTest(DeclarativeTestBase):
ModelTwo.__table__.c.version_id
)
-
def test_deferred(self):
class MyMixin(object):
@@ -1235,8 +1258,8 @@ class DeclarativeMixinPropertyTest(DeclarativeTestBase):
@declared_attr
def target(cls):
return relationship('Target',
- primaryjoin='Target.id==%s.target_id'
- % cls.__name__)
+ primaryjoin='Target.id==%s.target_id'
+ % cls.__name__)
else:
@declared_attr
@@ -1279,7 +1302,199 @@ class DeclarativeMixinPropertyTest(DeclarativeTestBase):
self._test_relationship(True)
+class DeclaredAttrTest(DeclarativeTestBase, testing.AssertsCompiledSQL):
+ __dialect__ = 'default'
+
+ def test_singleton_behavior_within_decl(self):
+ counter = mock.Mock()
+
+ class Mixin(object):
+ @declared_attr
+ def my_prop(cls):
+ counter(cls)
+ return Column('x', Integer)
+
+ class A(Base, Mixin):
+ __tablename__ = 'a'
+ id = Column(Integer, primary_key=True)
+
+ @declared_attr
+ def my_other_prop(cls):
+ return column_property(cls.my_prop + 5)
+
+ eq_(counter.mock_calls, [mock.call(A)])
+
+ class B(Base, Mixin):
+ __tablename__ = 'b'
+ id = Column(Integer, primary_key=True)
+
+ @declared_attr
+ def my_other_prop(cls):
+ return column_property(cls.my_prop + 5)
+
+ eq_(
+ counter.mock_calls,
+ [mock.call(A), mock.call(B)])
+
+        # this is why we need singleton-per-class behavior: without it
+        # we would get an unbound "x" column here, because my_prop()
+        # generates a distinct Column for each class that invokes it.
+ a_col = A.my_other_prop.__clause_element__().element.left
+ b_col = B.my_other_prop.__clause_element__().element.left
+ is_(a_col.table, A.__table__)
+ is_(b_col.table, B.__table__)
+ is_(a_col, A.__table__.c.x)
+ is_(b_col, B.__table__.c.x)
+
+ s = Session()
+ self.assert_compile(
+ s.query(A),
+ "SELECT a.x AS a_x, a.x + :x_1 AS anon_1, a.id AS a_id FROM a"
+ )
+ self.assert_compile(
+ s.query(B),
+ "SELECT b.x AS b_x, b.x + :x_1 AS anon_1, b.id AS b_id FROM b"
+ )
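+
+        # A minimal sketch (hypothetical names) of the singleton-per-class
+        # behavior verified above: while one declarative class is being
+        # constructed, repeated access of a mixin's @declared_attr returns
+        # the same object, so sibling attributes may refer to it:
+        #
+        #     class HasX(object):
+        #         @declared_attr
+        #         def x(cls):
+        #             return Column('x', Integer)
+        #
+        #     class M(Base, HasX):
+        #         __tablename__ = 'm'
+        #         id = Column(Integer, primary_key=True)
+        #
+        #         @declared_attr
+        #         def x_plus_one(cls):
+        #             # cls.x is the same Column that maps to m.x
+        #             return column_property(cls.x + 1)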
+
+ def test_singleton_gc(self):
+ counter = mock.Mock()
+
+ class Mixin(object):
+ @declared_attr
+ def my_prop(cls):
+ counter(cls.__name__)
+ return Column('x', Integer)
+
+ class A(Base, Mixin):
+ __tablename__ = 'b'
+ id = Column(Integer, primary_key=True)
+
+ @declared_attr
+ def my_other_prop(cls):
+ return column_property(cls.my_prop + 5)
+
+ eq_(counter.mock_calls, [mock.call("A")])
+ del A
+ gc_collect()
+ assert "A" not in Base._decl_class_registry
+
+ def test_can_we_access_the_mixin_straight(self):
+ class Mixin(object):
+ @declared_attr
+ def my_prop(cls):
+ return Column('x', Integer)
+
+ assert_raises_message(
+ sa.exc.SAWarning,
+ "Unmanaged access of declarative attribute my_prop "
+ "from non-mapped class Mixin",
+ getattr, Mixin, "my_prop"
+ )
+
+ def test_property_noncascade(self):
+ counter = mock.Mock()
+
+ class Mixin(object):
+ @declared_attr
+ def my_prop(cls):
+ counter(cls)
+ return column_property(cls.x + 2)
+
+ class A(Base, Mixin):
+ __tablename__ = 'a'
+
+ id = Column(Integer, primary_key=True)
+ x = Column(Integer)
+
+ class B(A):
+ pass
+
+ eq_(counter.mock_calls, [mock.call(A)])
+
+ def test_property_cascade(self):
+ counter = mock.Mock()
+
+ class Mixin(object):
+ @declared_attr.cascading
+ def my_prop(cls):
+ counter(cls)
+ return column_property(cls.x + 2)
+
+ class A(Base, Mixin):
+ __tablename__ = 'a'
+
+ id = Column(Integer, primary_key=True)
+ x = Column(Integer)
+
+ class B(A):
+ pass
+
+ eq_(counter.mock_calls, [mock.call(A), mock.call(B)])
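+
+        # A minimal sketch (hypothetical names) contrasting the two forms:
+        # plain @declared_attr runs only for the class that first maps the
+        # attribute, while @declared_attr.cascading is re-invoked for each
+        # subclass in the hierarchy as well:
+        #
+        #     class HasProp(object):
+        #         @declared_attr.cascading
+        #         def prop(cls):
+        #             return column_property(cls.x + 2)  # once per class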
+
+ def test_column_pre_map(self):
+ counter = mock.Mock()
+
+ class Mixin(object):
+ @declared_attr
+ def my_col(cls):
+ counter(cls)
+ assert not orm_base._mapper_or_none(cls)
+ return Column('x', Integer)
+
+ class A(Base, Mixin):
+ __tablename__ = 'a'
+
+ id = Column(Integer, primary_key=True)
+
+ eq_(counter.mock_calls, [mock.call(A)])
+
+ def test_mixin_attr_refers_to_column_copies(self):
+ # this @declared_attr can refer to User.id
+ # freely because we now do the "copy column" operation
+ # before the declared_attr is invoked.
+
+ counter = mock.Mock()
+
+ class HasAddressCount(object):
+ id = Column(Integer, primary_key=True)
+
+ @declared_attr
+ def address_count(cls):
+ counter(cls.id)
+ return column_property(
+ select([func.count(Address.id)]).
+ where(Address.user_id == cls.id).
+ as_scalar()
+ )
+
+ class Address(Base):
+ __tablename__ = 'address'
+ id = Column(Integer, primary_key=True)
+ user_id = Column(ForeignKey('user.id'))
+
+ class User(Base, HasAddressCount):
+ __tablename__ = 'user'
+
+ eq_(
+ counter.mock_calls,
+ [mock.call(User.id)]
+ )
+
+ sess = Session()
+ self.assert_compile(
+ sess.query(User).having(User.address_count > 5),
+ 'SELECT (SELECT count(address.id) AS '
+ 'count_1 FROM address WHERE address.user_id = "user".id) '
+ 'AS anon_1, "user".id AS user_id FROM "user" '
+ 'HAVING (SELECT count(address.id) AS '
+ 'count_1 FROM address WHERE address.user_id = "user".id) '
+ '> :param_1'
+ )
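+
+        # A minimal sketch (hypothetical names) of why cls.id is safe to
+        # use above: a Column declared directly on the mixin is copied to
+        # the mapped class before its @declared_attr callables run, so the
+        # callable sees the class-local copy rather than the shared one:
+        #
+        #     class HasId(object):
+        #         id = Column(Integer, primary_key=True)
+        #
+        #         @declared_attr
+        #         def id_plus_one(cls):
+        #             return column_property(cls.id + 1)  # local 'id' copy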
+
+
class AbstractTest(DeclarativeTestBase):
+
def test_abstract_boolean(self):
class A(Base):
diff --git a/test/ext/declarative/test_reflection.py b/test/ext/declarative/test_reflection.py
index f4bda6995..c7f7bc05d 100644
--- a/test/ext/declarative/test_reflection.py
+++ b/test/ext/declarative/test_reflection.py
@@ -1,7 +1,7 @@
from sqlalchemy.testing import eq_, assert_raises
from sqlalchemy.ext import declarative as decl
from sqlalchemy import testing
-from sqlalchemy import MetaData, Integer, String, ForeignKey
+from sqlalchemy import Integer, String, ForeignKey
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.orm import relationship, create_session, \
clear_mappers, \
@@ -10,6 +10,7 @@ from sqlalchemy.testing import fixtures
from sqlalchemy.testing.util import gc_collect
from sqlalchemy.ext.declarative.base import _DeferredMapperConfig
+
class DeclarativeReflectionBase(fixtures.TablesTest):
__requires__ = 'reflectable_autoincrement',
@@ -21,13 +22,14 @@ class DeclarativeReflectionBase(fixtures.TablesTest):
super(DeclarativeReflectionBase, self).teardown()
clear_mappers()
+
class DeclarativeReflectionTest(DeclarativeReflectionBase):
@classmethod
def define_tables(cls, metadata):
Table('users', metadata,
- Column('id', Integer,
- primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer,
+ primary_key=True, test_needs_autoincrement=True),
Column('name', String(50)), test_needs_fk=True)
Table(
'addresses',
@@ -37,7 +39,7 @@ class DeclarativeReflectionTest(DeclarativeReflectionBase):
Column('email', String(50)),
Column('user_id', Integer, ForeignKey('users.id')),
test_needs_fk=True,
- )
+ )
Table(
'imhandles',
metadata,
@@ -47,8 +49,7 @@ class DeclarativeReflectionTest(DeclarativeReflectionBase):
Column('network', String(50)),
Column('handle', String(50)),
test_needs_fk=True,
- )
-
+ )
def test_basic(self):
class User(Base, fixtures.ComparableEntity):
@@ -69,13 +70,14 @@ class DeclarativeReflectionTest(DeclarativeReflectionBase):
test_needs_autoincrement=True)
u1 = User(name='u1', addresses=[Address(email='one'),
- Address(email='two')])
+ Address(email='two')])
sess = create_session()
sess.add(u1)
sess.flush()
sess.expunge_all()
- eq_(sess.query(User).all(), [User(name='u1',
- addresses=[Address(email='one'), Address(email='two')])])
+ eq_(sess.query(User).all(), [
+ User(name='u1',
+ addresses=[Address(email='one'), Address(email='two')])])
a1 = sess.query(Address).filter(Address.email == 'two').one()
eq_(a1, Address(email='two'))
eq_(a1.user, User(name='u1'))
@@ -100,13 +102,14 @@ class DeclarativeReflectionTest(DeclarativeReflectionBase):
test_needs_autoincrement=True)
u1 = User(nom='u1', addresses=[Address(email='one'),
- Address(email='two')])
+ Address(email='two')])
sess = create_session()
sess.add(u1)
sess.flush()
sess.expunge_all()
- eq_(sess.query(User).all(), [User(nom='u1',
- addresses=[Address(email='one'), Address(email='two')])])
+ eq_(sess.query(User).all(), [
+ User(nom='u1',
+ addresses=[Address(email='one'), Address(email='two')])])
a1 = sess.query(Address).filter(Address.email == 'two').one()
eq_(a1, Address(email='two'))
eq_(a1.user, User(nom='u1'))
@@ -131,61 +134,66 @@ class DeclarativeReflectionTest(DeclarativeReflectionBase):
test_needs_autoincrement=True)
handles = relationship('IMHandle', backref='user')
- u1 = User(name='u1', handles=[IMHandle(network='blabber',
- handle='foo'), IMHandle(network='lol', handle='zomg'
- )])
+ u1 = User(name='u1', handles=[
+ IMHandle(network='blabber', handle='foo'),
+ IMHandle(network='lol', handle='zomg')])
sess = create_session()
sess.add(u1)
sess.flush()
sess.expunge_all()
- eq_(sess.query(User).all(), [User(name='u1',
- handles=[IMHandle(network='blabber', handle='foo'),
- IMHandle(network='lol', handle='zomg')])])
+ eq_(sess.query(User).all(), [
+ User(name='u1', handles=[IMHandle(network='blabber', handle='foo'),
+ IMHandle(network='lol', handle='zomg')])])
a1 = sess.query(IMHandle).filter(IMHandle.handle == 'zomg'
- ).one()
+ ).one()
eq_(a1, IMHandle(network='lol', handle='zomg'))
eq_(a1.user, User(name='u1'))
+
class DeferredReflectBase(DeclarativeReflectionBase):
+
def teardown(self):
super(DeferredReflectBase, self).teardown()
_DeferredMapperConfig._configs.clear()
Base = None
+
class DeferredReflectPKFKTest(DeferredReflectBase):
+
@classmethod
def define_tables(cls, metadata):
Table("a", metadata,
- Column('id', Integer,
- primary_key=True, test_needs_autoincrement=True),
- )
+ Column('id', Integer,
+ primary_key=True, test_needs_autoincrement=True),
+ )
Table("b", metadata,
- Column('id', Integer,
- ForeignKey('a.id'),
- primary_key=True),
- Column('x', Integer, primary_key=True)
- )
+ Column('id', Integer,
+ ForeignKey('a.id'),
+ primary_key=True),
+ Column('x', Integer, primary_key=True)
+ )
def test_pk_fk(self):
class B(decl.DeferredReflection, fixtures.ComparableEntity,
- Base):
+ Base):
__tablename__ = 'b'
a = relationship("A")
class A(decl.DeferredReflection, fixtures.ComparableEntity,
- Base):
+ Base):
__tablename__ = 'a'
decl.DeferredReflection.prepare(testing.db)
+
class DeferredReflectionTest(DeferredReflectBase):
@classmethod
def define_tables(cls, metadata):
Table('users', metadata,
- Column('id', Integer,
- primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer,
+ primary_key=True, test_needs_autoincrement=True),
Column('name', String(50)), test_needs_fk=True)
Table(
'addresses',
@@ -195,7 +203,7 @@ class DeferredReflectionTest(DeferredReflectBase):
Column('email', String(50)),
Column('user_id', Integer, ForeignKey('users.id')),
test_needs_fk=True,
- )
+ )
def _roundtrip(self):
@@ -203,25 +211,26 @@ class DeferredReflectionTest(DeferredReflectBase):
Address = Base._decl_class_registry['Address']
u1 = User(name='u1', addresses=[Address(email='one'),
- Address(email='two')])
+ Address(email='two')])
sess = create_session()
sess.add(u1)
sess.flush()
sess.expunge_all()
- eq_(sess.query(User).all(), [User(name='u1',
- addresses=[Address(email='one'), Address(email='two')])])
+ eq_(sess.query(User).all(), [
+ User(name='u1',
+ addresses=[Address(email='one'), Address(email='two')])])
a1 = sess.query(Address).filter(Address.email == 'two').one()
eq_(a1, Address(email='two'))
eq_(a1.user, User(name='u1'))
def test_basic_deferred(self):
class User(decl.DeferredReflection, fixtures.ComparableEntity,
- Base):
+ Base):
__tablename__ = 'users'
addresses = relationship("Address", backref="user")
class Address(decl.DeferredReflection, fixtures.ComparableEntity,
- Base):
+ Base):
__tablename__ = 'addresses'
decl.DeferredReflection.prepare(testing.db)
@@ -249,12 +258,12 @@ class DeferredReflectionTest(DeferredReflectBase):
def test_redefine_fk_double(self):
class User(decl.DeferredReflection, fixtures.ComparableEntity,
- Base):
+ Base):
__tablename__ = 'users'
addresses = relationship("Address", backref="user")
class Address(decl.DeferredReflection, fixtures.ComparableEntity,
- Base):
+ Base):
__tablename__ = 'addresses'
user_id = Column(Integer, ForeignKey('users.id'))
@@ -262,10 +271,11 @@ class DeferredReflectionTest(DeferredReflectBase):
self._roundtrip()
def test_mapper_args_deferred(self):
- """test that __mapper_args__ is not called until *after* table reflection"""
+ """test that __mapper_args__ is not called until *after*
+ table reflection"""
class User(decl.DeferredReflection, fixtures.ComparableEntity,
- Base):
+ Base):
__tablename__ = 'users'
@decl.declared_attr
@@ -296,10 +306,11 @@ class DeferredReflectionTest(DeferredReflectBase):
@testing.requires.predictable_gc
def test_cls_not_strong_ref(self):
class User(decl.DeferredReflection, fixtures.ComparableEntity,
- Base):
+ Base):
__tablename__ = 'users'
+
class Address(decl.DeferredReflection, fixtures.ComparableEntity,
- Base):
+ Base):
__tablename__ = 'addresses'
eq_(len(_DeferredMapperConfig._configs), 2)
del Address
@@ -308,26 +319,28 @@ class DeferredReflectionTest(DeferredReflectBase):
decl.DeferredReflection.prepare(testing.db)
assert not _DeferredMapperConfig._configs
+
class DeferredSecondaryReflectionTest(DeferredReflectBase):
+
@classmethod
def define_tables(cls, metadata):
Table('users', metadata,
- Column('id', Integer,
- primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer,
+ primary_key=True, test_needs_autoincrement=True),
Column('name', String(50)), test_needs_fk=True)
Table('user_items', metadata,
- Column('user_id', ForeignKey('users.id'), primary_key=True),
- Column('item_id', ForeignKey('items.id'), primary_key=True),
- test_needs_fk=True
- )
+ Column('user_id', ForeignKey('users.id'), primary_key=True),
+ Column('item_id', ForeignKey('items.id'), primary_key=True),
+ test_needs_fk=True
+ )
Table('items', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(50)),
- test_needs_fk=True
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50)),
+ test_needs_fk=True
+ )
def _roundtrip(self):
@@ -340,8 +353,8 @@ class DeferredSecondaryReflectionTest(DeferredReflectBase):
sess.add(u1)
sess.commit()
- eq_(sess.query(User).all(), [User(name='u1',
- items=[Item(name='i1'), Item(name='i2')])])
+ eq_(sess.query(User).all(), [
+ User(name='u1', items=[Item(name='i1'), Item(name='i2')])])
def test_string_resolution(self):
class User(decl.DeferredReflection, fixtures.ComparableEntity, Base):
@@ -359,7 +372,8 @@ class DeferredSecondaryReflectionTest(DeferredReflectBase):
class User(decl.DeferredReflection, fixtures.ComparableEntity, Base):
__tablename__ = 'users'
- items = relationship("Item", secondary=Table("user_items", Base.metadata))
+ items = relationship("Item",
+ secondary=Table("user_items", Base.metadata))
class Item(decl.DeferredReflection, fixtures.ComparableEntity, Base):
__tablename__ = 'items'
@@ -367,7 +381,9 @@ class DeferredSecondaryReflectionTest(DeferredReflectBase):
decl.DeferredReflection.prepare(testing.db)
self._roundtrip()
+
class DeferredInhReflectBase(DeferredReflectBase):
+
def _roundtrip(self):
Foo = Base._decl_class_registry['Foo']
Bar = Base._decl_class_registry['Bar']
@@ -392,24 +408,25 @@ class DeferredInhReflectBase(DeferredReflectBase):
]
)
+
class DeferredSingleInhReflectionTest(DeferredInhReflectBase):
@classmethod
def define_tables(cls, metadata):
Table("foo", metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('type', String(32)),
- Column('data', String(30)),
- Column('bar_data', String(30))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('type', String(32)),
+ Column('data', String(30)),
+ Column('bar_data', String(30))
+ )
def test_basic(self):
class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
- Base):
+ Base):
__tablename__ = 'foo'
__mapper_args__ = {"polymorphic_on": "type",
- "polymorphic_identity": "foo"}
+ "polymorphic_identity": "foo"}
class Bar(Foo):
__mapper_args__ = {"polymorphic_identity": "bar"}
@@ -419,10 +436,10 @@ class DeferredSingleInhReflectionTest(DeferredInhReflectBase):
def test_add_subclass_column(self):
class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
- Base):
+ Base):
__tablename__ = 'foo'
__mapper_args__ = {"polymorphic_on": "type",
- "polymorphic_identity": "foo"}
+ "polymorphic_identity": "foo"}
class Bar(Foo):
__mapper_args__ = {"polymorphic_identity": "bar"}
@@ -433,10 +450,10 @@ class DeferredSingleInhReflectionTest(DeferredInhReflectBase):
def test_add_pk_column(self):
class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
- Base):
+ Base):
__tablename__ = 'foo'
__mapper_args__ = {"polymorphic_on": "type",
- "polymorphic_identity": "foo"}
+ "polymorphic_identity": "foo"}
id = Column(Integer, primary_key=True)
class Bar(Foo):
@@ -445,28 +462,30 @@ class DeferredSingleInhReflectionTest(DeferredInhReflectBase):
decl.DeferredReflection.prepare(testing.db)
self._roundtrip()
+
class DeferredJoinedInhReflectionTest(DeferredInhReflectBase):
+
@classmethod
def define_tables(cls, metadata):
Table("foo", metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('type', String(32)),
- Column('data', String(30)),
- test_needs_fk=True,
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('type', String(32)),
+ Column('data', String(30)),
+ test_needs_fk=True,
+ )
Table('bar', metadata,
- Column('id', Integer, ForeignKey('foo.id'), primary_key=True),
- Column('bar_data', String(30)),
- test_needs_fk=True,
- )
+ Column('id', Integer, ForeignKey('foo.id'), primary_key=True),
+ Column('bar_data', String(30)),
+ test_needs_fk=True,
+ )
def test_basic(self):
class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
- Base):
+ Base):
__tablename__ = 'foo'
__mapper_args__ = {"polymorphic_on": "type",
- "polymorphic_identity": "foo"}
+ "polymorphic_identity": "foo"}
class Bar(Foo):
__tablename__ = 'bar'
@@ -477,10 +496,10 @@ class DeferredJoinedInhReflectionTest(DeferredInhReflectBase):
def test_add_subclass_column(self):
class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
- Base):
+ Base):
__tablename__ = 'foo'
__mapper_args__ = {"polymorphic_on": "type",
- "polymorphic_identity": "foo"}
+ "polymorphic_identity": "foo"}
class Bar(Foo):
__tablename__ = 'bar'
@@ -492,10 +511,10 @@ class DeferredJoinedInhReflectionTest(DeferredInhReflectBase):
def test_add_pk_column(self):
class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
- Base):
+ Base):
__tablename__ = 'foo'
__mapper_args__ = {"polymorphic_on": "type",
- "polymorphic_identity": "foo"}
+ "polymorphic_identity": "foo"}
id = Column(Integer, primary_key=True)
class Bar(Foo):
@@ -507,10 +526,10 @@ class DeferredJoinedInhReflectionTest(DeferredInhReflectBase):
def test_add_fk_pk_column(self):
class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
- Base):
+ Base):
__tablename__ = 'foo'
__mapper_args__ = {"polymorphic_on": "type",
- "polymorphic_identity": "foo"}
+ "polymorphic_identity": "foo"}
class Bar(Foo):
__tablename__ = 'bar'
diff --git a/test/ext/test_automap.py b/test/ext/test_automap.py
index f24164cb7..0a57b9caa 100644
--- a/test/ext/test_automap.py
+++ b/test/ext/test_automap.py
@@ -1,13 +1,14 @@
-from sqlalchemy.testing import fixtures, eq_
+from sqlalchemy.testing import fixtures
from ..orm._fixtures import FixtureTest
from sqlalchemy.ext.automap import automap_base
-from sqlalchemy.orm import relationship, interfaces, backref
+from sqlalchemy.orm import relationship, interfaces, configure_mappers
from sqlalchemy.ext.automap import generate_relationship
-from sqlalchemy.testing.mock import Mock, call
+from sqlalchemy.testing.mock import Mock
from sqlalchemy import String, Integer, ForeignKey
from sqlalchemy import testing
from sqlalchemy.testing.schema import Table, Column
+
class AutomapTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
@@ -27,6 +28,7 @@ class AutomapTest(fixtures.MappedTest):
def test_relationship_explicit_override_o2m(self):
Base = automap_base(metadata=self.metadata)
prop = relationship("addresses", collection_class=set)
+
class User(Base):
__tablename__ = 'users'
@@ -44,6 +46,7 @@ class AutomapTest(fixtures.MappedTest):
Base = automap_base(metadata=self.metadata)
prop = relationship("users")
+
class Address(Base):
__tablename__ = 'addresses'
@@ -57,7 +60,6 @@ class AutomapTest(fixtures.MappedTest):
u1 = User(name='u1', address_collection=[a1])
assert a1.users is u1
-
def test_relationship_self_referential(self):
Base = automap_base(metadata=self.metadata)
Base.prepare()
@@ -75,17 +77,19 @@ class AutomapTest(fixtures.MappedTest):
def classname_for_table(base, tablename, table):
return str("cls_" + tablename)
- def name_for_scalar_relationship(base, local_cls, referred_cls, constraint):
+ def name_for_scalar_relationship(
+ base, local_cls, referred_cls, constraint):
return "scalar_" + referred_cls.__name__
- def name_for_collection_relationship(base, local_cls, referred_cls, constraint):
+ def name_for_collection_relationship(
+ base, local_cls, referred_cls, constraint):
return "coll_" + referred_cls.__name__
Base.prepare(
- classname_for_table=classname_for_table,
- name_for_scalar_relationship=name_for_scalar_relationship,
- name_for_collection_relationship=name_for_collection_relationship
- )
+ classname_for_table=classname_for_table,
+ name_for_scalar_relationship=name_for_scalar_relationship,
+ name_for_collection_relationship=name_for_collection_relationship
+ )
User = Base.classes.cls_users
Address = Base.classes.cls_addresses
@@ -113,9 +117,10 @@ class AutomapTest(fixtures.MappedTest):
class Order(Base):
__tablename__ = 'orders'
- items_collection = relationship("items",
- secondary="order_items",
- collection_class=set)
+ items_collection = relationship(
+ "items",
+ secondary="order_items",
+ collection_class=set)
Base.prepare()
Item = Base.classes['items']
@@ -133,41 +138,115 @@ class AutomapTest(fixtures.MappedTest):
Base = automap_base(metadata=self.metadata)
mock = Mock()
- def _gen_relationship(base, direction, return_fn, attrname,
- local_cls, referred_cls, **kw):
+
+ def _gen_relationship(
+ base, direction, return_fn, attrname,
+ local_cls, referred_cls, **kw):
mock(base, direction, attrname)
- return generate_relationship(base, direction, return_fn,
- attrname, local_cls, referred_cls, **kw)
+ return generate_relationship(
+ base, direction, return_fn,
+ attrname, local_cls, referred_cls, **kw)
Base.prepare(generate_relationship=_gen_relationship)
assert set(tuple(c[1]) for c in mock.mock_calls).issuperset([
- (Base, interfaces.MANYTOONE, "nodes"),
- (Base, interfaces.MANYTOMANY, "keywords_collection"),
- (Base, interfaces.MANYTOMANY, "items_collection"),
- (Base, interfaces.MANYTOONE, "users"),
- (Base, interfaces.ONETOMANY, "addresses_collection"),
+ (Base, interfaces.MANYTOONE, "nodes"),
+ (Base, interfaces.MANYTOMANY, "keywords_collection"),
+ (Base, interfaces.MANYTOMANY, "items_collection"),
+ (Base, interfaces.MANYTOONE, "users"),
+ (Base, interfaces.ONETOMANY, "addresses_collection"),
])
+class CascadeTest(fixtures.MappedTest):
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ "a", metadata,
+ Column('id', Integer, primary_key=True)
+ )
+ Table(
+ "b", metadata,
+ Column('id', Integer, primary_key=True),
+ Column('aid', ForeignKey('a.id'), nullable=True)
+ )
+ Table(
+ "c", metadata,
+ Column('id', Integer, primary_key=True),
+ Column('aid', ForeignKey('a.id'), nullable=False)
+ )
+ Table(
+ "d", metadata,
+ Column('id', Integer, primary_key=True),
+ Column(
+ 'aid', ForeignKey('a.id', ondelete="cascade"), nullable=False)
+ )
+ Table(
+ "e", metadata,
+ Column('id', Integer, primary_key=True),
+ Column(
+ 'aid', ForeignKey('a.id', ondelete="set null"),
+ nullable=True)
+ )
+
+ def test_o2m_relationship_cascade(self):
+ Base = automap_base(metadata=self.metadata)
+ Base.prepare()
+
+ configure_mappers()
+
+ b_rel = Base.classes.a.b_collection
+ assert not b_rel.property.cascade.delete
+ assert not b_rel.property.cascade.delete_orphan
+ assert not b_rel.property.passive_deletes
+
+ assert b_rel.property.cascade.save_update
+
+ c_rel = Base.classes.a.c_collection
+ assert c_rel.property.cascade.delete
+ assert c_rel.property.cascade.delete_orphan
+ assert not c_rel.property.passive_deletes
+
+ assert c_rel.property.cascade.save_update
+
+ d_rel = Base.classes.a.d_collection
+ assert d_rel.property.cascade.delete
+ assert d_rel.property.cascade.delete_orphan
+ assert d_rel.property.passive_deletes
+
+ assert d_rel.property.cascade.save_update
+
+ e_rel = Base.classes.a.e_collection
+ assert not e_rel.property.cascade.delete
+ assert not e_rel.property.cascade.delete_orphan
+ assert e_rel.property.passive_deletes
+
+ assert e_rel.property.cascade.save_update
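+
+        # Summarizing the rule exercised above: automap derives cascade
+        # settings from the child table's foreign key. As a hypothetical
+        # sketch, a definition such as:
+        #
+        #     Table('child', metadata,
+        #           Column('id', Integer, primary_key=True),
+        #           Column('parent_id',
+        #                  ForeignKey('parent.id', ondelete='cascade'),
+        #                  nullable=False))
+        #
+        # yields delete/delete-orphan cascade plus passive_deletes on the
+        # generated relationship, while a nullable FK with no ondelete
+        # keeps only the default save-update cascade.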
+
+
class AutomapInhTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
- Table('single', metadata,
- Column('id', Integer, primary_key=True),
- Column('type', String(10)),
- test_needs_fk=True
- )
-
- Table('joined_base', metadata,
- Column('id', Integer, primary_key=True),
- Column('type', String(10)),
- test_needs_fk=True
- )
-
- Table('joined_inh', metadata,
- Column('id', Integer, ForeignKey('joined_base.id'), primary_key=True),
- test_needs_fk=True
- )
+ Table(
+ 'single', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('type', String(10)),
+ test_needs_fk=True
+ )
+
+ Table(
+ 'joined_base', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('type', String(10)),
+ test_needs_fk=True
+ )
+
+ Table(
+ 'joined_inh', metadata,
+ Column(
+ 'id', Integer,
+ ForeignKey('joined_base.id'), primary_key=True),
+ test_needs_fk=True
+ )
FixtureTest.define_tables(metadata)
@@ -179,7 +258,8 @@ class AutomapInhTest(fixtures.MappedTest):
type = Column(String)
- __mapper_args__ = {"polymorphic_identity": "u0",
+ __mapper_args__ = {
+ "polymorphic_identity": "u0",
"polymorphic_on": type}
class SubUser1(Single):
@@ -200,14 +280,14 @@ class AutomapInhTest(fixtures.MappedTest):
type = Column(String)
- __mapper_args__ = {"polymorphic_identity": "u0",
+ __mapper_args__ = {
+ "polymorphic_identity": "u0",
"polymorphic_on": type}
class SubJoined(Joined):
__tablename__ = 'joined_inh'
__mapper_args__ = {"polymorphic_identity": "u1"}
-
Base.prepare(engine=testing.db, reflect=True)
assert SubJoined.__mapper__.inherits is Joined.__mapper__
@@ -217,6 +297,9 @@ class AutomapInhTest(fixtures.MappedTest):
def test_conditional_relationship(self):
Base = automap_base()
+
def _gen_relationship(*arg, **kw):
return None
- Base.prepare(engine=testing.db, reflect=True, generate_relationship=_gen_relationship)
+ Base.prepare(
+ engine=testing.db, reflect=True,
+ generate_relationship=_gen_relationship)
diff --git a/test/ext/test_orderinglist.py b/test/ext/test_orderinglist.py
index 3223c8048..0eba137e7 100644
--- a/test/ext/test_orderinglist.py
+++ b/test/ext/test_orderinglist.py
@@ -349,6 +349,28 @@ class OrderingListTest(fixtures.TestBase):
self.assert_(srt.bullets[1].text == 'new 2')
self.assert_(srt.bullets[2].text == '3')
+ def test_replace_two(self):
+ """test #3191"""
+
+ self._setup(ordering_list('position', reorder_on_append=True))
+
+ s1 = Slide('Slide #1')
+
+ b1, b2, b3, b4 = Bullet('1'), Bullet('2'), Bullet('3'), Bullet('4')
+ s1.bullets = [b1, b2, b3]
+
+ eq_(
+ [b.position for b in s1.bullets],
+ [0, 1, 2]
+ )
+
+ s1.bullets = [b4, b2, b1]
+ eq_(
+ [b.position for b in s1.bullets],
+ [0, 1, 2]
+ )
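+
+        # A minimal sketch of what reorder_on_append=True guarantees in
+        # this test: replacing the collection wholesale renumbers the
+        # resulting items contiguously from zero:
+        #
+        #     s1.bullets = [b4, b2, b1]
+        #     assert [b.position for b in s1.bullets] == [0, 1, 2]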
+
def test_funky_ordering(self):
class Pos(object):
def __init__(self):
diff --git a/test/orm/inheritance/test_single.py b/test/orm/inheritance/test_single.py
index be42cce52..dbbe4c435 100644
--- a/test/orm/inheritance/test_single.py
+++ b/test/orm/inheritance/test_single.py
@@ -386,7 +386,31 @@ class RelationshipToSingleTest(testing.AssertsCompiledSQL, fixtures.MappedTest):
]
)
- def test_outer_join(self):
+ def test_of_type_aliased_fromjoinpoint(self):
+ Company, Employee, Engineer = self.classes.Company,\
+ self.classes.Employee,\
+ self.classes.Engineer
+ companies, employees = self.tables.companies, self.tables.employees
+
+ mapper(Company, companies, properties={
+            'employee': relationship(Employee)
+ })
+ mapper(Employee, employees, polymorphic_on=employees.c.type)
+ mapper(Engineer, inherits=Employee, polymorphic_identity='engineer')
+
+ sess = create_session()
+ self.assert_compile(
+ sess.query(Company).outerjoin(
+ Company.employee.of_type(Engineer),
+ aliased=True, from_joinpoint=True),
+ "SELECT companies.company_id AS companies_company_id, "
+ "companies.name AS companies_name FROM companies "
+ "LEFT OUTER JOIN employees AS employees_1 ON "
+ "companies.company_id = employees_1.company_id "
+ "AND employees_1.type IN (:type_1)"
+ )
+
+ def test_outer_join_prop(self):
Company, Employee, Engineer = self.classes.Company,\
self.classes.Employee,\
self.classes.Engineer
@@ -407,7 +431,7 @@ class RelationshipToSingleTest(testing.AssertsCompiledSQL, fixtures.MappedTest):
"= employees.company_id AND employees.type IN (:type_1)"
)
- def test_outer_join_alias(self):
+ def test_outer_join_prop_alias(self):
Company, Employee, Engineer = self.classes.Company,\
self.classes.Employee,\
self.classes.Engineer
@@ -431,6 +455,184 @@ class RelationshipToSingleTest(testing.AssertsCompiledSQL, fixtures.MappedTest):
)
+ def test_outer_join_literal_onclause(self):
+ Company, Employee, Engineer = self.classes.Company,\
+ self.classes.Employee,\
+ self.classes.Engineer
+ companies, employees = self.tables.companies, self.tables.employees
+
+ mapper(Company, companies, properties={
+            'engineers': relationship(Engineer)
+ })
+ mapper(Employee, employees, polymorphic_on=employees.c.type)
+ mapper(Engineer, inherits=Employee, polymorphic_identity='engineer')
+
+ sess = create_session()
+ self.assert_compile(
+ sess.query(Company, Engineer).outerjoin(
+ Engineer, Company.company_id == Engineer.company_id),
+ "SELECT companies.company_id AS companies_company_id, "
+ "companies.name AS companies_name, "
+ "employees.employee_id AS employees_employee_id, "
+ "employees.name AS employees_name, "
+ "employees.manager_data AS employees_manager_data, "
+ "employees.engineer_info AS employees_engineer_info, "
+ "employees.type AS employees_type, "
+ "employees.company_id AS employees_company_id FROM companies "
+ "LEFT OUTER JOIN employees ON "
+ "companies.company_id = employees.company_id "
+ "AND employees.type IN (:type_1)"
+ )
+
+ def test_outer_join_literal_onclause_alias(self):
+ Company, Employee, Engineer = self.classes.Company,\
+ self.classes.Employee,\
+ self.classes.Engineer
+ companies, employees = self.tables.companies, self.tables.employees
+
+ mapper(Company, companies, properties={
+            'engineers': relationship(Engineer)
+ })
+ mapper(Employee, employees, polymorphic_on=employees.c.type)
+ mapper(Engineer, inherits=Employee, polymorphic_identity='engineer')
+
+ eng_alias = aliased(Engineer)
+ sess = create_session()
+ self.assert_compile(
+ sess.query(Company, eng_alias).outerjoin(
+ eng_alias, Company.company_id == eng_alias.company_id),
+ "SELECT companies.company_id AS companies_company_id, "
+ "companies.name AS companies_name, "
+ "employees_1.employee_id AS employees_1_employee_id, "
+ "employees_1.name AS employees_1_name, "
+ "employees_1.manager_data AS employees_1_manager_data, "
+ "employees_1.engineer_info AS employees_1_engineer_info, "
+ "employees_1.type AS employees_1_type, "
+ "employees_1.company_id AS employees_1_company_id "
+ "FROM companies LEFT OUTER JOIN employees AS employees_1 ON "
+ "companies.company_id = employees_1.company_id "
+ "AND employees_1.type IN (:type_1)"
+ )
+
+ def test_outer_join_no_onclause(self):
+ Company, Employee, Engineer = self.classes.Company,\
+ self.classes.Employee,\
+ self.classes.Engineer
+ companies, employees = self.tables.companies, self.tables.employees
+
+ mapper(Company, companies, properties={
+            'engineers': relationship(Engineer)
+ })
+ mapper(Employee, employees, polymorphic_on=employees.c.type)
+ mapper(Engineer, inherits=Employee, polymorphic_identity='engineer')
+
+ sess = create_session()
+ self.assert_compile(
+ sess.query(Company, Engineer).outerjoin(
+ Engineer),
+ "SELECT companies.company_id AS companies_company_id, "
+ "companies.name AS companies_name, "
+ "employees.employee_id AS employees_employee_id, "
+ "employees.name AS employees_name, "
+ "employees.manager_data AS employees_manager_data, "
+ "employees.engineer_info AS employees_engineer_info, "
+ "employees.type AS employees_type, "
+ "employees.company_id AS employees_company_id "
+ "FROM companies LEFT OUTER JOIN employees ON "
+ "companies.company_id = employees.company_id "
+ "AND employees.type IN (:type_1)"
+ )
+
+ def test_outer_join_no_onclause_alias(self):
+ Company, Employee, Engineer = self.classes.Company,\
+ self.classes.Employee,\
+ self.classes.Engineer
+ companies, employees = self.tables.companies, self.tables.employees
+
+ mapper(Company, companies, properties={
+ 'engineers':relationship(Engineer)
+            'engineers': relationship(Engineer)
+ mapper(Employee, employees, polymorphic_on=employees.c.type)
+ mapper(Engineer, inherits=Employee, polymorphic_identity='engineer')
+
+ eng_alias = aliased(Engineer)
+ sess = create_session()
+ self.assert_compile(
+ sess.query(Company, eng_alias).outerjoin(
+ eng_alias),
+ "SELECT companies.company_id AS companies_company_id, "
+ "companies.name AS companies_name, "
+ "employees_1.employee_id AS employees_1_employee_id, "
+ "employees_1.name AS employees_1_name, "
+ "employees_1.manager_data AS employees_1_manager_data, "
+ "employees_1.engineer_info AS employees_1_engineer_info, "
+ "employees_1.type AS employees_1_type, "
+ "employees_1.company_id AS employees_1_company_id "
+ "FROM companies LEFT OUTER JOIN employees AS employees_1 ON "
+ "companies.company_id = employees_1.company_id "
+ "AND employees_1.type IN (:type_1)"
+ )
+
+ def test_no_aliasing_from_overlap(self):
+ # test [ticket:3233]
+
+ Company, Employee, Engineer, Manager = self.classes.Company,\
+ self.classes.Employee,\
+ self.classes.Engineer,\
+ self.classes.Manager
+
+ companies, employees = self.tables.companies, self.tables.employees
+
+ mapper(Company, companies, properties={
+ 'employees': relationship(Employee, backref="company")
+ })
+ mapper(Employee, employees, polymorphic_on=employees.c.type)
+ mapper(Engineer, inherits=Employee, polymorphic_identity='engineer')
+ mapper(Manager, inherits=Employee, polymorphic_identity='manager')
+
+ s = create_session()
+
+ q1 = s.query(Engineer).\
+ join(Engineer.company).\
+ join(Manager, Company.employees)
+
+ q2 = s.query(Engineer).\
+ join(Engineer.company).\
+ join(Manager, Company.company_id == Manager.company_id)
+
+ q3 = s.query(Engineer).\
+ join(Engineer.company).\
+ join(Manager, Company.employees.of_type(Manager))
+
+ q4 = s.query(Engineer).\
+ join(Company, Company.company_id == Engineer.company_id).\
+ join(Manager, Company.employees.of_type(Manager))
+
+ q5 = s.query(Engineer).\
+ join(Company, Company.company_id == Engineer.company_id).\
+ join(Manager, Company.company_id == Manager.company_id)
+
+        # note that the query is incorrect SQL: we JOIN to the
+        # employees table twice. However, this is what's expected, so we
+        # seek to be consistent; previously, aliasing would sneak in due
+        # to the nature of the "left" side.
+ for q in [q1, q2, q3, q4, q5]:
+ self.assert_compile(
+ q,
+ "SELECT employees.employee_id AS employees_employee_id, "
+ "employees.name AS employees_name, "
+ "employees.manager_data AS employees_manager_data, "
+ "employees.engineer_info AS employees_engineer_info, "
+ "employees.type AS employees_type, "
+ "employees.company_id AS employees_company_id "
+ "FROM employees JOIN companies "
+ "ON companies.company_id = employees.company_id "
+ "JOIN employees "
+ "ON companies.company_id = employees.company_id "
+ "AND employees.type IN (:type_1) "
+ "WHERE employees.type IN (:type_2)"
+ )
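+
+        # For reference, a query that avoids the double JOIN to the
+        # employees table would alias the second entity explicitly; a
+        # hedged sketch using a hypothetical alias:
+        #
+        #     m_alias = aliased(Manager)
+        #     s.query(Engineer).join(Engineer.company).\
+        #         join(Company.employees.of_type(m_alias))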
+
def test_relationship_to_subclass(self):
JuniorEngineer, Company, companies, Manager, \
Employee, employees, Engineer = (self.classes.JuniorEngineer,
diff --git a/test/orm/test_assorted_eager.py b/test/orm/test_assorted_eager.py
index 2bee3cbd6..48faa172f 100644
--- a/test/orm/test_assorted_eager.py
+++ b/test/orm/test_assorted_eager.py
@@ -82,8 +82,8 @@ class EagerTest(fixtures.MappedTest):
mapper(Category, categories)
mapper(Option, options, properties=dict(
- owner=relationship(Owner),
- test=relationship(Thing)))
+ owner=relationship(Owner, viewonly=True),
+ test=relationship(Thing, viewonly=True)))
mapper(Thing, tests, properties=dict(
owner=relationship(Owner, backref='tests'),
diff --git a/test/orm/test_attributes.py b/test/orm/test_attributes.py
index 46d5f86e5..9c1f7a985 100644
--- a/test/orm/test_attributes.py
+++ b/test/orm/test_attributes.py
@@ -2522,6 +2522,53 @@ class ListenerTest(fixtures.ORMTest):
f1.barset.add(b1)
assert f1.barset.pop().data == 'some bar appended'
+ def test_named(self):
+ canary = Mock()
+
+ class Foo(object):
+ pass
+
+ class Bar(object):
+ pass
+
+ instrumentation.register_class(Foo)
+ instrumentation.register_class(Bar)
+ attributes.register_attribute(
+ Foo, 'data', uselist=False,
+ useobject=False)
+ attributes.register_attribute(
+ Foo, 'barlist', uselist=True,
+ useobject=True)
+
+ event.listen(Foo.data, 'set', canary.set, named=True)
+ event.listen(Foo.barlist, 'append', canary.append, named=True)
+ event.listen(Foo.barlist, 'remove', canary.remove, named=True)
+
+ f1 = Foo()
+ b1 = Bar()
+ f1.data = 5
+ f1.barlist.append(b1)
+ f1.barlist.remove(b1)
+ eq_(
+ canary.mock_calls,
+ [
+ call.set(
+ oldvalue=attributes.NO_VALUE,
+ initiator=attributes.Event(
+ Foo.data.impl, attributes.OP_REPLACE),
+ target=f1, value=5),
+ call.append(
+ initiator=attributes.Event(
+ Foo.barlist.impl, attributes.OP_APPEND),
+ target=f1,
+ value=b1),
+ call.remove(
+ initiator=attributes.Event(
+ Foo.barlist.impl, attributes.OP_REMOVE),
+ target=f1,
+ value=b1)]
+ )
+
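A minimal sketch of the named=True listener style the new test exercises; with named=True the event's arguments arrive as keyword arguments (names below are illustrative):

    from sqlalchemy import Column, Integer, String, event
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Foo(Base):
        __tablename__ = 'foo'
        id = Column(Integer, primary_key=True)
        data = Column(String(50))

    @event.listens_for(Foo.data, 'set', named=True)
    def on_set(**kw):
        # kw contains 'target', 'value', 'oldvalue' and 'initiator'
        print("set %r -> %r" % (kw['oldvalue'], kw['value']))

    f = Foo()
    f.data = 'hello'
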
def test_collection_link_events(self):
class Foo(object):
pass
@@ -2559,9 +2606,6 @@ class ListenerTest(fixtures.ORMTest):
)
-
-
-
def test_none_on_collection_event(self):
"""test that append/remove of None in collections emits events.
diff --git a/test/orm/test_bind.py b/test/orm/test_bind.py
index 0d869130b..33cd66ebc 100644
--- a/test/orm/test_bind.py
+++ b/test/orm/test_bind.py
@@ -1,14 +1,206 @@
-from sqlalchemy.testing import assert_raises, assert_raises_message
-from sqlalchemy import MetaData, Integer
+from sqlalchemy.testing import assert_raises_message
+from sqlalchemy import MetaData, Integer, ForeignKey
from sqlalchemy.testing.schema import Table
from sqlalchemy.testing.schema import Column
from sqlalchemy.orm import mapper, create_session
import sqlalchemy as sa
from sqlalchemy import testing
-from sqlalchemy.testing import fixtures
+from sqlalchemy.testing import fixtures, eq_, engines, is_
+from sqlalchemy.orm import relationship, Session, backref, sessionmaker
+from test.orm import _fixtures
+from sqlalchemy.testing.mock import Mock
-class BindTest(fixtures.MappedTest):
+class BindIntegrationTest(_fixtures.FixtureTest):
+ run_inserts = None
+
+ def test_mapped_binds(self):
+ Address, addresses, users, User = (self.classes.Address,
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
+
+ # ensure tables are unbound
+ m2 = sa.MetaData()
+ users_unbound = users.tometadata(m2)
+ addresses_unbound = addresses.tometadata(m2)
+
+ mapper(Address, addresses_unbound)
+ mapper(User, users_unbound, properties={
+ 'addresses': relationship(Address,
+ backref=backref("user", cascade="all"),
+ cascade="all")})
+
+ sess = Session(binds={User: self.metadata.bind,
+ Address: self.metadata.bind})
+
+ u1 = User(id=1, name='ed')
+ sess.add(u1)
+ eq_(sess.query(User).filter(User.id == 1).all(),
+ [User(id=1, name='ed')])
+
+ # test expression binding
+
+ sess.execute(users_unbound.insert(), params=dict(id=2,
+ name='jack'))
+ eq_(sess.execute(users_unbound.select(users_unbound.c.id
+ == 2)).fetchall(), [(2, 'jack')])
+
+ eq_(sess.execute(users_unbound.select(User.id == 2)).fetchall(),
+ [(2, 'jack')])
+
+ sess.execute(users_unbound.delete())
+ eq_(sess.execute(users_unbound.select()).fetchall(), [])
+
+ sess.close()
+
+ def test_table_binds(self):
+ Address, addresses, users, User = (self.classes.Address,
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
+
+ # ensure tables are unbound
+ m2 = sa.MetaData()
+ users_unbound = users.tometadata(m2)
+ addresses_unbound = addresses.tometadata(m2)
+
+ mapper(Address, addresses_unbound)
+ mapper(User, users_unbound, properties={
+ 'addresses': relationship(Address,
+ backref=backref("user", cascade="all"),
+ cascade="all")})
+
+ Session = sessionmaker(binds={users_unbound: self.metadata.bind,
+ addresses_unbound: self.metadata.bind})
+ sess = Session()
+
+ u1 = User(id=1, name='ed')
+ sess.add(u1)
+ eq_(sess.query(User).filter(User.id == 1).all(),
+ [User(id=1, name='ed')])
+
+ sess.execute(users_unbound.insert(), params=dict(id=2, name='jack'))
+
+ eq_(sess.execute(users_unbound.select(users_unbound.c.id
+ == 2)).fetchall(), [(2, 'jack')])
+
+ eq_(sess.execute(users_unbound.select(User.id == 2)).fetchall(),
+ [(2, 'jack')])
+
+ sess.execute(users_unbound.delete())
+ eq_(sess.execute(users_unbound.select()).fetchall(), [])
+
+ sess.close()
+
+ def test_bind_from_metadata(self):
+ users, User = self.tables.users, self.classes.User
+
+ mapper(User, users)
+
+ session = create_session()
+ session.execute(users.insert(), dict(name='Johnny'))
+
+ assert len(session.query(User).filter_by(name='Johnny').all()) == 1
+
+ session.execute(users.delete())
+
+ assert len(session.query(User).filter_by(name='Johnny').all()) == 0
+ session.close()
+
+ def test_bind_arguments(self):
+ users, Address, addresses, User = (self.tables.users,
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
+
+ mapper(User, users)
+ mapper(Address, addresses)
+
+ e1 = engines.testing_engine()
+ e2 = engines.testing_engine()
+ e3 = engines.testing_engine()
+
+ sess = Session(e3)
+ sess.bind_mapper(User, e1)
+ sess.bind_mapper(Address, e2)
+
+ assert sess.connection().engine is e3
+ assert sess.connection(bind=e1).engine is e1
+ assert sess.connection(mapper=Address, bind=e1).engine is e1
+ assert sess.connection(mapper=Address).engine is e2
+ assert sess.connection(clause=addresses.select()).engine is e2
+ assert sess.connection(mapper=User,
+ clause=addresses.select()).engine is e1
+ assert sess.connection(mapper=User,
+ clause=addresses.select(),
+ bind=e2).engine is e2
+
+ sess.close()
+
+ @engines.close_open_connections
+ def test_bound_connection(self):
+ users, User = self.tables.users, self.classes.User
+
+ mapper(User, users)
+ c = testing.db.connect()
+ sess = create_session(bind=c)
+ sess.begin()
+ transaction = sess.transaction
+ u = User(name='u1')
+ sess.add(u)
+ sess.flush()
+ assert transaction._connection_for_bind(testing.db) \
+ is transaction._connection_for_bind(c) is c
+
+ assert_raises_message(sa.exc.InvalidRequestError,
+ 'Session already has a Connection '
+ 'associated',
+ transaction._connection_for_bind,
+ testing.db.connect())
+ transaction.rollback()
+ assert len(sess.query(User).all()) == 0
+ sess.close()
+
+ def test_bound_connection_transactional(self):
+ User, users = self.classes.User, self.tables.users
+
+ mapper(User, users)
+ c = testing.db.connect()
+
+ sess = create_session(bind=c, autocommit=False)
+ u = User(name='u1')
+ sess.add(u)
+ sess.flush()
+ sess.close()
+ assert not c.in_transaction()
+ assert c.scalar("select count(1) from users") == 0
+
+ sess = create_session(bind=c, autocommit=False)
+ u = User(name='u2')
+ sess.add(u)
+ sess.flush()
+ sess.commit()
+ assert not c.in_transaction()
+ assert c.scalar("select count(1) from users") == 1
+ c.execute("delete from users")
+ assert c.scalar("select count(1) from users") == 0
+
+ c = testing.db.connect()
+
+ trans = c.begin()
+ sess = create_session(bind=c, autocommit=True)
+ u = User(name='u3')
+ sess.add(u)
+ sess.flush()
+ assert c.in_transaction()
+ trans.commit()
+ assert not c.in_transaction()
+ assert c.scalar("select count(1) from users") == 1
+
+
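A condensed sketch of the bound-connection pattern these tests drive: a Session bound to an existing Connection takes part in that connection's outer transaction rather than opening its own (engine URL is illustrative):

    from sqlalchemy import create_engine
    from sqlalchemy.orm import Session

    engine = create_engine('sqlite://')
    conn = engine.connect()
    trans = conn.begin()

    sess = Session(bind=conn)
    # ... ORM work here; flushes execute on `conn` inside `trans` ...
    sess.flush()

    trans.rollback()  # discards everything the session flushed
    conn.close()
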
+class SessionBindTest(fixtures.MappedTest):
+
@classmethod
def define_tables(cls, metadata):
Table('test_table', metadata,
@@ -60,3 +252,216 @@ class BindTest(fixtures.MappedTest):
sess.flush)
+class GetBindTest(fixtures.MappedTest):
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'base_table', metadata,
+ Column('id', Integer, primary_key=True)
+ )
+ Table(
+ 'w_mixin_table', metadata,
+ Column('id', Integer, primary_key=True)
+ )
+ Table(
+ 'joined_sub_table', metadata,
+ Column('id', ForeignKey('base_table.id'), primary_key=True)
+ )
+ Table(
+ 'concrete_sub_table', metadata,
+ Column('id', Integer, primary_key=True)
+ )
+
+ @classmethod
+ def setup_classes(cls):
+ class MixinOne(cls.Basic):
+ pass
+
+ class BaseClass(cls.Basic):
+ pass
+
+ class ClassWMixin(MixinOne, cls.Basic):
+ pass
+
+ class JoinedSubClass(BaseClass):
+ pass
+
+ class ConcreteSubClass(BaseClass):
+ pass
+
+ @classmethod
+ def setup_mappers(cls):
+ mapper(cls.classes.ClassWMixin, cls.tables.w_mixin_table)
+ mapper(cls.classes.BaseClass, cls.tables.base_table)
+ mapper(
+ cls.classes.JoinedSubClass,
+ cls.tables.joined_sub_table, inherits=cls.classes.BaseClass)
+ mapper(
+ cls.classes.ConcreteSubClass,
+ cls.tables.concrete_sub_table, inherits=cls.classes.BaseClass,
+ concrete=True)
+
+ def _fixture(self, binds):
+ return Session(binds=binds)
+
+ def test_fallback_table_metadata(self):
+ session = self._fixture({})
+ is_(
+ session.get_bind(self.classes.BaseClass),
+ testing.db
+ )
+
+ def test_bind_base_table_base_class(self):
+ base_class_bind = Mock()
+ session = self._fixture({
+ self.tables.base_table: base_class_bind
+ })
+
+ is_(
+ session.get_bind(self.classes.BaseClass),
+ base_class_bind
+ )
+
+ def test_bind_base_table_joined_sub_class(self):
+ base_class_bind = Mock()
+ session = self._fixture({
+ self.tables.base_table: base_class_bind
+ })
+
+ is_(
+ session.get_bind(self.classes.BaseClass),
+ base_class_bind
+ )
+ is_(
+ session.get_bind(self.classes.JoinedSubClass),
+ base_class_bind
+ )
+
+ def test_bind_joined_sub_table_joined_sub_class(self):
+ base_class_bind = Mock(name='base')
+ joined_class_bind = Mock(name='joined')
+ session = self._fixture({
+ self.tables.base_table: base_class_bind,
+ self.tables.joined_sub_table: joined_class_bind
+ })
+
+ is_(
+ session.get_bind(self.classes.BaseClass),
+ base_class_bind
+ )
+ # joined table inheritance has to query based on the base
+ # table, so this is what we expect
+ is_(
+ session.get_bind(self.classes.JoinedSubClass),
+ base_class_bind
+ )
+
+ def test_bind_base_table_concrete_sub_class(self):
+ base_class_bind = Mock()
+ session = self._fixture({
+ self.tables.base_table: base_class_bind
+ })
+
+ is_(
+ session.get_bind(self.classes.ConcreteSubClass),
+ testing.db
+ )
+
+ def test_bind_sub_table_concrete_sub_class(self):
+ base_class_bind = Mock(name='base')
+ concrete_sub_bind = Mock(name='concrete')
+
+ session = self._fixture({
+ self.tables.base_table: base_class_bind,
+ self.tables.concrete_sub_table: concrete_sub_bind
+ })
+
+ is_(
+ session.get_bind(self.classes.BaseClass),
+ base_class_bind
+ )
+ is_(
+ session.get_bind(self.classes.ConcreteSubClass),
+ concrete_sub_bind
+ )
+
+ def test_bind_base_class_base_class(self):
+ base_class_bind = Mock()
+ session = self._fixture({
+ self.classes.BaseClass: base_class_bind
+ })
+
+ is_(
+ session.get_bind(self.classes.BaseClass),
+ base_class_bind
+ )
+
+ def test_bind_mixin_class_simple_class(self):
+ base_class_bind = Mock()
+ session = self._fixture({
+ self.classes.MixinOne: base_class_bind
+ })
+
+ is_(
+ session.get_bind(self.classes.ClassWMixin),
+ base_class_bind
+ )
+
+ def test_bind_base_class_joined_sub_class(self):
+ base_class_bind = Mock()
+ session = self._fixture({
+ self.classes.BaseClass: base_class_bind
+ })
+
+ is_(
+ session.get_bind(self.classes.JoinedSubClass),
+ base_class_bind
+ )
+
+ def test_bind_joined_sub_class_joined_sub_class(self):
+ base_class_bind = Mock(name='base')
+ joined_class_bind = Mock(name='joined')
+ session = self._fixture({
+ self.classes.BaseClass: base_class_bind,
+ self.classes.JoinedSubClass: joined_class_bind
+ })
+
+ is_(
+ session.get_bind(self.classes.BaseClass),
+ base_class_bind
+ )
+ is_(
+ session.get_bind(self.classes.JoinedSubClass),
+ joined_class_bind
+ )
+
+ def test_bind_base_class_concrete_sub_class(self):
+ base_class_bind = Mock()
+ session = self._fixture({
+ self.classes.BaseClass: base_class_bind
+ })
+
+ is_(
+ session.get_bind(self.classes.ConcreteSubClass),
+ base_class_bind
+ )
+
+ def test_bind_sub_class_concrete_sub_class(self):
+ base_class_bind = Mock(name='base')
+ concrete_sub_bind = Mock(name='concrete')
+
+ session = self._fixture({
+ self.classes.BaseClass: base_class_bind,
+ self.classes.ConcreteSubClass: concrete_sub_bind
+ })
+
+ is_(
+ session.get_bind(self.classes.BaseClass),
+ base_class_bind
+ )
+ is_(
+ session.get_bind(self.classes.ConcreteSubClass),
+ concrete_sub_bind
+ )
+
+
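As the new GetBindTest asserts, Session(binds=...) accepts mapped classes, mappers and Table objects as keys, and joined-table subclasses fall back to their base's bind; a minimal sketch (illustrative names):

    from sqlalchemy import Column, Integer, ForeignKey, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import Session

    Base = declarative_base()

    class BaseClass(Base):
        __tablename__ = 'base_table'
        id = Column(Integer, primary_key=True)

    class JoinedSubClass(BaseClass):
        __tablename__ = 'joined_sub_table'
        id = Column(Integer, ForeignKey('base_table.id'), primary_key=True)

    e1 = create_engine('sqlite://')
    session = Session(binds={BaseClass: e1})

    # joined-table inheritance queries against the base table, so the
    # subclass resolves to the base class's bind
    assert session.get_bind(JoinedSubClass) is e1
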
diff --git a/test/orm/test_cascade.py b/test/orm/test_cascade.py
index bd6a17286..e39911d0f 100644
--- a/test/orm/test_cascade.py
+++ b/test/orm/test_cascade.py
@@ -1,3 +1,4 @@
+import copy
from sqlalchemy.testing import assert_raises, assert_raises_message
from sqlalchemy import Integer, String, ForeignKey, Sequence, \
@@ -13,6 +14,7 @@ from sqlalchemy.testing import eq_
from sqlalchemy.testing import fixtures
from test.orm import _fixtures
+
class CascadeArgTest(fixtures.MappedTest):
run_inserts = None
run_create_tables = None
@@ -85,6 +87,12 @@ class CascadeArgTest(fixtures.MappedTest):
orm_util.CascadeOptions("all, delete-orphan"),
frozenset)
+ def test_cascade_deepcopy(self):
+ old = orm_util.CascadeOptions("all, delete-orphan")
+ new = copy.deepcopy(old)
+ eq_(old, new)
+
+
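A short sketch of what the deepcopy test above relies on: CascadeOptions is a frozenset subclass whose parsed flags survive copying:

    import copy
    from sqlalchemy.orm import util as orm_util

    opts = orm_util.CascadeOptions("all, delete-orphan")
    assert copy.deepcopy(opts) == opts
    assert opts.delete_orphan  # flags parsed from the cascade string
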
def test_cascade_assignable(self):
User, Address = self.classes.User, self.classes.Address
users, addresses = self.tables.users, self.tables.addresses
diff --git a/test/orm/test_collection.py b/test/orm/test_collection.py
index f94c742b3..82331b9af 100644
--- a/test/orm/test_collection.py
+++ b/test/orm/test_collection.py
@@ -2191,6 +2191,23 @@ class InstrumentationTest(fixtures.ORMTest):
f1.attr = l2
eq_(canary, [adapter_1, f1.attr._sa_adapter, None])
+ def test_referenced_by_owner(self):
+
+ class Foo(object):
+ pass
+
+ instrumentation.register_class(Foo)
+ attributes.register_attribute(
+ Foo, 'attr', uselist=True, useobject=True)
+
+ f1 = Foo()
+ f1.attr.append(3)
+
+ adapter = collections.collection_adapter(f1.attr)
+ assert adapter._referenced_by_owner
+
+ f1.attr = []
+ assert not adapter._referenced_by_owner
diff --git a/test/orm/test_eager_relations.py b/test/orm/test_eager_relations.py
index 214b592b5..4c6d9bbe1 100644
--- a/test/orm/test_eager_relations.py
+++ b/test/orm/test_eager_relations.py
@@ -1253,8 +1253,9 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
orders=relationship(Order, lazy=False, order_by=orders.c.id),
))
q = create_session().query(User)
- self.l = q.all()
- eq_(self.static.user_all_result, q.order_by(User.id).all())
+ def go():
+ eq_(self.static.user_all_result, q.order_by(User.id).all())
+ self.assert_sql_count(testing.db, go, 1)
def test_against_select(self):
"""test eager loading of a mapper which is against a select"""
diff --git a/test/orm/test_events.py b/test/orm/test_events.py
index e6efd6fb9..904293102 100644
--- a/test/orm/test_events.py
+++ b/test/orm/test_events.py
@@ -112,6 +112,7 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
mapper(User, users)
canary = self.listen_all(User)
+ named_canary = self.listen_all(User, named=True)
sess = create_session()
u = User(name='u1')
@@ -125,13 +126,15 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
sess.flush()
sess.delete(u)
sess.flush()
- eq_(canary,
- ['init', 'before_insert',
- 'after_insert', 'expire',
- 'refresh',
- 'load',
- 'before_update', 'after_update', 'before_delete',
- 'after_delete'])
+ expected = [
+ 'init', 'before_insert',
+ 'after_insert', 'expire',
+ 'refresh',
+ 'load',
+ 'before_update', 'after_update', 'before_delete',
+ 'after_delete']
+ eq_(canary, expected)
+ eq_(named_canary, expected)
def test_insert_before_configured(self):
users, User = self.tables.users, self.classes.User
@@ -1193,6 +1196,7 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest):
'before_commit', 'after_commit','after_transaction_end']
)
+
def test_rollback_hook(self):
User, users = self.classes.User, self.tables.users
sess, canary = self._listener_fixture()
diff --git a/test/orm/test_joins.py b/test/orm/test_joins.py
index 40bc01b5d..eba47dbec 100644
--- a/test/orm/test_joins.py
+++ b/test/orm/test_joins.py
@@ -361,6 +361,27 @@ class InheritedJoinTest(fixtures.MappedTest, AssertsCompiledSQL):
)
+class JoinOnSynonymTest(_fixtures.FixtureTest, AssertsCompiledSQL):
+ @classmethod
+ def setup_mappers(cls):
+ User = cls.classes.User
+ Address = cls.classes.Address
+ users, addresses = (cls.tables.users, cls.tables.addresses)
+ mapper(User, users, properties={
+ 'addresses': relationship(Address),
+ 'ad_syn': synonym("addresses")
+ })
+ mapper(Address, addresses)
+
+ def test_join_on_synonym(self):
+ User = self.classes.User
+ self.assert_compile(
+ Session().query(User).join(User.ad_syn),
+ "SELECT users.id AS users_id, users.name AS users_name "
+ "FROM users JOIN addresses ON users.id = addresses.user_id"
+ )
+
+
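A self-contained sketch of the synonym join the relocated test covers; synonym("addresses") proxies the relationship so Query.join() can traverse it:

    from sqlalchemy import Column, Integer, String, ForeignKey
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import relationship, synonym, Session

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'users'
        id = Column(Integer, primary_key=True)
        name = Column(String(50))
        addresses = relationship("Address")
        ad_syn = synonym("addresses")

    class Address(Base):
        __tablename__ = 'addresses'
        id = Column(Integer, primary_key=True)
        user_id = Column(Integer, ForeignKey('users.id'))

    # renders: SELECT ... FROM users JOIN addresses
    #          ON users.id = addresses.user_id
    print(Session().query(User).join(User.ad_syn))
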
class JoinTest(QueryTest, AssertsCompiledSQL):
__dialect__ = 'default'
@@ -409,24 +430,6 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
sess.query(literal_column('x'), User).join, Address
)
- def test_join_on_synonym(self):
-
- class User(object):
- pass
- class Address(object):
- pass
- users, addresses = (self.tables.users, self.tables.addresses)
- mapper(User, users, properties={
- 'addresses':relationship(Address),
- 'ad_syn':synonym("addresses")
- })
- mapper(Address, addresses)
- self.assert_compile(
- Session().query(User).join(User.ad_syn),
- "SELECT users.id AS users_id, users.name AS users_name "
- "FROM users JOIN addresses ON users.id = addresses.user_id"
- )
-
def test_multi_tuple_form(self):
"""test the 'tuple' form of join, now superseded
by the two-element join() form.
diff --git a/test/orm/test_query.py b/test/orm/test_query.py
index 1c5fca144..354bbe5b1 100644
--- a/test/orm/test_query.py
+++ b/test/orm/test_query.py
@@ -1236,7 +1236,7 @@ class ColumnPropertyTest(_fixtures.FixtureTest, AssertsCompiledSQL):
__dialect__ = 'default'
run_setup_mappers = 'each'
- def _fixture(self, label=True):
+ def _fixture(self, label=True, polymorphic=False):
User, Address = self.classes("User", "Address")
users, addresses = self.tables("users", "addresses")
stmt = select([func.max(addresses.c.email_address)]).\
@@ -1247,7 +1247,7 @@ class ColumnPropertyTest(_fixtures.FixtureTest, AssertsCompiledSQL):
mapper(User, users, properties={
"ead": column_property(stmt)
- })
+ }, with_polymorphic="*" if polymorphic else None)
mapper(Address, addresses)
def test_order_by_column_prop_string(self):
@@ -1355,6 +1355,22 @@ class ColumnPropertyTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"users AS users_1 ORDER BY email_ad, anon_1"
)
+ def test_order_by_column_labeled_prop_attr_aliased_four(self):
+ User = self.classes.User
+ self._fixture(label=True, polymorphic=True)
+
+ ua = aliased(User)
+ s = Session()
+ q = s.query(ua, User.id).order_by(ua.ead)
+ self.assert_compile(
+ q,
+ "SELECT (SELECT max(addresses.email_address) AS max_1 FROM "
+ "addresses WHERE addresses.user_id = users_1.id) AS anon_1, "
+ "users_1.id AS users_1_id, users_1.name AS users_1_name, "
+ "users.id AS users_id FROM users AS users_1, users ORDER BY anon_1"
+ )
+
+
def test_order_by_column_unlabeled_prop_attr_aliased_one(self):
User = self.classes.User
self._fixture(label=False)
@@ -2467,6 +2483,8 @@ class YieldTest(_fixtures.FixtureTest):
class HintsTest(QueryTest, AssertsCompiledSQL):
+ __dialect__ = 'default'
+
def test_hints(self):
User = self.classes.User
@@ -2502,6 +2520,28 @@ class HintsTest(QueryTest, AssertsCompiledSQL):
"ON users_1.id > users.id", dialect=dialect
)
+ def test_statement_hints(self):
+ User = self.classes.User
+
+ sess = create_session()
+ stmt = sess.query(User).\
+ with_statement_hint("test hint one").\
+ with_statement_hint("test hint two").\
+ with_statement_hint("test hint three", "postgresql")
+
+ self.assert_compile(
+ stmt,
+ "SELECT users.id AS users_id, users.name AS users_name "
+ "FROM users test hint one test hint two",
+ )
+
+ self.assert_compile(
+ stmt,
+ "SELECT users.id AS users_id, users.name AS users_name "
+ "FROM users test hint one test hint two test hint three",
+ dialect='postgresql'
+ )
+
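A Core-level sketch of the behavior the new test asserts: with_statement_hint() appends hint text to the statement, and a hint given a dialect name renders only when compiling for that dialect:

    from sqlalchemy.sql import table, column, select

    t = table('users', column('id'))
    stmt = select([t.c.id]).\
        with_statement_hint("test hint one").\
        with_statement_hint("test hint two", "postgresql")

    # default dialect renders only the unqualified hint:
    #   SELECT users.id FROM users test hint one
    print(stmt)
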
class TextTest(QueryTest, AssertsCompiledSQL):
__dialect__ = 'default'
diff --git a/test/orm/test_rel_fn.py b/test/orm/test_rel_fn.py
index f0aa538f4..150b59b75 100644
--- a/test/orm/test_rel_fn.py
+++ b/test/orm/test_rel_fn.py
@@ -242,6 +242,22 @@ class _JoinFixtures(object):
**kw
)
+ def _join_fixture_o2m_composite_selfref_func_remote_side(self, **kw):
+ return relationships.JoinCondition(
+ self.composite_selfref,
+ self.composite_selfref,
+ self.composite_selfref,
+ self.composite_selfref,
+ primaryjoin=and_(
+ self.composite_selfref.c.group_id ==
+ func.foo(self.composite_selfref.c.group_id),
+ self.composite_selfref.c.parent_id ==
+ self.composite_selfref.c.id
+ ),
+ remote_side=set([self.composite_selfref.c.parent_id]),
+ **kw
+ )
+
def _join_fixture_o2m_composite_selfref_func_annotated(self, **kw):
return relationships.JoinCondition(
self.composite_selfref,
@@ -729,6 +745,10 @@ class ColumnCollectionsTest(_JoinFixtures, fixtures.TestBase,
self._join_fixture_o2m_composite_selfref_func
)
+ def test_determine_local_remote_pairs_o2m_composite_selfref_func_rs(self):
+ # no warning
+ self._join_fixture_o2m_composite_selfref_func_remote_side()
+
def test_determine_local_remote_pairs_o2m_overlap_func_warning(self):
self._assert_non_simple_warning(
self._join_fixture_m2o_sub_to_joined_sub_func
diff --git a/test/orm/test_relationships.py b/test/orm/test_relationships.py
index 6bcb02639..2a15ce666 100644
--- a/test/orm/test_relationships.py
+++ b/test/orm/test_relationships.py
@@ -5,20 +5,22 @@ from sqlalchemy import testing
from sqlalchemy import Integer, String, ForeignKey, MetaData, and_
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.orm import mapper, relationship, relation, \
- backref, create_session, configure_mappers, \
- clear_mappers, sessionmaker, attributes,\
- Session, composite, column_property, foreign,\
- remote, synonym, joinedload, subqueryload
-from sqlalchemy.orm.interfaces import ONETOMANY, MANYTOONE, MANYTOMANY
+ backref, create_session, configure_mappers, \
+ clear_mappers, sessionmaker, attributes,\
+ Session, composite, column_property, foreign,\
+ remote, synonym, joinedload, subqueryload
+from sqlalchemy.orm.interfaces import ONETOMANY, MANYTOONE
from sqlalchemy.testing import eq_, startswith_, AssertsCompiledSQL, is_
from sqlalchemy.testing import fixtures
from test.orm import _fixtures
from sqlalchemy import exc
from sqlalchemy import inspect
+
class _RelationshipErrors(object):
+
def _assert_raises_no_relevant_fks(self, fn, expr, relname,
- primary, *arg, **kw):
+ primary, *arg, **kw):
assert_raises_message(
sa.exc.ArgumentError,
"Could not locate any relevant foreign key columns "
@@ -33,7 +35,7 @@ class _RelationshipErrors(object):
)
def _assert_raises_no_equality(self, fn, expr, relname,
- primary, *arg, **kw):
+ primary, *arg, **kw):
assert_raises_message(
sa.exc.ArgumentError,
"Could not locate any simple equality expressions "
@@ -50,7 +52,7 @@ class _RelationshipErrors(object):
)
def _assert_raises_ambig_join(self, fn, relname, secondary_arg,
- *arg, **kw):
+ *arg, **kw):
if secondary_arg is not None:
assert_raises_message(
exc.ArgumentError,
@@ -78,7 +80,7 @@ class _RelationshipErrors(object):
fn, *arg, **kw)
def _assert_raises_no_join(self, fn, relname, secondary_arg,
- *arg, **kw):
+ *arg, **kw):
if secondary_arg is not None:
assert_raises_message(
exc.NoForeignKeysError,
@@ -86,7 +88,8 @@ class _RelationshipErrors(object):
"parent/child tables on relationship %s - "
"there are no foreign keys linking these tables "
"via secondary table '%s'. "
- "Ensure that referencing columns are associated with a ForeignKey "
+ "Ensure that referencing columns are associated with a "
+ "ForeignKey "
"or ForeignKeyConstraint, or specify 'primaryjoin' and "
"'secondaryjoin' expressions"
% (relname, secondary_arg),
@@ -97,7 +100,8 @@ class _RelationshipErrors(object):
"Could not determine join condition between "
"parent/child tables on relationship %s - "
"there are no foreign keys linking these tables. "
- "Ensure that referencing columns are associated with a ForeignKey "
+ "Ensure that referencing columns are associated with a "
+ "ForeignKey "
"or ForeignKeyConstraint, or specify a 'primaryjoin' "
"expression."
% (relname,),
@@ -125,12 +129,16 @@ class _RelationshipErrors(object):
"pairs based on join condition and remote_side arguments. "
r"Consider using the remote\(\) annotation to "
"accurately mark those elements of the join "
- "condition that are on the remote side of the relationship." % relname,
+ "condition that are on the remote side of the relationship." % (
+ relname
+ ),
fn, *arg, **kw
)
+
class DependencyTwoParentTest(fixtures.MappedTest):
+
"""Test flush() when a mapper is dependent on multiple relationships"""
run_setup_mappers = 'once'
@@ -140,74 +148,77 @@ class DependencyTwoParentTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table("tbl_a", metadata,
- Column("id", Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column("name", String(128)))
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("name", String(128)))
Table("tbl_b", metadata,
- Column("id", Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column("name", String(128)))
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("name", String(128)))
Table("tbl_c", metadata,
- Column("id", Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column("tbl_a_id", Integer, ForeignKey("tbl_a.id"),
- nullable=False),
- Column("name", String(128)))
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("tbl_a_id", Integer, ForeignKey("tbl_a.id"),
+ nullable=False),
+ Column("name", String(128)))
Table("tbl_d", metadata,
- Column("id", Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column("tbl_c_id", Integer, ForeignKey("tbl_c.id"),
- nullable=False),
- Column("tbl_b_id", Integer, ForeignKey("tbl_b.id")),
- Column("name", String(128)))
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("tbl_c_id", Integer, ForeignKey("tbl_c.id"),
+ nullable=False),
+ Column("tbl_b_id", Integer, ForeignKey("tbl_b.id")),
+ Column("name", String(128)))
@classmethod
def setup_classes(cls):
class A(cls.Basic):
pass
+
class B(cls.Basic):
pass
+
class C(cls.Basic):
pass
+
class D(cls.Basic):
pass
@classmethod
def setup_mappers(cls):
A, C, B, D, tbl_b, tbl_c, tbl_a, tbl_d = (cls.classes.A,
- cls.classes.C,
- cls.classes.B,
- cls.classes.D,
- cls.tables.tbl_b,
- cls.tables.tbl_c,
- cls.tables.tbl_a,
- cls.tables.tbl_d)
+ cls.classes.C,
+ cls.classes.B,
+ cls.classes.D,
+ cls.tables.tbl_b,
+ cls.tables.tbl_c,
+ cls.tables.tbl_a,
+ cls.tables.tbl_d)
mapper(A, tbl_a, properties=dict(
c_rows=relationship(C, cascade="all, delete-orphan",
- backref="a_row")))
+ backref="a_row")))
mapper(B, tbl_b)
mapper(C, tbl_c, properties=dict(
d_rows=relationship(D, cascade="all, delete-orphan",
- backref="c_row")))
+ backref="c_row")))
mapper(D, tbl_d, properties=dict(
b_row=relationship(B)))
@classmethod
def insert_data(cls):
A, C, B, D = (cls.classes.A,
- cls.classes.C,
- cls.classes.B,
- cls.classes.D)
+ cls.classes.C,
+ cls.classes.B,
+ cls.classes.D)
session = create_session()
a = A(name='a1')
b = B(name='b1')
c = C(name='c1', a_row=a)
- d1 = D(name='d1', b_row=b, c_row=c)
- d2 = D(name='d2', b_row=b, c_row=c)
- d3 = D(name='d3', b_row=b, c_row=c)
+ d1 = D(name='d1', b_row=b, c_row=c) # noqa
+ d2 = D(name='d2', b_row=b, c_row=c) # noqa
+ d3 = D(name='d3', b_row=b, c_row=c) # noqa
session.add(a)
session.add(b)
session.flush()
@@ -230,7 +241,9 @@ class DependencyTwoParentTest(fixtures.MappedTest):
session.delete(c)
session.flush()
+
class M2ODontOverwriteFKTest(fixtures.MappedTest):
+
@classmethod
def define_tables(cls, metadata):
Table(
@@ -248,13 +261,13 @@ class M2ODontOverwriteFKTest(fixtures.MappedTest):
class A(fixtures.BasicEntity):
pass
+
class B(fixtures.BasicEntity):
pass
-
mapper(A, a, properties={
- 'b': relationship(B, uselist=uselist)
- })
+ 'b': relationship(B, uselist=uselist)
+ })
mapper(B, b)
return A, B
@@ -271,7 +284,6 @@ class M2ODontOverwriteFKTest(fixtures.MappedTest):
sess.commit()
# test that was broken by #3060
- from sqlalchemy.orm import joinedload
a1 = sess.query(A).options(joinedload("b")).first()
a1.bid = b1.id
sess.flush()
@@ -340,8 +352,8 @@ class M2ODontOverwriteFKTest(fixtures.MappedTest):
assert a1.bid is not None
-
class DirectSelfRefFKTest(fixtures.MappedTest, AssertsCompiledSQL):
+
"""Tests the ultimate join condition, a single column
that points to itself, e.g. within a SQL function or similar.
The test is against a materialized path setup.
@@ -365,28 +377,28 @@ class DirectSelfRefFKTest(fixtures.MappedTest, AssertsCompiledSQL):
@classmethod
def define_tables(cls, metadata):
Table('entity', metadata,
- Column('path', String(100), primary_key=True)
- )
+ Column('path', String(100), primary_key=True)
+ )
@classmethod
def setup_classes(cls):
class Entity(cls.Basic):
+
def __init__(self, path):
self.path = path
-
def _descendants_fixture(self, data=True):
Entity = self.classes.Entity
entity = self.tables.entity
m = mapper(Entity, entity, properties={
- "descendants": relationship(Entity,
- primaryjoin=
- remote(foreign(entity.c.path)).like(
- entity.c.path.concat('/%')),
- viewonly=True,
- order_by=entity.c.path)
- })
+ "descendants": relationship(
+ Entity,
+ primaryjoin=remote(foreign(entity.c.path)).like(
+ entity.c.path.concat('/%')),
+ viewonly=True,
+ order_by=entity.c.path)
+ })
configure_mappers()
assert m.get_property("descendants").direction is ONETOMANY
if data:
@@ -397,13 +409,13 @@ class DirectSelfRefFKTest(fixtures.MappedTest, AssertsCompiledSQL):
entity = self.tables.entity
m = mapper(Entity, entity, properties={
- "anscestors": relationship(Entity,
- primaryjoin=
- entity.c.path.like(
- remote(foreign(entity.c.path)).concat('/%')),
- viewonly=True,
- order_by=entity.c.path)
- })
+ "anscestors": relationship(
+ Entity,
+ primaryjoin=entity.c.path.like(
+ remote(foreign(entity.c.path)).concat('/%')),
+ viewonly=True,
+ order_by=entity.c.path)
+ })
configure_mappers()
assert m.get_property("anscestors").direction is ONETOMANY
if data:
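For comparison, a declarative sketch (hypothetical, mirroring the classical mapping in this fixture) of the materialized-path join: the single path column joins to itself through LIKE, with remote() and foreign() marking the two sides since no real foreign key exists:

    from sqlalchemy import Column, String
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import relationship

    Base = declarative_base()

    class Entity(Base):
        __tablename__ = 'entity'
        path = Column(String(100), primary_key=True)
        descendants = relationship(
            "Entity",
            primaryjoin="remote(foreign(Entity.path))"
                        ".like(Entity.path.concat('/%'))",
            viewonly=True, order_by="Entity.path")
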
@@ -447,7 +459,7 @@ class DirectSelfRefFKTest(fixtures.MappedTest, AssertsCompiledSQL):
sess = self._descendants_fixture()
Entity = self.classes.Entity
e1 = sess.query(Entity).filter_by(path="/foo").\
- options(joinedload(Entity.descendants)).first()
+ options(joinedload(Entity.descendants)).first()
eq_(
[e.path for e in e1.descendants],
@@ -459,7 +471,7 @@ class DirectSelfRefFKTest(fixtures.MappedTest, AssertsCompiledSQL):
sess = self._descendants_fixture()
Entity = self.classes.Entity
e1 = sess.query(Entity).filter_by(path="/foo").\
- options(subqueryload(Entity.descendants)).first()
+ options(subqueryload(Entity.descendants)).first()
eq_(
[e.path for e in e1.descendants],
@@ -471,7 +483,7 @@ class DirectSelfRefFKTest(fixtures.MappedTest, AssertsCompiledSQL):
sess = self._anscestors_fixture()
Entity = self.classes.Entity
e1 = sess.query(Entity).filter_by(path="/foo/bar2/bat1").\
- options(joinedload(Entity.anscestors)).first()
+ options(joinedload(Entity.anscestors)).first()
eq_(
[e.path for e in e1.anscestors],
["/foo", "/foo/bar2"]
@@ -488,8 +500,8 @@ class DirectSelfRefFKTest(fixtures.MappedTest, AssertsCompiledSQL):
)
-
class CompositeSelfRefFKTest(fixtures.MappedTest):
+
"""Tests a composite FK where, in
the relationship(), one col points
to itself in the same table.
@@ -515,7 +527,7 @@ class CompositeSelfRefFKTest(fixtures.MappedTest):
def define_tables(cls, metadata):
Table('company_t', metadata,
Column('company_id', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column('name', String(30)))
Table('employee_t', metadata,
@@ -533,10 +545,12 @@ class CompositeSelfRefFKTest(fixtures.MappedTest):
@classmethod
def setup_classes(cls):
class Company(cls.Basic):
+
def __init__(self, name):
self.name = name
class Employee(cls.Basic):
+
def __init__(self, name, company, emp_id, reports_to=None):
self.name = name
self.company = company
@@ -545,116 +559,202 @@ class CompositeSelfRefFKTest(fixtures.MappedTest):
def test_explicit(self):
Employee, Company, employee_t, company_t = (self.classes.Employee,
- self.classes.Company,
- self.tables.employee_t,
- self.tables.company_t)
+ self.classes.Company,
+ self.tables.employee_t,
+ self.tables.company_t)
mapper(Company, company_t)
- mapper(Employee, employee_t, properties= {
- 'company':relationship(Company,
- primaryjoin=employee_t.c.company_id==
- company_t.c.company_id,
- backref='employees'),
- 'reports_to':relationship(Employee, primaryjoin=
- sa.and_(
- employee_t.c.emp_id==employee_t.c.reports_to_id,
- employee_t.c.company_id==employee_t.c.company_id
- ),
+ mapper(Employee, employee_t, properties={
+ 'company': relationship(Company,
+ primaryjoin=employee_t.c.company_id ==
+ company_t.c.company_id,
+ backref='employees'),
+ 'reports_to': relationship(Employee, primaryjoin=sa.and_(
+ employee_t.c.emp_id == employee_t.c.reports_to_id,
+ employee_t.c.company_id == employee_t.c.company_id
+ ),
remote_side=[employee_t.c.emp_id, employee_t.c.company_id],
- foreign_keys=[employee_t.c.reports_to_id, employee_t.c.company_id],
+ foreign_keys=[
+ employee_t.c.reports_to_id, employee_t.c.company_id],
backref=backref('employees',
- foreign_keys=[employee_t.c.reports_to_id,
- employee_t.c.company_id]))
+ foreign_keys=[employee_t.c.reports_to_id,
+ employee_t.c.company_id]))
})
self._test()
def test_implicit(self):
Employee, Company, employee_t, company_t = (self.classes.Employee,
- self.classes.Company,
- self.tables.employee_t,
- self.tables.company_t)
+ self.classes.Company,
+ self.tables.employee_t,
+ self.tables.company_t)
mapper(Company, company_t)
- mapper(Employee, employee_t, properties= {
- 'company':relationship(Company, backref='employees'),
- 'reports_to':relationship(Employee,
+ mapper(Employee, employee_t, properties={
+ 'company': relationship(Company, backref='employees'),
+ 'reports_to': relationship(
+ Employee,
remote_side=[employee_t.c.emp_id, employee_t.c.company_id],
foreign_keys=[employee_t.c.reports_to_id,
- employee_t.c.company_id],
- backref=backref('employees', foreign_keys=
- [employee_t.c.reports_to_id, employee_t.c.company_id])
- )
+ employee_t.c.company_id],
+ backref=backref(
+ 'employees',
+ foreign_keys=[
+ employee_t.c.reports_to_id, employee_t.c.company_id])
+ )
})
self._test()
def test_very_implicit(self):
Employee, Company, employee_t, company_t = (self.classes.Employee,
- self.classes.Company,
- self.tables.employee_t,
- self.tables.company_t)
+ self.classes.Company,
+ self.tables.employee_t,
+ self.tables.company_t)
mapper(Company, company_t)
- mapper(Employee, employee_t, properties= {
- 'company':relationship(Company, backref='employees'),
- 'reports_to':relationship(Employee,
+ mapper(Employee, employee_t, properties={
+ 'company': relationship(Company, backref='employees'),
+ 'reports_to': relationship(
+ Employee,
remote_side=[employee_t.c.emp_id, employee_t.c.company_id],
backref='employees'
- )
+ )
})
self._test()
def test_very_explicit(self):
Employee, Company, employee_t, company_t = (self.classes.Employee,
- self.classes.Company,
- self.tables.employee_t,
- self.tables.company_t)
+ self.classes.Company,
+ self.tables.employee_t,
+ self.tables.company_t)
mapper(Company, company_t)
- mapper(Employee, employee_t, properties= {
- 'company':relationship(Company, backref='employees'),
- 'reports_to':relationship(Employee,
- _local_remote_pairs = [
- (employee_t.c.reports_to_id, employee_t.c.emp_id),
- (employee_t.c.company_id, employee_t.c.company_id)
+ mapper(Employee, employee_t, properties={
+ 'company': relationship(Company, backref='employees'),
+ 'reports_to': relationship(
+ Employee,
+ _local_remote_pairs=[
+ (employee_t.c.reports_to_id, employee_t.c.emp_id),
+ (employee_t.c.company_id, employee_t.c.company_id)
],
- foreign_keys=[employee_t.c.reports_to_id,
- employee_t.c.company_id],
- backref=backref('employees', foreign_keys=
- [employee_t.c.reports_to_id, employee_t.c.company_id])
- )
+ foreign_keys=[
+ employee_t.c.reports_to_id,
+ employee_t.c.company_id],
+ backref=backref(
+ 'employees',
+ foreign_keys=[
+ employee_t.c.reports_to_id, employee_t.c.company_id])
+ )
})
self._test()
def test_annotated(self):
Employee, Company, employee_t, company_t = (self.classes.Employee,
- self.classes.Company,
- self.tables.employee_t,
- self.tables.company_t)
+ self.classes.Company,
+ self.tables.employee_t,
+ self.tables.company_t)
mapper(Company, company_t)
- mapper(Employee, employee_t, properties= {
- 'company':relationship(Company, backref='employees'),
- 'reports_to':relationship(Employee,
+ mapper(Employee, employee_t, properties={
+ 'company': relationship(Company, backref='employees'),
+ 'reports_to': relationship(
+ Employee,
primaryjoin=sa.and_(
- remote(employee_t.c.emp_id)==employee_t.c.reports_to_id,
- remote(employee_t.c.company_id)==employee_t.c.company_id
+ remote(employee_t.c.emp_id) == employee_t.c.reports_to_id,
+ remote(employee_t.c.company_id) == employee_t.c.company_id
),
backref=backref('employees')
- )
+ )
})
self._test()
+ def test_overlapping_warning(self):
+ Employee, Company, employee_t, company_t = (self.classes.Employee,
+ self.classes.Company,
+ self.tables.employee_t,
+ self.tables.company_t)
+
+ mapper(Company, company_t)
+ mapper(Employee, employee_t, properties={
+ 'company': relationship(Company, backref='employees'),
+ 'reports_to': relationship(
+ Employee,
+ primaryjoin=sa.and_(
+ remote(employee_t.c.emp_id) == employee_t.c.reports_to_id,
+ remote(employee_t.c.company_id) == employee_t.c.company_id
+ ),
+ backref=backref('employees')
+ )
+ })
+
+ assert_raises_message(
+ exc.SAWarning,
+ r"relationship .* will copy column .* to column "
+ "employee_t.company_id, which conflicts with relationship\(s\)",
+ configure_mappers
+ )
+
+ def test_annotated_no_overwriting(self):
+ Employee, Company, employee_t, company_t = (self.classes.Employee,
+ self.classes.Company,
+ self.tables.employee_t,
+ self.tables.company_t)
+
+ mapper(Company, company_t)
+ mapper(Employee, employee_t, properties={
+ 'company': relationship(Company, backref='employees'),
+ 'reports_to': relationship(
+ Employee,
+ primaryjoin=sa.and_(
+ remote(employee_t.c.emp_id) ==
+ foreign(employee_t.c.reports_to_id),
+ remote(employee_t.c.company_id) == employee_t.c.company_id
+ ),
+ backref=backref('employees')
+ )
+ })
+
+ self._test_no_warning()
+
+ def _test_no_overwrite(self, sess, expect_failure):
+ # test [ticket:3230]
+
+ Employee, Company = self.classes.Employee, self.classes.Company
+
+ c1 = sess.query(Company).filter_by(name='c1').one()
+ e3 = sess.query(Employee).filter_by(name='emp3').one()
+ e3.reports_to = None
+
+ if expect_failure:
+ # if foreign() isn't applied specifically to
+ # employee_t.c.reports_to_id only, then
+ # employee_t.c.company_id goes foreign as well and then
+ # this happens
+ assert_raises_message(
+ AssertionError,
+ "Dependency rule tried to blank-out primary key column "
+ "'employee_t.company_id'",
+ sess.flush
+ )
+ else:
+ sess.flush()
+ eq_(e3.company, c1)
+
+ @testing.emits_warning("relationship .* will copy column ")
def _test(self):
+ self._test_no_warning(overwrites=True)
+
+ def _test_no_warning(self, overwrites=False):
self._test_relationships()
sess = Session()
self._setup_data(sess)
self._test_lazy_relations(sess)
self._test_join_aliasing(sess)
+ self._test_no_overwrite(sess, expect_failure=overwrites)
def _test_relationships(self):
configure_mappers()
@@ -665,7 +765,7 @@ class CompositeSelfRefFKTest(fixtures.MappedTest):
set([
(employee_t.c.company_id, employee_t.c.company_id),
(employee_t.c.emp_id, employee_t.c.reports_to_id),
- ])
+ ])
)
eq_(
Employee.employees.property.remote_side,
@@ -676,7 +776,7 @@ class CompositeSelfRefFKTest(fixtures.MappedTest):
set([
(employee_t.c.company_id, employee_t.c.company_id),
(employee_t.c.reports_to_id, employee_t.c.emp_id),
- ])
+ ])
)
def _setup_data(self, sess):
@@ -686,12 +786,12 @@ class CompositeSelfRefFKTest(fixtures.MappedTest):
c2 = Company('c2')
e1 = Employee('emp1', c1, 1)
- e2 = Employee('emp2', c1, 2, e1)
+ e2 = Employee('emp2', c1, 2, e1) # noqa
e3 = Employee('emp3', c1, 3, e1)
- e4 = Employee('emp4', c1, 4, e3)
+ e4 = Employee('emp4', c1, 4, e3) # noqa
e5 = Employee('emp5', c2, 1)
- e6 = Employee('emp6', c2, 2, e5)
- e7 = Employee('emp7', c2, 3, e5)
+ e6 = Employee('emp6', c2, 2, e5) # noqa
+ e7 = Employee('emp7', c2, 3, e5) # noqa
sess.add_all((c1, c2))
sess.commit()
@@ -711,64 +811,66 @@ class CompositeSelfRefFKTest(fixtures.MappedTest):
assert test_e5.name == 'emp5', test_e5.name
assert [x.name for x in test_e1.employees] == ['emp2', 'emp3']
assert sess.query(Employee).\
- get([c1.company_id, 3]).reports_to.name == 'emp1'
+ get([c1.company_id, 3]).reports_to.name == 'emp1'
assert sess.query(Employee).\
- get([c2.company_id, 3]).reports_to.name == 'emp5'
+ get([c2.company_id, 3]).reports_to.name == 'emp5'
def _test_join_aliasing(self, sess):
Employee, Company = self.classes.Employee, self.classes.Company
eq_(
- [n for n, in sess.query(Employee.name).\
- join(Employee.reports_to, aliased=True).\
- filter_by(name='emp5').\
- reset_joinpoint().\
- order_by(Employee.name)],
+ [n for n, in sess.query(Employee.name).
+ join(Employee.reports_to, aliased=True).
+ filter_by(name='emp5').
+ reset_joinpoint().
+ order_by(Employee.name)],
['emp6', 'emp7']
)
-
class CompositeJoinPartialFK(fixtures.MappedTest, AssertsCompiledSQL):
__dialect__ = 'default'
+
@classmethod
def define_tables(cls, metadata):
Table("parent", metadata,
- Column('x', Integer, primary_key=True),
- Column('y', Integer, primary_key=True),
- Column('z', Integer),
- )
+ Column('x', Integer, primary_key=True),
+ Column('y', Integer, primary_key=True),
+ Column('z', Integer),
+ )
Table("child", metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('x', Integer),
- Column('y', Integer),
- Column('z', Integer),
- # note 'z' is not here
- sa.ForeignKeyConstraint(
- ["x", "y"],
- ["parent.x", "parent.y"]
- )
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('x', Integer),
+ Column('y', Integer),
+ Column('z', Integer),
+ # note 'z' is not here
+ sa.ForeignKeyConstraint(
+ ["x", "y"],
+ ["parent.x", "parent.y"]
+ )
+ )
+
@classmethod
def setup_mappers(cls):
parent, child = cls.tables.parent, cls.tables.child
+
class Parent(cls.Comparable):
pass
class Child(cls.Comparable):
pass
mapper(Parent, parent, properties={
- 'children':relationship(Child, primaryjoin=and_(
- parent.c.x==child.c.x,
- parent.c.y==child.c.y,
- parent.c.z==child.c.z,
+ 'children': relationship(Child, primaryjoin=and_(
+ parent.c.x == child.c.x,
+ parent.c.y == child.c.y,
+ parent.c.z == child.c.z,
))
})
mapper(Child, child)
def test_joins_fully(self):
Parent, Child = self.classes.Parent, self.classes.Child
- s = Session()
+
self.assert_compile(
Parent.children.property.strategy._lazywhere,
":param_1 = child.x AND :param_2 = child.y AND :param_3 = child.z"
@@ -776,19 +878,20 @@ class CompositeJoinPartialFK(fixtures.MappedTest, AssertsCompiledSQL):
class SynonymsAsFKsTest(fixtures.MappedTest):
+
"""Syncrules on foreign keys that are also primary"""
@classmethod
def define_tables(cls, metadata):
Table("tableA", metadata,
- Column("id",Integer,primary_key=True,
- test_needs_autoincrement=True),
- Column("foo",Integer,),
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("foo", Integer,),
test_needs_fk=True)
- Table("tableB",metadata,
- Column("id",Integer,primary_key=True,
- test_needs_autoincrement=True),
+ Table("tableB", metadata,
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column("_a_id", Integer, key='a_id', primary_key=True),
test_needs_fk=True)
@@ -798,6 +901,7 @@ class SynonymsAsFKsTest(fixtures.MappedTest):
pass
class B(cls.Basic):
+
@property
def a_id(self):
return self._a_id
@@ -832,18 +936,19 @@ class SynonymsAsFKsTest(fixtures.MappedTest):
class FKsAsPksTest(fixtures.MappedTest):
+
"""Syncrules on foreign keys that are also primary"""
@classmethod
def define_tables(cls, metadata):
Table("tableA", metadata,
- Column("id",Integer,primary_key=True,
- test_needs_autoincrement=True),
- Column("foo",Integer,),
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("foo", Integer,),
test_needs_fk=True)
- Table("tableB",metadata,
- Column("id",Integer,ForeignKey("tableA.id"),primary_key=True),
+ Table("tableB", metadata,
+ Column("id", Integer, ForeignKey("tableA.id"), primary_key=True),
test_needs_fk=True)
@classmethod
@@ -863,9 +968,8 @@ class FKsAsPksTest(fixtures.MappedTest):
self.classes.B,
self.tables.tableA)
-
mapper(A, tableA, properties={
- 'b':relationship(B, cascade="all,delete-orphan", uselist=False)})
+ 'b': relationship(B, cascade="all,delete-orphan", uselist=False)})
mapper(B, tableB)
configure_mappers()
@@ -890,7 +994,7 @@ class FKsAsPksTest(fixtures.MappedTest):
self.tables.tableA)
mapper(A, tableA, properties={
- 'bs':relationship(B, cascade="save-update")})
+ 'bs': relationship(B, cascade="save-update")})
mapper(B, tableB)
a1 = A()
@@ -915,7 +1019,7 @@ class FKsAsPksTest(fixtures.MappedTest):
self.tables.tableA)
mapper(B, tableB, properties={
- 'a':relationship(A, cascade="save-update")})
+ 'a': relationship(A, cascade="save-update")})
mapper(A, tableA)
b1 = B()
@@ -938,7 +1042,8 @@ class FKsAsPksTest(fixtures.MappedTest):
A, tableA = self.classes.A, self.tables.tableA
 # postgresql can't handle a nullable PK column...?
- tableC = Table('tablec', tableA.metadata,
+ tableC = Table(
+ 'tablec', tableA.metadata,
Column('id', Integer, primary_key=True),
Column('a_id', Integer, ForeignKey('tableA.id'),
primary_key=True, autoincrement=False, nullable=True))
@@ -947,7 +1052,7 @@ class FKsAsPksTest(fixtures.MappedTest):
class C(fixtures.BasicEntity):
pass
mapper(C, tableC, properties={
- 'a':relationship(A, cascade="save-update")
+ 'a': relationship(A, cascade="save-update")
})
mapper(A, tableA)
@@ -968,12 +1073,11 @@ class FKsAsPksTest(fixtures.MappedTest):
self.classes.B,
self.tables.tableA)
-
for cascade in ("save-update, delete",
#"save-update, delete-orphan",
"save-update, delete, delete-orphan"):
mapper(B, tableB, properties={
- 'a':relationship(A, cascade=cascade, single_parent=True)
+ 'a': relationship(A, cascade=cascade, single_parent=True)
})
mapper(A, tableA)
@@ -999,12 +1103,11 @@ class FKsAsPksTest(fixtures.MappedTest):
self.classes.B,
self.tables.tableA)
-
for cascade in ("save-update, delete",
#"save-update, delete-orphan",
"save-update, delete, delete-orphan"):
mapper(A, tableA, properties={
- 'bs':relationship(B, cascade=cascade)
+ 'bs': relationship(B, cascade=cascade)
})
mapper(B, tableB)
@@ -1029,7 +1132,7 @@ class FKsAsPksTest(fixtures.MappedTest):
self.tables.tableA)
mapper(A, tableA, properties={
- 'bs':relationship(B, cascade="none")})
+ 'bs': relationship(B, cascade="none")})
mapper(B, tableB)
a1 = A()
@@ -1054,7 +1157,7 @@ class FKsAsPksTest(fixtures.MappedTest):
self.tables.tableA)
mapper(B, tableB, properties={
- 'a':relationship(A, cascade="none")})
+ 'a': relationship(A, cascade="none")})
mapper(A, tableA)
b1 = B()
@@ -1070,39 +1173,42 @@ class FKsAsPksTest(fixtures.MappedTest):
assert a1 not in sess
assert b1 not in sess
+
class UniqueColReferenceSwitchTest(fixtures.MappedTest):
+
"""test a relationship based on a primary
join against a unique non-pk column"""
@classmethod
def define_tables(cls, metadata):
Table("table_a", metadata,
- Column("id", Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column("ident", String(10), nullable=False,
- unique=True),
- )
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("ident", String(10), nullable=False,
+ unique=True),
+ )
Table("table_b", metadata,
- Column("id", Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column("a_ident", String(10),
- ForeignKey('table_a.ident'),
- nullable=False),
- )
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("a_ident", String(10),
+ ForeignKey('table_a.ident'),
+ nullable=False),
+ )
@classmethod
def setup_classes(cls):
class A(cls.Comparable):
pass
+
class B(cls.Comparable):
pass
def test_switch_parent(self):
A, B, table_b, table_a = (self.classes.A,
- self.classes.B,
- self.tables.table_b,
- self.tables.table_a)
+ self.classes.B,
+ self.tables.table_b,
+ self.tables.table_a)
mapper(A, table_a)
mapper(B, table_b, properties={"a": relationship(A, backref="bs")})
@@ -1122,7 +1228,9 @@ class UniqueColReferenceSwitchTest(fixtures.MappedTest):
session.delete(a1)
session.flush()
+
class RelationshipToSelectableTest(fixtures.MappedTest):
+
"""Test a map to a select that relates to a map to the table."""
@classmethod
@@ -1142,33 +1250,40 @@ class RelationshipToSelectableTest(fixtures.MappedTest):
class Container(fixtures.BasicEntity):
pass
+
class LineItem(fixtures.BasicEntity):
pass
container_select = sa.select(
[items.c.policyNum, items.c.policyEffDate, items.c.type],
distinct=True,
- ).alias('container_select')
+ ).alias('container_select')
mapper(LineItem, items)
- mapper(Container,
- container_select,
- order_by=sa.asc(container_select.c.type),
- properties=dict(
- lineItems=relationship(LineItem,
- lazy='select',
- cascade='all, delete-orphan',
- order_by=sa.asc(items.c.id),
- primaryjoin=sa.and_(
- container_select.c.policyNum==items.c.policyNum,
- container_select.c.policyEffDate==
- items.c.policyEffDate,
- container_select.c.type==items.c.type),
- foreign_keys=[
- items.c.policyNum,
- items.c.policyEffDate,
- items.c.type])))
+ mapper(
+ Container,
+ container_select,
+ order_by=sa.asc(container_select.c.type),
+ properties=dict(
+ lineItems=relationship(
+ LineItem,
+ lazy='select',
+ cascade='all, delete-orphan',
+ order_by=sa.asc(items.c.id),
+ primaryjoin=sa.and_(
+ container_select.c.policyNum == items.c.policyNum,
+ container_select.c.policyEffDate ==
+ items.c.policyEffDate,
+ container_select.c.type == items.c.type),
+ foreign_keys=[
+ items.c.policyNum,
+ items.c.policyEffDate,
+ items.c.type
+ ]
+ )
+ )
+ )
session = create_session()
con = Container()
@@ -1189,7 +1304,9 @@ class RelationshipToSelectableTest(fixtures.MappedTest):
for old, new in zip(con.lineItems, newcon.lineItems):
eq_(old.id, new.id)
+
class FKEquatedToConstantTest(fixtures.MappedTest):
+
"""test a relationship with a non-column entity in the primary join,
is not viewonly, and also has the non-column's clause mentioned in the
foreign keys list.
@@ -1199,31 +1316,32 @@ class FKEquatedToConstantTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('tags', metadata, Column("id", Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column("data", String(50)),
- )
+ test_needs_autoincrement=True),
+ Column("data", String(50)),
+ )
Table('tag_foo', metadata,
- Column("id", Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('tagid', Integer),
- Column("data", String(50)),
- )
+ Column("id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('tagid', Integer),
+ Column("data", String(50)),
+ )
def test_basic(self):
tag_foo, tags = self.tables.tag_foo, self.tables.tags
class Tag(fixtures.ComparableEntity):
pass
+
class TagInstance(fixtures.ComparableEntity):
pass
mapper(Tag, tags, properties={
- 'foo':relationship(TagInstance,
- primaryjoin=sa.and_(tag_foo.c.data=='iplc_case',
- tag_foo.c.tagid==tags.c.id),
- foreign_keys=[tag_foo.c.tagid, tag_foo.c.data],
- ),
+ 'foo': relationship(
+ TagInstance,
+ primaryjoin=sa.and_(tag_foo.c.data == 'iplc_case',
+ tag_foo.c.tagid == tags.c.id),
+ foreign_keys=[tag_foo.c.tagid, tag_foo.c.data]),
})
mapper(TagInstance, tag_foo)
@@ -1248,41 +1366,43 @@ class FKEquatedToConstantTest(fixtures.MappedTest):
[TagInstance(data='iplc_case'), TagInstance(data='not_iplc_case')]
)
+
class BackrefPropagatesForwardsArgs(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('users', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('name', String(50))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(50))
+ )
Table('addresses', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('user_id', Integer),
- Column('email', String(50))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('user_id', Integer),
+ Column('email', String(50))
+ )
@classmethod
def setup_classes(cls):
class User(cls.Comparable):
pass
+
class Address(cls.Comparable):
pass
def test_backref(self):
User, Address, users, addresses = (self.classes.User,
- self.classes.Address,
- self.tables.users,
- self.tables.addresses)
-
+ self.classes.Address,
+ self.tables.users,
+ self.tables.addresses)
mapper(User, users, properties={
- 'addresses':relationship(Address,
- primaryjoin=addresses.c.user_id==users.c.id,
- foreign_keys=addresses.c.user_id,
- backref='user')
+ 'addresses': relationship(
+ Address,
+ primaryjoin=addresses.c.user_id == users.c.id,
+ foreign_keys=addresses.c.user_id,
+ backref='user')
})
mapper(Address, addresses)
@@ -1292,9 +1412,11 @@ class BackrefPropagatesForwardsArgs(fixtures.MappedTest):
sess.commit()
eq_(sess.query(Address).all(), [
Address(email='a1', user=User(name='u1'))
- ])
+ ])
+
class AmbiguousJoinInterpretedAsSelfRef(fixtures.MappedTest):
+
"""test ambiguous joins due to FKs on both sides treated as
self-referential.
@@ -1307,25 +1429,28 @@ class AmbiguousJoinInterpretedAsSelfRef(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
- subscriber_table = Table('subscriber', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- )
-
- address_table = Table('address',
- metadata,
- Column('subscriber_id', Integer,
- ForeignKey('subscriber.id'), primary_key=True),
- Column('type', String(1), primary_key=True),
- )
+ Table(
+ 'subscriber', metadata,
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True))
+
+ Table(
+ 'address', metadata,
+ Column(
+ 'subscriber_id', Integer,
+ ForeignKey('subscriber.id'), primary_key=True),
+ Column('type', String(1), primary_key=True),
+ )
@classmethod
def setup_mappers(cls):
subscriber, address = cls.tables.subscriber, cls.tables.address
- subscriber_and_address = subscriber.join(address,
- and_(address.c.subscriber_id==subscriber.c.id,
- address.c.type.in_(['A', 'B', 'C'])))
+ subscriber_and_address = subscriber.join(
+ address,
+ and_(address.c.subscriber_id == subscriber.c.id,
+ address.c.type.in_(['A', 'B', 'C'])))
class Address(cls.Comparable):
pass
@@ -1336,10 +1461,10 @@ class AmbiguousJoinInterpretedAsSelfRef(fixtures.MappedTest):
mapper(Address, address)
mapper(Subscriber, subscriber_and_address, properties={
- 'id':[subscriber.c.id, address.c.subscriber_id],
- 'addresses' : relationship(Address,
- backref=backref("customer"))
- })
+ 'id': [subscriber.c.id, address.c.subscriber_id],
+ 'addresses': relationship(Address,
+ backref=backref("customer"))
+ })
def test_mapping(self):
Subscriber, Address = self.classes.Subscriber, self.classes.Address
@@ -1349,11 +1474,11 @@ class AmbiguousJoinInterpretedAsSelfRef(fixtures.MappedTest):
assert Address.customer.property.direction is MANYTOONE
s1 = Subscriber(type='A',
- addresses = [
- Address(type='D'),
- Address(type='E'),
- ]
- )
+ addresses=[
+ Address(type='D'),
+ Address(type='E'),
+ ]
+ )
a1 = Address(type='B', customer=Subscriber(type='C'))
assert s1.addresses[0].customer is s1
@@ -1375,22 +1500,23 @@ class AmbiguousJoinInterpretedAsSelfRef(fixtures.MappedTest):
class ManualBackrefTest(_fixtures.FixtureTest):
+
"""Test explicit relationships that are backrefs to each other."""
run_inserts = None
def test_o2m(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(Address, back_populates='user')
+ 'addresses': relationship(Address, back_populates='user')
})
mapper(Address, addresses, properties={
- 'user':relationship(User, back_populates='addresses')
+ 'user': relationship(User, back_populates='addresses')
})
sess = create_session()
@@ -1409,52 +1535,56 @@ class ManualBackrefTest(_fixtures.FixtureTest):
def test_invalid_key(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(Address, back_populates='userr')
+ 'addresses': relationship(Address, back_populates='userr')
})
mapper(Address, addresses, properties={
- 'user':relationship(User, back_populates='addresses')
+ 'user': relationship(User, back_populates='addresses')
})
assert_raises(sa.exc.InvalidRequestError, configure_mappers)
def test_invalid_target(self):
- addresses, Dingaling, User, dingalings, Address, users = (self.tables.addresses,
- self.classes.Dingaling,
- self.classes.User,
- self.tables.dingalings,
- self.classes.Address,
- self.tables.users)
+ addresses, Dingaling, User, dingalings, Address, users = (
+ self.tables.addresses,
+ self.classes.Dingaling,
+ self.classes.User,
+ self.tables.dingalings,
+ self.classes.Address,
+ self.tables.users)
mapper(User, users, properties={
- 'addresses':relationship(Address, back_populates='dingaling'),
+ 'addresses': relationship(Address, back_populates='dingaling'),
})
mapper(Dingaling, dingalings)
mapper(Address, addresses, properties={
- 'dingaling':relationship(Dingaling)
+ 'dingaling': relationship(Dingaling)
})
assert_raises_message(sa.exc.ArgumentError,
- r"reverse_property 'dingaling' on relationship "
- "User.addresses references "
- "relationship Address.dingaling, which does not "
- "reference mapper Mapper\|User\|users",
- configure_mappers)
+ r"reverse_property 'dingaling' on relationship "
+ "User.addresses references "
+ "relationship Address.dingaling, which does not "
+ "reference mapper Mapper\|User\|users",
+ configure_mappers)
+
class JoinConditionErrorTest(fixtures.TestBase):
def test_clauseelement_pj(self):
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
+
class C1(Base):
__tablename__ = 'c1'
id = Column('id', Integer, primary_key=True)
+
class C2(Base):
__tablename__ = 'c2'
id = Column('id', Integer, primary_key=True)
@@ -1466,39 +1596,42 @@ class JoinConditionErrorTest(fixtures.TestBase):
def test_clauseelement_pj_false(self):
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
+
class C1(Base):
__tablename__ = 'c1'
id = Column('id', Integer, primary_key=True)
+
class C2(Base):
__tablename__ = 'c2'
id = Column('id', Integer, primary_key=True)
c1id = Column('c1id', Integer, ForeignKey('c1.id'))
- c2 = relationship(C1, primaryjoin="x"=="y")
+ c2 = relationship(C1, primaryjoin="x" == "y")
assert_raises(sa.exc.ArgumentError, configure_mappers)
def test_only_column_elements(self):
m = MetaData()
t1 = Table('t1', m,
- Column('id', Integer, primary_key=True),
- Column('foo_id', Integer, ForeignKey('t2.id')),
- )
+ Column('id', Integer, primary_key=True),
+ Column('foo_id', Integer, ForeignKey('t2.id')),
+ )
t2 = Table('t2', m,
- Column('id', Integer, primary_key=True),
- )
+ Column('id', Integer, primary_key=True),
+ )
+
class C1(object):
pass
+
class C2(object):
pass
- mapper(C1, t1, properties={'c2':relationship(C2,
- primaryjoin=t1.join(t2))})
+ mapper(C1, t1, properties={
+ 'c2': relationship(C2, primaryjoin=t1.join(t2))})
mapper(C2, t2)
assert_raises(sa.exc.ArgumentError, configure_mappers)
def test_invalid_string_args(self):
from sqlalchemy.ext.declarative import declarative_base
- from sqlalchemy import util
for argname, arg in [
('remote_side', ['c1.id']),
@@ -1508,8 +1641,9 @@ class JoinConditionErrorTest(fixtures.TestBase):
('order_by', ['id']),
]:
clear_mappers()
- kw = {argname:arg}
+ kw = {argname: arg}
Base = declarative_base()
+
class C1(Base):
__tablename__ = 'c1'
id = Column('id', Integer, primary_key=True)
@@ -1527,51 +1661,52 @@ class JoinConditionErrorTest(fixtures.TestBase):
(argname, arg[0], type(arg[0])),
configure_mappers)
-
def test_fk_error_not_raised_unrelated(self):
m = MetaData()
t1 = Table('t1', m,
- Column('id', Integer, primary_key=True),
- Column('foo_id', Integer, ForeignKey('t2.nonexistent_id')),
- )
- t2 = Table('t2', m,
- Column('id', Integer, primary_key=True),
- )
+ Column('id', Integer, primary_key=True),
+ Column('foo_id', Integer, ForeignKey('t2.nonexistent_id')),
+ )
+ t2 = Table('t2', m, # noqa
+ Column('id', Integer, primary_key=True),
+ )
t3 = Table('t3', m,
- Column('id', Integer, primary_key=True),
- Column('t1id', Integer, ForeignKey('t1.id'))
- )
+ Column('id', Integer, primary_key=True),
+ Column('t1id', Integer, ForeignKey('t1.id'))
+ )
class C1(object):
pass
+
class C2(object):
pass
- mapper(C1, t1, properties={'c2':relationship(C2)})
+ mapper(C1, t1, properties={'c2': relationship(C2)})
mapper(C2, t3)
- assert C1.c2.property.primaryjoin.compare(t1.c.id==t3.c.t1id)
+ assert C1.c2.property.primaryjoin.compare(t1.c.id == t3.c.t1id)
def test_join_error_raised(self):
m = MetaData()
t1 = Table('t1', m,
- Column('id', Integer, primary_key=True),
- )
- t2 = Table('t2', m,
- Column('id', Integer, primary_key=True),
- )
+ Column('id', Integer, primary_key=True),
+ )
+ t2 = Table('t2', m, # noqa
+ Column('id', Integer, primary_key=True),
+ )
t3 = Table('t3', m,
- Column('id', Integer, primary_key=True),
- Column('t1id', Integer)
- )
+ Column('id', Integer, primary_key=True),
+ Column('t1id', Integer)
+ )
class C1(object):
pass
+
class C2(object):
pass
- mapper(C1, t1, properties={'c2':relationship(C2)})
+ mapper(C1, t1, properties={'c2': relationship(C2)})
mapper(C2, t3)
assert_raises(sa.exc.ArgumentError, configure_mappers)
@@ -1579,7 +1714,9 @@ class JoinConditionErrorTest(fixtures.TestBase):
def teardown(self):
clear_mappers()
+
class TypeMatchTest(fixtures.MappedTest):
+
"""test errors raised when trying to add items
whose type is not handled by a relationship"""
@@ -1587,33 +1724,38 @@ class TypeMatchTest(fixtures.MappedTest):
def define_tables(cls, metadata):
Table("a", metadata,
Column('aid', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column('adata', String(30)))
Table("b", metadata,
- Column('bid', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column("a_id", Integer, ForeignKey("a.aid")),
- Column('bdata', String(30)))
+ Column('bid', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("a_id", Integer, ForeignKey("a.aid")),
+ Column('bdata', String(30)))
Table("c", metadata,
Column('cid', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column("b_id", Integer, ForeignKey("b.bid")),
Column('cdata', String(30)))
Table("d", metadata,
Column('did', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column("a_id", Integer, ForeignKey("a.aid")),
Column('ddata', String(30)))
def test_o2m_oncascade(self):
a, c, b = (self.tables.a,
- self.tables.c,
- self.tables.b)
+ self.tables.c,
+ self.tables.b)
- class A(fixtures.BasicEntity): pass
- class B(fixtures.BasicEntity): pass
- class C(fixtures.BasicEntity): pass
- mapper(A, a, properties={'bs':relationship(B)})
+ class A(fixtures.BasicEntity):
+ pass
+
+ class B(fixtures.BasicEntity):
+ pass
+
+ class C(fixtures.BasicEntity):
+ pass
+ mapper(A, a, properties={'bs': relationship(B)})
mapper(B, b)
mapper(C, c)
@@ -1633,13 +1775,18 @@ class TypeMatchTest(fixtures.MappedTest):
def test_o2m_onflush(self):
a, c, b = (self.tables.a,
- self.tables.c,
- self.tables.b)
+ self.tables.c,
+ self.tables.b)
- class A(fixtures.BasicEntity): pass
- class B(fixtures.BasicEntity): pass
- class C(fixtures.BasicEntity): pass
- mapper(A, a, properties={'bs':relationship(B, cascade="none")})
+ class A(fixtures.BasicEntity):
+ pass
+
+ class B(fixtures.BasicEntity):
+ pass
+
+ class C(fixtures.BasicEntity):
+ pass
+ mapper(A, a, properties={'bs': relationship(B, cascade="none")})
mapper(B, b)
mapper(C, c)
@@ -1653,18 +1800,23 @@ class TypeMatchTest(fixtures.MappedTest):
sess.add(b1)
sess.add(c1)
assert_raises_message(sa.orm.exc.FlushError,
- "Attempting to flush an item",
- sess.flush)
+ "Attempting to flush an item",
+ sess.flush)
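
[editor's note: the assertion above is the flush-time type check. Sketched standalone under the same table layout (hypothetical A/B/C names), an instance of an unrelated mapped class placed in A.bs is only rejected once flush processes the collection:

from sqlalchemy import (
    Column, ForeignKey, Integer, MetaData, Table, create_engine)
from sqlalchemy.orm import Session, mapper, relationship
from sqlalchemy.orm.exc import FlushError

metadata = MetaData()
a = Table('a', metadata, Column('id', Integer, primary_key=True))
b = Table('b', metadata,
          Column('id', Integer, primary_key=True),
          Column('a_id', Integer, ForeignKey('a.id')))
c = Table('c', metadata, Column('id', Integer, primary_key=True))

class A(object): pass
class B(object): pass
class C(object): pass   # mapped, but neither B nor a B subclass

mapper(A, a, properties={'bs': relationship(B, cascade="none")})
mapper(B, b)
mapper(C, c)

engine = create_engine('sqlite://')
metadata.create_all(engine)
sess = Session(engine)
a1, b1, c1 = A(), B(), C()
a1.bs.append(b1)
a1.bs.append(c1)        # wrong type; nothing complains yet
sess.add(a1); sess.add(b1); sess.add(c1)
try:
    sess.flush()
except FlushError:
    pass                # "Attempting to flush an item ..."
]
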
def test_o2m_nopoly_onflush(self):
a, c, b = (self.tables.a,
- self.tables.c,
- self.tables.b)
+ self.tables.c,
+ self.tables.b)
+
+ class A(fixtures.BasicEntity):
+ pass
- class A(fixtures.BasicEntity): pass
- class B(fixtures.BasicEntity): pass
- class C(B): pass
- mapper(A, a, properties={'bs':relationship(B, cascade="none")})
+ class B(fixtures.BasicEntity):
+ pass
+
+ class C(B):
+ pass
+ mapper(A, a, properties={'bs': relationship(B, cascade="none")})
mapper(B, b)
mapper(C, c, inherits=B)
@@ -1678,20 +1830,25 @@ class TypeMatchTest(fixtures.MappedTest):
sess.add(b1)
sess.add(c1)
assert_raises_message(sa.orm.exc.FlushError,
- "Attempting to flush an item",
- sess.flush)
+ "Attempting to flush an item",
+ sess.flush)
def test_m2o_nopoly_onflush(self):
a, b, d = (self.tables.a,
- self.tables.b,
- self.tables.d)
+ self.tables.b,
+ self.tables.d)
+
+ class A(fixtures.BasicEntity):
+ pass
- class A(fixtures.BasicEntity): pass
- class B(A): pass
- class D(fixtures.BasicEntity): pass
+ class B(A):
+ pass
+
+ class D(fixtures.BasicEntity):
+ pass
mapper(A, a)
mapper(B, b, inherits=A)
- mapper(D, d, properties={"a":relationship(A, cascade="none")})
+ mapper(D, d, properties={"a": relationship(A, cascade="none")})
b1 = B()
d1 = D()
d1.a = b1
@@ -1699,27 +1856,33 @@ class TypeMatchTest(fixtures.MappedTest):
sess.add(b1)
sess.add(d1)
assert_raises_message(sa.orm.exc.FlushError,
- "Attempting to flush an item",
- sess.flush)
+ "Attempting to flush an item",
+ sess.flush)
def test_m2o_oncascade(self):
a, b, d = (self.tables.a,
- self.tables.b,
- self.tables.d)
+ self.tables.b,
+ self.tables.d)
- class A(fixtures.BasicEntity): pass
- class B(fixtures.BasicEntity): pass
- class D(fixtures.BasicEntity): pass
+ class A(fixtures.BasicEntity):
+ pass
+
+ class B(fixtures.BasicEntity):
+ pass
+
+ class D(fixtures.BasicEntity):
+ pass
mapper(A, a)
mapper(B, b)
- mapper(D, d, properties={"a":relationship(A)})
+ mapper(D, d, properties={"a": relationship(A)})
b1 = B()
d1 = D()
d1.a = b1
sess = create_session()
assert_raises_message(AssertionError,
- "doesn't handle objects of type",
- sess.add, d1)
+ "doesn't handle objects of type",
+ sess.add, d1)
+
class TypedAssociationTable(fixtures.MappedTest):
@@ -1727,8 +1890,10 @@ class TypedAssociationTable(fixtures.MappedTest):
def define_tables(cls, metadata):
class MySpecialType(sa.types.TypeDecorator):
impl = String
+
def process_bind_param(self, value, dialect):
return "lala" + value
+
def process_result_value(self, value, dialect):
return value[4:]
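
[editor's note: MySpecialType is the whole point of this fixture, so a runnable sketch of the TypeDecorator round trip may help — process_bind_param rewrites values heading into the database, process_result_value undoes it on the way out (PrefixedString is a hypothetical stand-in):

from sqlalchemy import (
    Column, Integer, MetaData, String, Table, create_engine, types)

class PrefixedString(types.TypeDecorator):
    """Hypothetical type mirroring MySpecialType above."""
    impl = String

    def process_bind_param(self, value, dialect):
        return "lala" + value    # applied to INSERT/UPDATE parameters

    def process_result_value(self, value, dialect):
        return value[4:]         # strips the 4-char prefix from results

metadata = MetaData()
t = Table('t', metadata,
          Column('id', Integer, primary_key=True),
          Column('data', PrefixedString(50)))
engine = create_engine('sqlite://')
metadata.create_all(engine)
engine.execute(t.insert(), data='hello')
row = engine.execute(t.select()).first()
assert row.data == 'hello'   # stored as 'lalahello', unwrapped here
]
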
@@ -1746,15 +1911,17 @@ class TypedAssociationTable(fixtures.MappedTest):
"""Many-to-many tables with special types for candidate keys."""
t2, t3, t1 = (self.tables.t2,
- self.tables.t3,
- self.tables.t1)
+ self.tables.t3,
+ self.tables.t1)
+ class T1(fixtures.BasicEntity):
+ pass
- class T1(fixtures.BasicEntity): pass
- class T2(fixtures.BasicEntity): pass
+ class T2(fixtures.BasicEntity):
+ pass
mapper(T2, t2)
mapper(T1, t1, properties={
- 't2s':relationship(T2, secondary=t3, backref='t1s')})
+ 't2s': relationship(T2, secondary=t3, backref='t1s')})
a = T1()
a.col1 = "aid"
@@ -1775,7 +1942,9 @@ class TypedAssociationTable(fixtures.MappedTest):
assert t3.count().scalar() == 1
+
class CustomOperatorTest(fixtures.MappedTest, AssertsCompiledSQL):
+
"""test op() in conjunction with join conditions"""
run_create_tables = run_deletes = None
@@ -1785,47 +1954,50 @@ class CustomOperatorTest(fixtures.MappedTest, AssertsCompiledSQL):
@classmethod
def define_tables(cls, metadata):
Table('a', metadata,
- Column('id', Integer, primary_key=True),
- Column('foo', String(50))
- )
+ Column('id', Integer, primary_key=True),
+ Column('foo', String(50))
+ )
Table('b', metadata,
- Column('id', Integer, primary_key=True),
- Column('foo', String(50))
- )
+ Column('id', Integer, primary_key=True),
+ Column('foo', String(50))
+ )
def test_join_on_custom_op(self):
class A(fixtures.BasicEntity):
pass
+
class B(fixtures.BasicEntity):
pass
mapper(A, self.tables.a, properties={
- 'bs': relationship(B,
- primaryjoin=self.tables.a.c.foo.op(
- '&*', is_comparison=True
- )(foreign(self.tables.b.c.foo)),
- viewonly=True
- )
- })
+ 'bs': relationship(B,
+ primaryjoin=self.tables.a.c.foo.op(
+ '&*', is_comparison=True
+ )(foreign(self.tables.b.c.foo)),
+ viewonly=True
+ )
+ })
mapper(B, self.tables.b)
self.assert_compile(
Session().query(A).join(A.bs),
- "SELECT a.id AS a_id, a.foo AS a_foo FROM a JOIN b ON a.foo &* b.foo"
+ "SELECT a.id AS a_id, a.foo AS a_foo "
+ "FROM a JOIN b ON a.foo &* b.foo"
)
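
[editor's note: the custom-operator join above in one self-contained piece. op('&*', is_comparison=True) yields a boolean-typed expression ('&*' is a made-up operator), and since no real FOREIGN KEY exists, the foreign() annotation tells relationship() which side to treat as foreign:

from sqlalchemy import Column, Integer, MetaData, String, Table
from sqlalchemy.orm import Session, foreign, mapper, relationship

metadata = MetaData()
a = Table('a', metadata,
          Column('id', Integer, primary_key=True),
          Column('foo', String(50)))
b = Table('b', metadata,
          Column('id', Integer, primary_key=True),
          Column('foo', String(50)))

class A(object): pass
class B(object): pass

mapper(A, a, properties={
    'bs': relationship(
        B,
        primaryjoin=a.c.foo.op('&*', is_comparison=True)(
            foreign(b.c.foo)),
        viewonly=True)
})
mapper(B, b)

print(Session().query(A).join(A.bs))
# ... FROM a JOIN b ON a.foo &* b.foo
]
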
class ViewOnlyHistoryTest(fixtures.MappedTest):
+
@classmethod
def define_tables(cls, metadata):
Table("t1", metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(40)))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(40)))
Table("t2", metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(40)),
- Column('t1id', Integer, ForeignKey('t1.id')))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(40)),
+ Column('t1id', Integer, ForeignKey('t1.id')))
def _assert_fk(self, a1, b1, is_set):
s = Session(testing.db)
@@ -1842,12 +2014,13 @@ class ViewOnlyHistoryTest(fixtures.MappedTest):
def test_o2m_viewonly_oneside(self):
class A(fixtures.ComparableEntity):
pass
+
class B(fixtures.ComparableEntity):
pass
mapper(A, self.tables.t1, properties={
"bs": relationship(B, viewonly=True,
- backref=backref("a", viewonly=False))
+ backref=backref("a", viewonly=False))
})
mapper(B, self.tables.t2)
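
[editor's note: what this fixture checks, in miniature — when only one half of a backref pair is viewonly, mutations on the writable half still flush, while the viewonly half carries no flush-time history. A sketch under the same two-table layout:

from sqlalchemy import (
    Column, ForeignKey, Integer, MetaData, Table, create_engine)
from sqlalchemy.orm import Session, backref, mapper, relationship

metadata = MetaData()
t1 = Table('t1', metadata, Column('id', Integer, primary_key=True))
t2 = Table('t2', metadata,
           Column('id', Integer, primary_key=True),
           Column('t1id', Integer, ForeignKey('t1.id')))

class A(object): pass
class B(object): pass

mapper(A, t1, properties={
    'bs': relationship(B, viewonly=True,
                       backref=backref('a', viewonly=False))
})
mapper(B, t2)

engine = create_engine('sqlite://')
metadata.create_all(engine)
sess = Session(engine)
a1, b1 = A(), B()
b1.a = a1                 # writable side: participates in flush
sess.add(a1); sess.add(b1)
sess.flush()
assert b1.t1id == a1.id   # FK set; appending to a1.bs would not do this
]
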
@@ -1867,12 +2040,13 @@ class ViewOnlyHistoryTest(fixtures.MappedTest):
def test_m2o_viewonly_oneside(self):
class A(fixtures.ComparableEntity):
pass
+
class B(fixtures.ComparableEntity):
pass
mapper(A, self.tables.t1, properties={
"bs": relationship(B, viewonly=False,
- backref=backref("a", viewonly=True))
+ backref=backref("a", viewonly=True))
})
mapper(B, self.tables.t2)
@@ -1892,6 +2066,7 @@ class ViewOnlyHistoryTest(fixtures.MappedTest):
def test_o2m_viewonly_only(self):
class A(fixtures.ComparableEntity):
pass
+
class B(fixtures.ComparableEntity):
pass
@@ -1910,13 +2085,14 @@ class ViewOnlyHistoryTest(fixtures.MappedTest):
def test_m2o_viewonly_only(self):
class A(fixtures.ComparableEntity):
pass
+
class B(fixtures.ComparableEntity):
pass
mapper(A, self.tables.t1)
mapper(B, self.tables.t2, properties={
'a': relationship(A, viewonly=True)
- })
+ })
a1 = A()
b1 = B()
@@ -1925,34 +2101,39 @@ class ViewOnlyHistoryTest(fixtures.MappedTest):
self._assert_fk(a1, b1, False)
+
class ViewOnlyM2MBackrefTest(fixtures.MappedTest):
+
@classmethod
def define_tables(cls, metadata):
Table("t1", metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(40)))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(40)))
Table("t2", metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(40)),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(40)),
+ )
Table("t1t2", metadata,
- Column('t1id', Integer, ForeignKey('t1.id'), primary_key=True),
- Column('t2id', Integer, ForeignKey('t2.id'), primary_key=True),
- )
+ Column('t1id', Integer, ForeignKey('t1.id'), primary_key=True),
+ Column('t2id', Integer, ForeignKey('t2.id'), primary_key=True),
+ )
def test_viewonly(self):
t1t2, t2, t1 = (self.tables.t1t2,
- self.tables.t2,
- self.tables.t1)
+ self.tables.t2,
+ self.tables.t1)
- class A(fixtures.ComparableEntity):pass
- class B(fixtures.ComparableEntity):pass
+ class A(fixtures.ComparableEntity):
+ pass
+
+ class B(fixtures.ComparableEntity):
+ pass
mapper(A, t1, properties={
- 'bs':relationship(B, secondary=t1t2,
- backref=backref('as_', viewonly=True))
+ 'bs': relationship(B, secondary=t1t2,
+ backref=backref('as_', viewonly=True))
})
mapper(B, t2)
@@ -1971,25 +2152,27 @@ class ViewOnlyM2MBackrefTest(fixtures.MappedTest):
sess.query(B).first(), B(as_=[A(id=a1.id)])
)
+
class ViewOnlyOverlappingNames(fixtures.MappedTest):
+
"""'viewonly' mappings with overlapping PK column names."""
@classmethod
def define_tables(cls, metadata):
Table("t1", metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(40)))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(40)))
Table("t2", metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(40)),
- Column('t1id', Integer, ForeignKey('t1.id')))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(40)),
+ Column('t1id', Integer, ForeignKey('t1.id')))
Table("t3", metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(40)),
- Column('t2id', Integer, ForeignKey('t2.id')))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(40)),
+ Column('t2id', Integer, ForeignKey('t2.id')))
def test_three_table_view(self):
"""A three table join with overlapping PK names.
@@ -2001,23 +2184,29 @@ class ViewOnlyOverlappingNames(fixtures.MappedTest):
"""
t2, t3, t1 = (self.tables.t2,
- self.tables.t3,
- self.tables.t1)
+ self.tables.t3,
+ self.tables.t1)
+
+ class C1(fixtures.BasicEntity):
+ pass
+
+ class C2(fixtures.BasicEntity):
+ pass
- class C1(fixtures.BasicEntity): pass
- class C2(fixtures.BasicEntity): pass
- class C3(fixtures.BasicEntity): pass
+ class C3(fixtures.BasicEntity):
+ pass
mapper(C1, t1, properties={
- 't2s':relationship(C2),
- 't2_view':relationship(C2,
- viewonly=True,
- primaryjoin=sa.and_(t1.c.id==t2.c.t1id,
- t3.c.t2id==t2.c.id,
- t3.c.data==t1.c.data))})
+ 't2s': relationship(C2),
+ 't2_view': relationship(
+ C2,
+ viewonly=True,
+ primaryjoin=sa.and_(t1.c.id == t2.c.t1id,
+ t3.c.t2id == t2.c.id,
+ t3.c.data == t1.c.data))})
mapper(C2, t2)
mapper(C3, t3, properties={
- 't2':relationship(C2)})
+ 't2': relationship(C2)})
c1 = C1()
c1.data = 'c1data'
@@ -2026,7 +2215,7 @@ class ViewOnlyOverlappingNames(fixtures.MappedTest):
c2b = C2()
c1.t2s.append(c2b)
c3 = C3()
- c3.data='c1data'
+ c3.data = 'c1data'
c3.t2 = c2b
sess = create_session()
sess.add(c1)
@@ -2038,25 +2227,27 @@ class ViewOnlyOverlappingNames(fixtures.MappedTest):
assert set([x.id for x in c1.t2s]) == set([c2a.id, c2b.id])
assert set([x.id for x in c1.t2_view]) == set([c2b.id])
+
class ViewOnlyUniqueNames(fixtures.MappedTest):
+
"""'viewonly' mappings with unique PK column names."""
@classmethod
def define_tables(cls, metadata):
Table("t1", metadata,
- Column('t1id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(40)))
+ Column('t1id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(40)))
Table("t2", metadata,
- Column('t2id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(40)),
- Column('t1id_ref', Integer, ForeignKey('t1.t1id')))
+ Column('t2id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(40)),
+ Column('t1id_ref', Integer, ForeignKey('t1.t1id')))
Table("t3", metadata,
- Column('t3id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(40)),
- Column('t2id_ref', Integer, ForeignKey('t2.t2id')))
+ Column('t3id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(40)),
+ Column('t2id_ref', Integer, ForeignKey('t2.t2id')))
def test_three_table_view(self):
"""A three table join with overlapping PK names.
@@ -2067,23 +2258,29 @@ class ViewOnlyUniqueNames(fixtures.MappedTest):
"""
t2, t3, t1 = (self.tables.t2,
- self.tables.t3,
- self.tables.t1)
+ self.tables.t3,
+ self.tables.t1)
+
+ class C1(fixtures.BasicEntity):
+ pass
+
+ class C2(fixtures.BasicEntity):
+ pass
- class C1(fixtures.BasicEntity): pass
- class C2(fixtures.BasicEntity): pass
- class C3(fixtures.BasicEntity): pass
+ class C3(fixtures.BasicEntity):
+ pass
mapper(C1, t1, properties={
- 't2s':relationship(C2),
- 't2_view':relationship(C2,
- viewonly=True,
- primaryjoin=sa.and_(t1.c.t1id==t2.c.t1id_ref,
- t3.c.t2id_ref==t2.c.t2id,
- t3.c.data==t1.c.data))})
+ 't2s': relationship(C2),
+ 't2_view': relationship(
+ C2,
+ viewonly=True,
+ primaryjoin=sa.and_(t1.c.t1id == t2.c.t1id_ref,
+ t3.c.t2id_ref == t2.c.t2id,
+ t3.c.data == t1.c.data))})
mapper(C2, t2)
mapper(C3, t3, properties={
- 't2':relationship(C2)})
+ 't2': relationship(C2)})
c1 = C1()
c1.data = 'c1data'
@@ -2092,7 +2289,7 @@ class ViewOnlyUniqueNames(fixtures.MappedTest):
c2b = C2()
c1.t2s.append(c2b)
c3 = C3()
- c3.data='c1data'
+ c3.data = 'c1data'
c3.t2 = c2b
sess = create_session()
@@ -2104,30 +2301,35 @@ class ViewOnlyUniqueNames(fixtures.MappedTest):
assert set([x.t2id for x in c1.t2s]) == set([c2a.t2id, c2b.t2id])
assert set([x.t2id for x in c1.t2_view]) == set([c2b.t2id])
+
class ViewOnlyLocalRemoteM2M(fixtures.TestBase):
+
"""test that local-remote is correctly determined for m2m"""
def test_local_remote(self):
meta = MetaData()
t1 = Table('t1', meta,
- Column('id', Integer, primary_key=True),
- )
+ Column('id', Integer, primary_key=True),
+ )
t2 = Table('t2', meta,
- Column('id', Integer, primary_key=True),
- )
+ Column('id', Integer, primary_key=True),
+ )
t12 = Table('tab', meta,
- Column('t1_id', Integer, ForeignKey('t1.id',)),
- Column('t2_id', Integer, ForeignKey('t2.id',)),
- )
+ Column('t1_id', Integer, ForeignKey('t1.id',)),
+ Column('t2_id', Integer, ForeignKey('t2.id',)),
+ )
- class A(object): pass
- class B(object): pass
- mapper( B, t2, )
- m = mapper( A, t1, properties=dict(
- b_view = relationship( B, secondary=t12, viewonly=True),
- b_plain= relationship( B, secondary=t12),
- )
+ class A(object):
+ pass
+
+ class B(object):
+ pass
+ mapper(B, t2, )
+ m = mapper(A, t1, properties=dict(
+ b_view=relationship(B, secondary=t12, viewonly=True),
+ b_plain=relationship(B, secondary=t12),
+ )
)
configure_mappers()
assert m.get_property('b_view').local_remote_pairs == \
@@ -2135,31 +2337,32 @@ class ViewOnlyLocalRemoteM2M(fixtures.TestBase):
[(t1.c.id, t12.c.t1_id), (t2.c.id, t12.c.t2_id)]
-
class ViewOnlyNonEquijoin(fixtures.MappedTest):
+
"""'viewonly' mappings based on non-equijoins."""
@classmethod
def define_tables(cls, metadata):
Table('foos', metadata,
- Column('id', Integer, primary_key=True))
+ Column('id', Integer, primary_key=True))
Table('bars', metadata,
- Column('id', Integer, primary_key=True),
- Column('fid', Integer))
+ Column('id', Integer, primary_key=True),
+ Column('fid', Integer))
def test_viewonly_join(self):
bars, foos = self.tables.bars, self.tables.foos
class Foo(fixtures.ComparableEntity):
pass
+
class Bar(fixtures.ComparableEntity):
pass
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- primaryjoin=foos.c.id > bars.c.fid,
- foreign_keys=[bars.c.fid],
- viewonly=True)})
+ 'bars': relationship(Bar,
+ primaryjoin=foos.c.id > bars.c.fid,
+ foreign_keys=[bars.c.fid],
+ viewonly=True)})
mapper(Bar, bars)
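
[editor's note: the mapping above, reduced to its essentials — with no FOREIGN KEY between the tables and a non-equality join, foreign_keys nominates the column to treat as foreign, and viewonly=True waives the flush-time requirement for equated pairs:

from sqlalchemy import Column, Integer, MetaData, Table
from sqlalchemy.orm import configure_mappers, mapper, relationship

metadata = MetaData()
foos = Table('foos', metadata, Column('id', Integer, primary_key=True))
bars = Table('bars', metadata,
             Column('id', Integer, primary_key=True),
             Column('fid', Integer))

class Foo(object): pass
class Bar(object): pass

mapper(Foo, foos, properties={
    # loads every Bar whose fid is less than this Foo's id
    'bars': relationship(Bar,
                         primaryjoin=foos.c.id > bars.c.fid,
                         foreign_keys=[bars.c.fid],
                         viewonly=True)})
mapper(Bar, bars)
configure_mappers()   # succeeds; drop viewonly=True and it raises
]
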
@@ -2180,17 +2383,22 @@ class ViewOnlyNonEquijoin(fixtures.MappedTest):
class ViewOnlyRepeatedRemoteColumn(fixtures.MappedTest):
+
"""'viewonly' mappings that contain the same 'remote' column twice"""
@classmethod
def define_tables(cls, metadata):
Table('foos', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('bid1', Integer,ForeignKey('bars.id')),
- Column('bid2', Integer,ForeignKey('bars.id')))
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('bid1', Integer, ForeignKey('bars.id')),
+ Column('bid2', Integer, ForeignKey('bars.id')))
Table('bars', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('data', String(50)))
def test_relationship_on_or(self):
@@ -2198,15 +2406,16 @@ class ViewOnlyRepeatedRemoteColumn(fixtures.MappedTest):
class Foo(fixtures.ComparableEntity):
pass
+
class Bar(fixtures.ComparableEntity):
pass
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- primaryjoin=sa.or_(bars.c.id == foos.c.bid1,
- bars.c.id == foos.c.bid2),
- uselist=True,
- viewonly=True)})
+ 'bars': relationship(Bar,
+ primaryjoin=sa.or_(bars.c.id == foos.c.bid1,
+ bars.c.id == foos.c.bid2),
+ uselist=True,
+ viewonly=True)})
mapper(Bar, bars)
sess = create_session()
@@ -2228,18 +2437,20 @@ class ViewOnlyRepeatedRemoteColumn(fixtures.MappedTest):
eq_(sess.query(Foo).filter_by(id=f2.id).one(),
Foo(bars=[Bar(data='b3')]))
+
class ViewOnlyRepeatedLocalColumn(fixtures.MappedTest):
+
"""'viewonly' mappings that contain the same 'local' column twice"""
@classmethod
def define_tables(cls, metadata):
Table('foos', metadata,
Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column('data', String(50)))
Table('bars', metadata, Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column('fid1', Integer, ForeignKey('foos.id')),
Column('fid2', Integer, ForeignKey('foos.id')),
Column('data', String(50)))
@@ -2249,14 +2460,15 @@ class ViewOnlyRepeatedLocalColumn(fixtures.MappedTest):
class Foo(fixtures.ComparableEntity):
pass
+
class Bar(fixtures.ComparableEntity):
pass
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- primaryjoin=sa.or_(bars.c.fid1 == foos.c.id,
- bars.c.fid2 == foos.c.id),
- viewonly=True)})
+ 'bars': relationship(Bar,
+ primaryjoin=sa.or_(bars.c.fid1 == foos.c.id,
+ bars.c.fid2 == foos.c.id),
+ viewonly=True)})
mapper(Bar, bars)
sess = create_session()
@@ -2279,57 +2491,61 @@ class ViewOnlyRepeatedLocalColumn(fixtures.MappedTest):
eq_(sess.query(Foo).filter_by(id=f2.id).one(),
Foo(bars=[Bar(data='b3'), Bar(data='b4')]))
+
class ViewOnlyComplexJoin(_RelationshipErrors, fixtures.MappedTest):
+
"""'viewonly' mappings with a complex join condition."""
@classmethod
def define_tables(cls, metadata):
Table('t1', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(50)))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(50)))
Table('t2', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(50)),
- Column('t1id', Integer, ForeignKey('t1.id')))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(50)),
+ Column('t1id', Integer, ForeignKey('t1.id')))
Table('t3', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(50)))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(50)))
Table('t2tot3', metadata,
- Column('t2id', Integer, ForeignKey('t2.id')),
- Column('t3id', Integer, ForeignKey('t3.id')))
+ Column('t2id', Integer, ForeignKey('t2.id')),
+ Column('t3id', Integer, ForeignKey('t3.id')))
@classmethod
def setup_classes(cls):
class T1(cls.Comparable):
pass
+
class T2(cls.Comparable):
pass
+
class T3(cls.Comparable):
pass
def test_basic(self):
T1, t2, T2, T3, t3, t2tot3, t1 = (self.classes.T1,
- self.tables.t2,
- self.classes.T2,
- self.classes.T3,
- self.tables.t3,
- self.tables.t2tot3,
- self.tables.t1)
+ self.tables.t2,
+ self.classes.T2,
+ self.classes.T3,
+ self.tables.t3,
+ self.tables.t2tot3,
+ self.tables.t1)
mapper(T1, t1, properties={
- 't3s':relationship(T3, primaryjoin=sa.and_(
- t1.c.id==t2.c.t1id,
- t2.c.id==t2tot3.c.t2id,
- t3.c.id==t2tot3.c.t3id),
- viewonly=True,
- foreign_keys=t3.c.id, remote_side=t2.c.t1id)
+ 't3s': relationship(T3, primaryjoin=sa.and_(
+ t1.c.id == t2.c.t1id,
+ t2.c.id == t2tot3.c.t2id,
+ t3.c.id == t2tot3.c.t3id),
+ viewonly=True,
+ foreign_keys=t3.c.id, remote_side=t2.c.t1id)
})
mapper(T2, t2, properties={
- 't1':relationship(T1),
- 't3s':relationship(T3, secondary=t2tot3)
+ 't1': relationship(T1),
+ 't3s': relationship(T3, secondary=t2tot3)
})
mapper(T3, t3)
@@ -2341,31 +2557,32 @@ class ViewOnlyComplexJoin(_RelationshipErrors, fixtures.MappedTest):
a = sess.query(T1).first()
eq_(a.t3s, [T3(data='t3')])
-
def test_remote_side_escalation(self):
T1, t2, T2, T3, t3, t2tot3, t1 = (self.classes.T1,
- self.tables.t2,
- self.classes.T2,
- self.classes.T3,
- self.tables.t3,
- self.tables.t2tot3,
- self.tables.t1)
+ self.tables.t2,
+ self.classes.T2,
+ self.classes.T3,
+ self.tables.t3,
+ self.tables.t2tot3,
+ self.tables.t1)
mapper(T1, t1, properties={
- 't3s':relationship(T3,
- primaryjoin=sa.and_(t1.c.id==t2.c.t1id,
- t2.c.id==t2tot3.c.t2id,
- t3.c.id==t2tot3.c.t3id
- ),
- viewonly=True,
- foreign_keys=t3.c.id)})
+ 't3s': relationship(T3,
+ primaryjoin=sa.and_(t1.c.id == t2.c.t1id,
+ t2.c.id == t2tot3.c.t2id,
+ t3.c.id == t2tot3.c.t3id
+ ),
+ viewonly=True,
+ foreign_keys=t3.c.id)})
mapper(T2, t2, properties={
- 't1':relationship(T1),
- 't3s':relationship(T3, secondary=t2tot3)})
+ 't1': relationship(T1),
+ 't3s': relationship(T3, secondary=t2tot3)})
mapper(T3, t3)
self._assert_raises_no_local_remote(configure_mappers, "T1.t3s")
+
class RemoteForeignBetweenColsTest(fixtures.DeclarativeMappedTest):
+
"""test a complex annotation using between().
Using declarative here as an integration test for the local()
@@ -2381,23 +2598,23 @@ class RemoteForeignBetweenColsTest(fixtures.DeclarativeMappedTest):
__tablename__ = "network"
id = Column(sa.Integer, primary_key=True,
- test_needs_autoincrement=True)
+ test_needs_autoincrement=True)
ip_net_addr = Column(Integer)
ip_broadcast_addr = Column(Integer)
- addresses = relationship("Address",
- primaryjoin="remote(foreign(Address.ip_addr)).between("
- "Network.ip_net_addr,"
- "Network.ip_broadcast_addr)",
- viewonly=True
- )
+ addresses = relationship(
+ "Address",
+ primaryjoin="remote(foreign(Address.ip_addr)).between("
+ "Network.ip_net_addr,"
+ "Network.ip_broadcast_addr)",
+ viewonly=True
+ )
class Address(fixtures.ComparableEntity, Base):
__tablename__ = "address"
ip_addr = Column(Integer, primary_key=True)
-
@classmethod
def insert_data(cls):
Network, Address = cls.classes.Network, cls.classes.Address
@@ -2417,11 +2634,11 @@ class RemoteForeignBetweenColsTest(fixtures.DeclarativeMappedTest):
session = Session(testing.db)
eq_(
- session.query(Address.ip_addr).\
- select_from(Network).\
- join(Network.addresses).\
- filter(Network.ip_net_addr == 15).\
- all(),
+ session.query(Address.ip_addr).
+ select_from(Network).
+ join(Network.addresses).
+ filter(Network.ip_net_addr == 15).
+ all(),
[(17, ), (18, )]
)
@@ -2439,59 +2656,61 @@ class ExplicitLocalRemoteTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('t1', metadata,
- Column('id', String(50), primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(50)))
+ Column('id', String(50), primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(50)))
Table('t2', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(50)),
- Column('t1id', String(50)))
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(50)),
+ Column('t1id', String(50)))
@classmethod
def setup_classes(cls):
class T1(cls.Comparable):
pass
+
class T2(cls.Comparable):
pass
def test_onetomany_funcfk_oldstyle(self):
T2, T1, t2, t1 = (self.classes.T2,
- self.classes.T1,
- self.tables.t2,
- self.tables.t1)
+ self.classes.T1,
+ self.tables.t2,
+ self.tables.t1)
# old _local_remote_pairs
mapper(T1, t1, properties={
- 't2s':relationship(T2,
- primaryjoin=t1.c.id==sa.func.lower(t2.c.t1id),
- _local_remote_pairs=[(t1.c.id, t2.c.t1id)],
- foreign_keys=[t2.c.t1id]
- )
- })
+ 't2s': relationship(
+ T2,
+ primaryjoin=t1.c.id == sa.func.lower(t2.c.t1id),
+ _local_remote_pairs=[(t1.c.id, t2.c.t1id)],
+ foreign_keys=[t2.c.t1id]
+ )
+ })
mapper(T2, t2)
self._test_onetomany()
def test_onetomany_funcfk_annotated(self):
T2, T1, t2, t1 = (self.classes.T2,
- self.classes.T1,
- self.tables.t2,
- self.tables.t1)
+ self.classes.T1,
+ self.tables.t2,
+ self.tables.t1)
# use annotation
mapper(T1, t1, properties={
- 't2s':relationship(T2,
- primaryjoin=t1.c.id==
- foreign(sa.func.lower(t2.c.t1id)),
- )})
+ 't2s': relationship(T2,
+ primaryjoin=t1.c.id ==
+ foreign(sa.func.lower(t2.c.t1id)),
+ )})
mapper(T2, t2)
self._test_onetomany()
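
[editor's note: the two tests above spell the same mapping two ways; the annotation form is the current idiom. Minimal sketch — wrapping the function-adorned column in foreign() is enough for the ORM to derive direction and local/remote pairs, with no _local_remote_pairs or foreign_keys needed:

from sqlalchemy import Column, Integer, MetaData, String, Table, func
from sqlalchemy.orm import (
    configure_mappers, foreign, mapper, relationship)

metadata = MetaData()
t1 = Table('t1', metadata, Column('id', String(50), primary_key=True))
t2 = Table('t2', metadata,
           Column('id', Integer, primary_key=True),
           Column('t1id', String(50)))

class T1(object): pass
class T2(object): pass

mapper(T1, t1, properties={
    't2s': relationship(
        T2,
        primaryjoin=t1.c.id == foreign(func.lower(t2.c.t1id)))
})
mapper(T2, t2)
configure_mappers()
assert T1.t2s.property.local_remote_pairs == [(t1.c.id, t2.c.t1id)]
]
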
def _test_onetomany(self):
T2, T1, t2, t1 = (self.classes.T2,
- self.classes.T1,
- self.tables.t2,
- self.tables.t1)
+ self.classes.T1,
+ self.tables.t2,
+ self.tables.t1)
is_(T1.t2s.property.direction, ONETOMANY)
eq_(T1.t2s.property.local_remote_pairs, [(t1.c.id, t2.c.t1id)])
sess = create_session()
@@ -2511,17 +2730,17 @@ class ExplicitLocalRemoteTest(fixtures.MappedTest):
def test_manytoone_funcfk(self):
T2, T1, t2, t1 = (self.classes.T2,
- self.classes.T1,
- self.tables.t2,
- self.tables.t1)
+ self.classes.T1,
+ self.tables.t2,
+ self.tables.t1)
mapper(T1, t1)
mapper(T2, t2, properties={
- 't1':relationship(T1,
- primaryjoin=t1.c.id==sa.func.lower(t2.c.t1id),
- _local_remote_pairs=[(t2.c.t1id, t1.c.id)],
- foreign_keys=[t2.c.t1id],
- uselist=True)})
+ 't1': relationship(T1,
+ primaryjoin=t1.c.id == sa.func.lower(t2.c.t1id),
+ _local_remote_pairs=[(t2.c.t1id, t1.c.id)],
+ foreign_keys=[t2.c.t1id],
+ uselist=True)})
sess = create_session()
a1 = T1(id='number1', data='a1')
@@ -2539,15 +2758,16 @@ class ExplicitLocalRemoteTest(fixtures.MappedTest):
def test_onetomany_func_referent(self):
T2, T1, t2, t1 = (self.classes.T2,
- self.classes.T1,
- self.tables.t2,
- self.tables.t1)
+ self.classes.T1,
+ self.tables.t2,
+ self.tables.t1)
mapper(T1, t1, properties={
- 't2s':relationship(T2,
- primaryjoin=sa.func.lower(t1.c.id)==t2.c.t1id,
- _local_remote_pairs=[(t1.c.id, t2.c.t1id)],
- foreign_keys=[t2.c.t1id])})
+ 't2s': relationship(
+ T2,
+ primaryjoin=sa.func.lower(t1.c.id) == t2.c.t1id,
+ _local_remote_pairs=[(t1.c.id, t2.c.t1id)],
+ foreign_keys=[t2.c.t1id])})
mapper(T2, t2)
sess = create_session()
@@ -2562,21 +2782,21 @@ class ExplicitLocalRemoteTest(fixtures.MappedTest):
eq_(sess.query(T1).first(),
T1(id='NuMbeR1', data='a1', t2s=[
- T2(data='b1', t1id='number1'),
- T2(data='b2', t1id='number1')]))
+ T2(data='b1', t1id='number1'),
+ T2(data='b2', t1id='number1')]))
def test_manytoone_func_referent(self):
T2, T1, t2, t1 = (self.classes.T2,
- self.classes.T1,
- self.tables.t2,
- self.tables.t1)
+ self.classes.T1,
+ self.tables.t2,
+ self.tables.t1)
mapper(T1, t1)
mapper(T2, t2, properties={
- 't1':relationship(T1,
- primaryjoin=sa.func.lower(t1.c.id)==t2.c.t1id,
- _local_remote_pairs=[(t2.c.t1id, t1.c.id)],
- foreign_keys=[t2.c.t1id], uselist=True)})
+ 't1': relationship(T1,
+ primaryjoin=sa.func.lower(t1.c.id) == t2.c.t1id,
+ _local_remote_pairs=[(t2.c.t1id, t1.c.id)],
+ foreign_keys=[t2.c.t1id], uselist=True)})
sess = create_session()
a1 = T1(id='NuMbeR1', data='a1')
@@ -2594,40 +2814,44 @@ class ExplicitLocalRemoteTest(fixtures.MappedTest):
def test_escalation_1(self):
T2, T1, t2, t1 = (self.classes.T2,
- self.classes.T1,
- self.tables.t2,
- self.tables.t1)
+ self.classes.T1,
+ self.tables.t2,
+ self.tables.t1)
mapper(T1, t1, properties={
- 't2s':relationship(T2,
- primaryjoin=t1.c.id==sa.func.lower(t2.c.t1id),
- _local_remote_pairs=[(t1.c.id, t2.c.t1id)],
- foreign_keys=[t2.c.t1id],
- remote_side=[t2.c.t1id])})
+ 't2s': relationship(
+ T2,
+ primaryjoin=t1.c.id == sa.func.lower(t2.c.t1id),
+ _local_remote_pairs=[(t1.c.id, t2.c.t1id)],
+ foreign_keys=[t2.c.t1id],
+ remote_side=[t2.c.t1id])})
mapper(T2, t2)
assert_raises(sa.exc.ArgumentError, sa.orm.configure_mappers)
def test_escalation_2(self):
T2, T1, t2, t1 = (self.classes.T2,
- self.classes.T1,
- self.tables.t2,
- self.tables.t1)
+ self.classes.T1,
+ self.tables.t2,
+ self.tables.t1)
mapper(T1, t1, properties={
- 't2s':relationship(T2,
- primaryjoin=t1.c.id==sa.func.lower(t2.c.t1id),
- _local_remote_pairs=[(t1.c.id, t2.c.t1id)])})
+ 't2s': relationship(
+ T2,
+ primaryjoin=t1.c.id == sa.func.lower(t2.c.t1id),
+ _local_remote_pairs=[(t1.c.id, t2.c.t1id)])})
mapper(T2, t2)
assert_raises(sa.exc.ArgumentError, sa.orm.configure_mappers)
+
class InvalidRemoteSideTest(fixtures.MappedTest):
+
@classmethod
def define_tables(cls, metadata):
Table('t1', metadata,
- Column('id', Integer, primary_key=True),
- Column('data', String(50)),
- Column('t_id', Integer, ForeignKey('t1.id'))
- )
+ Column('id', Integer, primary_key=True),
+ Column('data', String(50)),
+ Column('t_id', Integer, ForeignKey('t1.id'))
+ )
@classmethod
def setup_classes(cls):
@@ -2638,10 +2862,11 @@ class InvalidRemoteSideTest(fixtures.MappedTest):
T1, t1 = self.classes.T1, self.tables.t1
mapper(T1, t1, properties={
- 't1s':relationship(T1, backref='parent')
+ 't1s': relationship(T1, backref='parent')
})
- assert_raises_message(sa.exc.ArgumentError,
+ assert_raises_message(
+ sa.exc.ArgumentError,
"T1.t1s and back-reference T1.parent are "
r"both of the same direction symbol\('ONETOMANY'\). Did you "
"mean to set remote_side on the many-to-one side ?",
@@ -2651,12 +2876,13 @@ class InvalidRemoteSideTest(fixtures.MappedTest):
T1, t1 = self.classes.T1, self.tables.t1
mapper(T1, t1, properties={
- 't1s':relationship(T1,
- backref=backref('parent', remote_side=t1.c.id),
- remote_side=t1.c.id)
+ 't1s': relationship(T1,
+ backref=backref('parent', remote_side=t1.c.id),
+ remote_side=t1.c.id)
})
- assert_raises_message(sa.exc.ArgumentError,
+ assert_raises_message(
+ sa.exc.ArgumentError,
"T1.t1s and back-reference T1.parent are "
r"both of the same direction symbol\('MANYTOONE'\). Did you "
"mean to set remote_side on the many-to-one side ?",
@@ -2666,12 +2892,13 @@ class InvalidRemoteSideTest(fixtures.MappedTest):
T1, t1 = self.classes.T1, self.tables.t1
mapper(T1, t1, properties={
- 't1s':relationship(T1, back_populates='parent'),
- 'parent':relationship(T1, back_populates='t1s'),
+ 't1s': relationship(T1, back_populates='parent'),
+ 'parent': relationship(T1, back_populates='t1s'),
})
# can't be sure of ordering here
- assert_raises_message(sa.exc.ArgumentError,
+ assert_raises_message(
+ sa.exc.ArgumentError,
r"both of the same direction symbol\('ONETOMANY'\). Did you "
"mean to set remote_side on the many-to-one side ?",
configure_mappers)
@@ -2680,44 +2907,48 @@ class InvalidRemoteSideTest(fixtures.MappedTest):
T1, t1 = self.classes.T1, self.tables.t1
mapper(T1, t1, properties={
- 't1s':relationship(T1, back_populates='parent',
+ 't1s': relationship(T1, back_populates='parent',
remote_side=t1.c.id),
- 'parent':relationship(T1, back_populates='t1s',
- remote_side=t1.c.id)
+ 'parent': relationship(T1, back_populates='t1s',
+ remote_side=t1.c.id)
})
# can't be sure of ordering here
- assert_raises_message(sa.exc.ArgumentError,
+ assert_raises_message(
+ sa.exc.ArgumentError,
r"both of the same direction symbol\('MANYTOONE'\). Did you "
"mean to set remote_side on the many-to-one side ?",
configure_mappers)
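
[editor's note: each of the four tests above drives the mapping into the "same direction symbol" error; the fix the message suggests looks like this — a standalone sketch of the usual self-referential adjacency-list pattern:

from sqlalchemy import Column, ForeignKey, Integer, MetaData, String, Table
from sqlalchemy.orm import (
    backref, configure_mappers, mapper, relationship)

metadata = MetaData()
t1 = Table('t1', metadata,
           Column('id', Integer, primary_key=True),
           Column('data', String(50)),
           Column('t_id', Integer, ForeignKey('t1.id')))

class T1(object): pass

mapper(T1, t1, properties={
    # remote_side marks 'parent' as the MANYTOONE side,
    # so the two directions no longer collide
    't1s': relationship(T1,
                        backref=backref('parent', remote_side=t1.c.id))
})
configure_mappers()   # no ArgumentError
]
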
+
class AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest):
+
@classmethod
def define_tables(cls, metadata):
Table("a", metadata,
- Column('id', Integer, primary_key=True)
- )
+ Column('id', Integer, primary_key=True)
+ )
Table("b", metadata,
- Column('id', Integer, primary_key=True),
- Column('aid_1', Integer, ForeignKey('a.id')),
- Column('aid_2', Integer, ForeignKey('a.id')),
- )
+ Column('id', Integer, primary_key=True),
+ Column('aid_1', Integer, ForeignKey('a.id')),
+ Column('aid_2', Integer, ForeignKey('a.id')),
+ )
Table("atob", metadata,
- Column('aid', Integer),
- Column('bid', Integer),
- )
+ Column('aid', Integer),
+ Column('bid', Integer),
+ )
Table("atob_ambiguous", metadata,
- Column('aid1', Integer, ForeignKey('a.id')),
- Column('bid1', Integer, ForeignKey('b.id')),
- Column('aid2', Integer, ForeignKey('a.id')),
- Column('bid2', Integer, ForeignKey('b.id')),
- )
+ Column('aid1', Integer, ForeignKey('a.id')),
+ Column('bid1', Integer, ForeignKey('b.id')),
+ Column('aid2', Integer, ForeignKey('a.id')),
+ Column('bid2', Integer, ForeignKey('b.id')),
+ )
@classmethod
def setup_classes(cls):
class A(cls.Basic):
pass
+
class B(cls.Basic):
pass
@@ -2725,7 +2956,7 @@ class AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest):
A, B = self.classes.A, self.classes.B
a, b = self.tables.a, self.tables.b
mapper(A, a, properties={
- 'bs':relationship(B)
+ 'bs': relationship(B)
})
mapper(B, b)
self._assert_raises_ambig_join(
@@ -2738,12 +2969,12 @@ class AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest):
A, B = self.classes.A, self.classes.B
a, b = self.tables.a, self.tables.b
mapper(A, a, properties={
- 'bs':relationship(B, foreign_keys=b.c.aid_1)
+ 'bs': relationship(B, foreign_keys=b.c.aid_1)
})
mapper(B, b)
sa.orm.configure_mappers()
assert A.bs.property.primaryjoin.compare(
- a.c.id==b.c.aid_1
+ a.c.id == b.c.aid_1
)
eq_(
A.bs.property._calculated_foreign_keys,
@@ -2754,12 +2985,12 @@ class AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest):
A, B = self.classes.A, self.classes.B
a, b = self.tables.a, self.tables.b
mapper(A, a, properties={
- 'bs':relationship(B, primaryjoin=a.c.id==b.c.aid_1)
+ 'bs': relationship(B, primaryjoin=a.c.id == b.c.aid_1)
})
mapper(B, b)
sa.orm.configure_mappers()
assert A.bs.property.primaryjoin.compare(
- a.c.id==b.c.aid_1
+ a.c.id == b.c.aid_1
)
eq_(
A.bs.property._calculated_foreign_keys,
@@ -2770,12 +3001,12 @@ class AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest):
A, B = self.classes.A, self.classes.B
a, b = self.tables.a, self.tables.b
mapper(A, a, properties={
- 'bs':relationship(B, primaryjoin=a.c.id==foreign(b.c.aid_1))
+ 'bs': relationship(B, primaryjoin=a.c.id == foreign(b.c.aid_1))
})
mapper(B, b)
sa.orm.configure_mappers()
assert A.bs.property.primaryjoin.compare(
- a.c.id==b.c.aid_1
+ a.c.id == b.c.aid_1
)
eq_(
A.bs.property._calculated_foreign_keys,
@@ -2786,7 +3017,7 @@ class AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest):
A, B = self.classes.A, self.classes.B
a, b, a_to_b = self.tables.a, self.tables.b, self.tables.atob
mapper(A, a, properties={
- 'bs':relationship(B, secondary=a_to_b)
+ 'bs': relationship(B, secondary=a_to_b)
})
mapper(B, b)
self._assert_raises_no_join(
@@ -2798,7 +3029,7 @@ class AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest):
A, B = self.classes.A, self.classes.B
a, b, a_to_b = self.tables.a, self.tables.b, self.tables.atob_ambiguous
mapper(A, a, properties={
- 'bs':relationship(B, secondary=a_to_b)
+ 'bs': relationship(B, secondary=a_to_b)
})
mapper(B, b)
@@ -2808,20 +3039,20 @@ class AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest):
"atob_ambiguous"
)
-
def test_with_fks_m2m(self):
A, B = self.classes.A, self.classes.B
a, b, a_to_b = self.tables.a, self.tables.b, self.tables.atob_ambiguous
mapper(A, a, properties={
- 'bs':relationship(B, secondary=a_to_b,
- foreign_keys=[a_to_b.c.aid1, a_to_b.c.bid1])
+ 'bs': relationship(B, secondary=a_to_b,
+ foreign_keys=[a_to_b.c.aid1, a_to_b.c.bid1])
})
mapper(B, b)
sa.orm.configure_mappers()
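
[editor's note: the resolutions exercised above, in one sketch — with two foreign keys from b to a, a bare relationship(B) is ambiguous, and any one of foreign_keys, an explicit primaryjoin, or a foreign() annotation settles it:

from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table
from sqlalchemy.orm import configure_mappers, mapper, relationship

metadata = MetaData()
a = Table('a', metadata, Column('id', Integer, primary_key=True))
b = Table('b', metadata,
          Column('id', Integer, primary_key=True),
          Column('aid_1', Integer, ForeignKey('a.id')),
          Column('aid_2', Integer, ForeignKey('a.id')))

class A(object): pass
class B(object): pass

mapper(A, a, properties={
    # relationship(B) alone cannot choose between aid_1 and aid_2
    # and raises at configure time; foreign_keys picks one
    'bs': relationship(B, foreign_keys=b.c.aid_1)
})
mapper(B, b)
configure_mappers()
assert A.bs.property.primaryjoin.compare(a.c.id == b.c.aid_1)
]
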
class SecondaryNestedJoinTest(fixtures.MappedTest, AssertsCompiledSQL,
- testing.AssertsExecutionResults):
+ testing.AssertsExecutionResults):
+
"""test support for a relationship where the 'secondary' table is a
compound join().
@@ -2835,35 +3066,44 @@ class SecondaryNestedJoinTest(fixtures.MappedTest, AssertsCompiledSQL,
@classmethod
def define_tables(cls, metadata):
- Table('a', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('name', String(30)),
- Column('b_id', ForeignKey('b.id'))
- )
+ Table(
+ 'a', metadata,
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(30)),
+ Column('b_id', ForeignKey('b.id'))
+ )
Table('b', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('name', String(30)),
- Column('d_id', ForeignKey('d.id'))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(30)),
+ Column('d_id', ForeignKey('d.id'))
+ )
Table('c', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('name', String(30)),
- Column('a_id', ForeignKey('a.id')),
- Column('d_id', ForeignKey('d.id'))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(30)),
+ Column('a_id', ForeignKey('a.id')),
+ Column('d_id', ForeignKey('d.id'))
+ )
Table('d', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('name', String(30)),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('name', String(30)),
+ )
@classmethod
def setup_classes(cls):
class A(cls.Comparable):
pass
+
class B(cls.Comparable):
pass
+
class C(cls.Comparable):
pass
+
class D(cls.Comparable):
pass
@@ -2875,21 +3115,23 @@ class SecondaryNestedJoinTest(fixtures.MappedTest, AssertsCompiledSQL,
#j = join(b, d, b.c.d_id == d.c.id).join(c, c.c.d_id == d.c.id).alias()
mapper(A, a, properties={
"b": relationship(B),
- "d": relationship(D, secondary=j,
- primaryjoin=and_(a.c.b_id == b.c.id, a.c.id == c.c.a_id),
- secondaryjoin=d.c.id == b.c.d_id,
- #primaryjoin=and_(a.c.b_id == j.c.b_id, a.c.id == j.c.c_a_id),
- #secondaryjoin=d.c.id == j.c.b_d_id,
- uselist=False
- )
- })
+ "d": relationship(
+ D, secondary=j,
+ primaryjoin=and_(a.c.b_id == b.c.id, a.c.id == c.c.a_id),
+ secondaryjoin=d.c.id == b.c.d_id,
+ #primaryjoin=and_(a.c.b_id == j.c.b_id, a.c.id == j.c.c_a_id),
+ #secondaryjoin=d.c.id == j.c.b_d_id,
+ uselist=False,
+ viewonly=True
+ )
+ })
mapper(B, b, properties={
- "d": relationship(D)
- })
+ "d": relationship(D)
+ })
mapper(C, c, properties={
- "a": relationship(A),
- "d": relationship(D)
- })
+ "a": relationship(A),
+ "d": relationship(D)
+ })
mapper(D, d)
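
[editor's note: besides reindentation, this hunk adds viewonly=True to A.d — a relationship whose 'secondary' is a compound join() is meant for querying and eager loading, not persistence. The shape of the mapping, extracted into a runnable sketch:

from sqlalchemy import (
    Column, ForeignKey, Integer, MetaData, Table, and_, join)
from sqlalchemy.orm import configure_mappers, mapper, relationship

metadata = MetaData()
a = Table('a', metadata,
          Column('id', Integer, primary_key=True),
          Column('b_id', ForeignKey('b.id')))
b = Table('b', metadata,
          Column('id', Integer, primary_key=True),
          Column('d_id', ForeignKey('d.id')))
c = Table('c', metadata,
          Column('id', Integer, primary_key=True),
          Column('a_id', ForeignKey('a.id')),
          Column('d_id', ForeignKey('d.id')))
d = Table('d', metadata, Column('id', Integer, primary_key=True))

# the "secondary" is itself a join of three tables,
# not a plain association table
j = join(b, d, b.c.d_id == d.c.id).join(c, c.c.d_id == d.c.id)

class A(object): pass
class D(object): pass

mapper(A, a, properties={
    'd': relationship(
        D, secondary=j,
        primaryjoin=and_(a.c.b_id == b.c.id, a.c.id == c.c.a_id),
        secondaryjoin=d.c.id == b.c.d_id,
        uselist=False, viewonly=True)
})
mapper(D, d)
configure_mappers()
]
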
@classmethod
@@ -2931,8 +3173,8 @@ class SecondaryNestedJoinTest(fixtures.MappedTest, AssertsCompiledSQL,
sess.query(A).join(A.d),
"SELECT a.id AS a_id, a.name AS a_name, a.b_id AS a_b_id "
"FROM a JOIN (b AS b_1 JOIN d AS d_1 ON b_1.d_id = d_1.id "
- "JOIN c AS c_1 ON c_1.d_id = d_1.id) ON a.b_id = b_1.id "
- "AND a.id = c_1.a_id JOIN d ON d.id = b_1.d_id",
+ "JOIN c AS c_1 ON c_1.d_id = d_1.id) ON a.b_id = b_1.id "
+ "AND a.id = c_1.a_id JOIN d ON d.id = b_1.d_id",
dialect="postgresql"
)
@@ -2944,8 +3186,8 @@ class SecondaryNestedJoinTest(fixtures.MappedTest, AssertsCompiledSQL,
"SELECT a.id AS a_id, a.name AS a_name, a.b_id AS a_b_id, "
"d_1.id AS d_1_id, d_1.name AS d_1_name FROM a LEFT OUTER JOIN "
"(b AS b_1 JOIN d AS d_2 ON b_1.d_id = d_2.id JOIN c AS c_1 "
- "ON c_1.d_id = d_2.id JOIN d AS d_1 ON d_1.id = b_1.d_id) "
- "ON a.b_id = b_1.id AND a.id = c_1.a_id",
+ "ON c_1.d_id = d_2.id JOIN d AS d_1 ON d_1.id = b_1.d_id) "
+ "ON a.b_id = b_1.id AND a.id = c_1.a_id",
dialect="postgresql"
)
@@ -2964,14 +3206,15 @@ class SecondaryNestedJoinTest(fixtures.MappedTest, AssertsCompiledSQL,
        # referring to just the columns won't actually render all those
# join conditions.
self.assert_sql_execution(
- testing.db,
- go,
- CompiledSQL(
- "SELECT d.id AS d_id, d.name AS d_name FROM b "
- "JOIN d ON b.d_id = d.id JOIN c ON c.d_id = d.id "
- "WHERE :param_1 = b.id AND :param_2 = c.a_id AND d.id = b.d_id",
- {'param_1': a1.id, 'param_2': a1.id}
- )
+ testing.db,
+ go,
+ CompiledSQL(
+ "SELECT d.id AS d_id, d.name AS d_name FROM b "
+ "JOIN d ON b.d_id = d.id JOIN c ON c.d_id = d.id "
+ "WHERE :param_1 = b.id AND :param_2 = c.a_id "
+ "AND d.id = b.d_id",
+ {'param_1': a1.id, 'param_2': a1.id}
+ )
)
mapping = {
@@ -2988,7 +3231,6 @@ class SecondaryNestedJoinTest(fixtures.MappedTest, AssertsCompiledSQL,
for a, d in sess.query(A, D).outerjoin(A.d):
eq_(self.mapping[a.name], d.name if d is not None else None)
-
def test_joinedload(self):
A, D = self.classes.A, self.classes.D
sess = Session()
@@ -3005,7 +3247,9 @@ class SecondaryNestedJoinTest(fixtures.MappedTest, AssertsCompiledSQL,
d = a.d
eq_(self.mapping[a.name], d.name if d is not None else None)
-class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest):
+
+class InvalidRelationshipEscalationTest(
+ _RelationshipErrors, fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
@@ -3017,20 +3261,20 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
Column('fid', Integer))
Table('foos_with_fks', metadata,
- Column('id', Integer, primary_key=True),
- Column('fid', Integer, ForeignKey('foos_with_fks.id')))
+ Column('id', Integer, primary_key=True),
+ Column('fid', Integer, ForeignKey('foos_with_fks.id')))
Table('bars_with_fks', metadata,
- Column('id', Integer, primary_key=True),
- Column('fid', Integer, ForeignKey('foos_with_fks.id')))
+ Column('id', Integer, primary_key=True),
+ Column('fid', Integer, ForeignKey('foos_with_fks.id')))
@classmethod
def setup_classes(cls):
class Foo(cls.Basic):
pass
+
class Bar(cls.Basic):
pass
-
def test_no_join(self):
bars, Foo, Bar, foos = (self.tables.bars,
self.classes.Foo,
@@ -3038,12 +3282,12 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
self.tables.foos)
mapper(Foo, foos, properties={
- 'bars':relationship(Bar)})
+ 'bars': relationship(Bar)})
mapper(Bar, bars)
self._assert_raises_no_join(sa.orm.configure_mappers,
- "Foo.bars", None
- )
+ "Foo.bars", None
+ )
def test_no_join_self_ref(self):
bars, Foo, Bar, foos = (self.tables.bars,
@@ -3052,7 +3296,7 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
self.tables.foos)
mapper(Foo, foos, properties={
- 'foos':relationship(Foo)})
+ 'foos': relationship(Foo)})
mapper(Bar, bars)
self._assert_raises_no_join(
@@ -3068,8 +3312,8 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
self.tables.foos)
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- primaryjoin=foos.c.id>bars.c.fid)})
+ 'bars': relationship(Bar,
+ primaryjoin=foos.c.id > bars.c.fid)})
mapper(Bar, bars)
self._assert_raises_no_relevant_fks(
@@ -3084,9 +3328,9 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
self.tables.foos)
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- primaryjoin=foos.c.id>bars.c.fid,
- foreign_keys=bars.c.fid)})
+ 'bars': relationship(Bar,
+ primaryjoin=foos.c.id > bars.c.fid,
+ foreign_keys=bars.c.fid)})
mapper(Bar, bars)
self._assert_raises_no_equality(
sa.orm.configure_mappers,
@@ -3094,25 +3338,27 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
)
def test_no_equated_wo_fks_works_on_relaxed(self):
- foos_with_fks, Foo, Bar, bars_with_fks, foos = (self.tables.foos_with_fks,
- self.classes.Foo,
- self.classes.Bar,
- self.tables.bars_with_fks,
- self.tables.foos)
+ foos_with_fks, Foo, Bar, bars_with_fks, foos = (
+ self.tables.foos_with_fks,
+ self.classes.Foo,
+ self.classes.Bar,
+ self.tables.bars_with_fks,
+ self.tables.foos)
        # an unusual case - the join between parent/child
# has no fks, but there is an fk join between two other
# tables in the join condition, for those users that try creating
# these big-long-string-of-joining-many-tables primaryjoins.
- # in this case we don't get eq_pairs, but we hit the "works if viewonly"
- # rule. so here we add another clause regarding "try foreign keys".
+ # in this case we don't get eq_pairs, but we hit the
+ # "works if viewonly" rule. so here we add another clause regarding
+ # "try foreign keys".
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- primaryjoin=and_(
- bars_with_fks.c.fid==foos_with_fks.c.id,
- foos_with_fks.c.id==foos.c.id,
- )
- )})
+ 'bars': relationship(Bar,
+ primaryjoin=and_(
+ bars_with_fks.c.fid == foos_with_fks.c.id,
+ foos_with_fks.c.id == foos.c.id,
+ )
+ )})
mapper(Bar, bars_with_fks)
self._assert_raises_no_equality(
@@ -3129,9 +3375,9 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
self.tables.foos)
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- primaryjoin=foos.c.id==bars.c.fid,
- foreign_keys=[foos.c.id, bars.c.fid])})
+ 'bars': relationship(Bar,
+ primaryjoin=foos.c.id == bars.c.fid,
+ foreign_keys=[foos.c.id, bars.c.fid])})
mapper(Bar, bars)
self._assert_raises_ambiguous_direction(
@@ -3146,12 +3392,12 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
self.tables.foos)
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- primaryjoin=foos.c.id==bars.c.fid,
- foreign_keys=[bars.c.fid],
- remote_side=[foos.c.id, bars.c.fid],
- viewonly=True
- )})
+ 'bars': relationship(Bar,
+ primaryjoin=foos.c.id == bars.c.fid,
+ foreign_keys=[bars.c.fid],
+ remote_side=[foos.c.id, bars.c.fid],
+ viewonly=True
+ )})
mapper(Bar, bars)
self._assert_raises_no_local_remote(
@@ -3159,7 +3405,6 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
"Foo.bars",
)
-
def test_ambiguous_remoteside_m2o(self):
bars, Foo, Bar, foos = (self.tables.bars,
self.classes.Foo,
@@ -3167,12 +3412,12 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
self.tables.foos)
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- primaryjoin=foos.c.id==bars.c.fid,
- foreign_keys=[foos.c.id],
- remote_side=[foos.c.id, bars.c.fid],
- viewonly=True
- )})
+ 'bars': relationship(Bar,
+ primaryjoin=foos.c.id == bars.c.fid,
+ foreign_keys=[foos.c.id],
+ remote_side=[foos.c.id, bars.c.fid],
+ viewonly=True
+ )})
mapper(Bar, bars)
self._assert_raises_no_local_remote(
@@ -3180,7 +3425,6 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
"Foo.bars",
)
-
def test_no_equated_self_ref_no_fks(self):
bars, Foo, Bar, foos = (self.tables.bars,
self.classes.Foo,
@@ -3188,14 +3432,14 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
self.tables.foos)
mapper(Foo, foos, properties={
- 'foos':relationship(Foo,
- primaryjoin=foos.c.id>foos.c.fid)})
+ 'foos': relationship(Foo,
+ primaryjoin=foos.c.id > foos.c.fid)})
mapper(Bar, bars)
- self._assert_raises_no_relevant_fks(configure_mappers,
- "foos.id > foos.fid", "Foo.foos", "primary"
- )
-
+ self._assert_raises_no_relevant_fks(
+ configure_mappers,
+ "foos.id > foos.fid", "Foo.foos", "primary"
+ )
def test_no_equated_self_ref_no_equality(self):
bars, Foo, Bar, foos = (self.tables.bars,
@@ -3204,27 +3448,28 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
self.tables.foos)
mapper(Foo, foos, properties={
- 'foos':relationship(Foo,
- primaryjoin=foos.c.id>foos.c.fid,
- foreign_keys=[foos.c.fid])})
+ 'foos': relationship(Foo,
+ primaryjoin=foos.c.id > foos.c.fid,
+ foreign_keys=[foos.c.fid])})
mapper(Bar, bars)
self._assert_raises_no_equality(configure_mappers,
- "foos.id > foos.fid", "Foo.foos", "primary"
- )
+ "foos.id > foos.fid", "Foo.foos", "primary"
+ )
def test_no_equated_viewonly(self):
- bars, Bar, bars_with_fks, foos_with_fks, Foo, foos = (self.tables.bars,
- self.classes.Bar,
- self.tables.bars_with_fks,
- self.tables.foos_with_fks,
- self.classes.Foo,
- self.tables.foos)
+ bars, Bar, bars_with_fks, foos_with_fks, Foo, foos = (
+ self.tables.bars,
+ self.classes.Bar,
+ self.tables.bars_with_fks,
+ self.tables.foos_with_fks,
+ self.classes.Foo,
+ self.tables.foos)
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- primaryjoin=foos.c.id>bars.c.fid,
- viewonly=True)})
+ 'bars': relationship(Bar,
+ primaryjoin=foos.c.id > bars.c.fid,
+ viewonly=True)})
mapper(Bar, bars)
self._assert_raises_no_relevant_fks(
@@ -3234,24 +3479,26 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
sa.orm.clear_mappers()
mapper(Foo, foos_with_fks, properties={
- 'bars':relationship(Bar,
- primaryjoin=foos_with_fks.c.id>bars_with_fks.c.fid,
- viewonly=True)})
+ 'bars': relationship(
+ Bar,
+ primaryjoin=foos_with_fks.c.id > bars_with_fks.c.fid,
+ viewonly=True)})
mapper(Bar, bars_with_fks)
sa.orm.configure_mappers()
def test_no_equated_self_ref_viewonly(self):
- bars, Bar, bars_with_fks, foos_with_fks, Foo, foos = (self.tables.bars,
- self.classes.Bar,
- self.tables.bars_with_fks,
- self.tables.foos_with_fks,
- self.classes.Foo,
- self.tables.foos)
+ bars, Bar, bars_with_fks, foos_with_fks, Foo, foos = (
+ self.tables.bars,
+ self.classes.Bar,
+ self.tables.bars_with_fks,
+ self.tables.foos_with_fks,
+ self.classes.Foo,
+ self.tables.foos)
mapper(Foo, foos, properties={
- 'foos':relationship(Foo,
- primaryjoin=foos.c.id>foos.c.fid,
- viewonly=True)})
+ 'foos': relationship(Foo,
+ primaryjoin=foos.c.id > foos.c.fid,
+ viewonly=True)})
mapper(Bar, bars)
self._assert_raises_no_relevant_fks(
@@ -3261,9 +3508,10 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
sa.orm.clear_mappers()
mapper(Foo, foos_with_fks, properties={
- 'foos':relationship(Foo,
- primaryjoin=foos_with_fks.c.id>foos_with_fks.c.fid,
- viewonly=True)})
+ 'foos': relationship(
+ Foo,
+ primaryjoin=foos_with_fks.c.id > foos_with_fks.c.fid,
+ viewonly=True)})
mapper(Bar, bars_with_fks)
sa.orm.configure_mappers()
@@ -3271,25 +3519,26 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
Foo, foos = self.classes.Foo, self.tables.foos
mapper(Foo, foos, properties={
- 'foos':relationship(Foo,
- primaryjoin=foos.c.id>foos.c.fid,
- viewonly=True,
- foreign_keys=[foos.c.fid])})
+ 'foos': relationship(Foo,
+ primaryjoin=foos.c.id > foos.c.fid,
+ viewonly=True,
+ foreign_keys=[foos.c.fid])})
sa.orm.configure_mappers()
eq_(Foo.foos.property.local_remote_pairs, [(foos.c.id, foos.c.fid)])
def test_equated(self):
- bars, Bar, bars_with_fks, foos_with_fks, Foo, foos = (self.tables.bars,
- self.classes.Bar,
- self.tables.bars_with_fks,
- self.tables.foos_with_fks,
- self.classes.Foo,
- self.tables.foos)
+ bars, Bar, bars_with_fks, foos_with_fks, Foo, foos = (
+ self.tables.bars,
+ self.classes.Bar,
+ self.tables.bars_with_fks,
+ self.tables.foos_with_fks,
+ self.classes.Foo,
+ self.tables.foos)
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- primaryjoin=foos.c.id==bars.c.fid)})
+ 'bars': relationship(Bar,
+ primaryjoin=foos.c.id == bars.c.fid)})
mapper(Bar, bars)
self._assert_raises_no_relevant_fks(
@@ -3299,8 +3548,9 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
sa.orm.clear_mappers()
mapper(Foo, foos_with_fks, properties={
- 'bars':relationship(Bar,
- primaryjoin=foos_with_fks.c.id==bars_with_fks.c.fid)})
+ 'bars': relationship(
+ Bar,
+ primaryjoin=foos_with_fks.c.id == bars_with_fks.c.fid)})
mapper(Bar, bars_with_fks)
sa.orm.configure_mappers()
@@ -3308,24 +3558,23 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
Foo, foos = self.classes.Foo, self.tables.foos
mapper(Foo, foos, properties={
- 'foos':relationship(Foo,
- primaryjoin=foos.c.id==foos.c.fid)})
+ 'foos': relationship(Foo,
+ primaryjoin=foos.c.id == foos.c.fid)})
self._assert_raises_no_relevant_fks(
configure_mappers,
"foos.id = foos.fid", "Foo.foos", "primary"
)
-
def test_equated_self_ref_wrong_fks(self):
bars, Foo, foos = (self.tables.bars,
- self.classes.Foo,
- self.tables.foos)
+ self.classes.Foo,
+ self.tables.foos)
mapper(Foo, foos, properties={
- 'foos':relationship(Foo,
- primaryjoin=foos.c.id==foos.c.fid,
- foreign_keys=[bars.c.id])})
+ 'foos': relationship(Foo,
+ primaryjoin=foos.c.id == foos.c.fid,
+ foreign_keys=[bars.c.id])})
self._assert_raises_no_relevant_fks(
configure_mappers,
@@ -3333,7 +3582,8 @@ class InvalidRelationshipEscalationTest(_RelationshipErrors, fixtures.MappedTest
)
-class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedTest):
+class InvalidRelationshipEscalationTestM2M(
+ _RelationshipErrors, fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
@@ -3345,9 +3595,9 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT
Column('id', Integer, primary_key=True))
Table('foobars_with_fks', metadata,
- Column('fid', Integer, ForeignKey('foos.id')),
- Column('bid', Integer, ForeignKey('bars.id'))
- )
+ Column('fid', Integer, ForeignKey('foos.id')),
+ Column('bid', Integer, ForeignKey('bars.id'))
+ )
Table('foobars_with_many_columns', metadata,
Column('fid', Integer),
@@ -3362,15 +3612,16 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT
def setup_classes(cls):
class Foo(cls.Basic):
pass
+
class Bar(cls.Basic):
pass
def test_no_join(self):
foobars, bars, Foo, Bar, foos = (self.tables.foobars,
- self.tables.bars,
- self.classes.Foo,
- self.classes.Bar,
- self.tables.foos)
+ self.tables.bars,
+ self.classes.Foo,
+ self.classes.Bar,
+ self.tables.foos)
mapper(Foo, foos, properties={
'bars': relationship(Bar, secondary=foobars)})
@@ -3384,15 +3635,15 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT
def test_no_secondaryjoin(self):
foobars, bars, Foo, Bar, foos = (self.tables.foobars,
- self.tables.bars,
- self.classes.Foo,
- self.classes.Bar,
- self.tables.foos)
+ self.tables.bars,
+ self.classes.Foo,
+ self.classes.Bar,
+ self.tables.foos)
mapper(Foo, foos, properties={
'bars': relationship(Bar,
- secondary=foobars,
- primaryjoin=foos.c.id > foobars.c.fid)})
+ secondary=foobars,
+ primaryjoin=foos.c.id > foobars.c.fid)})
mapper(Bar, bars)
self._assert_raises_no_join(
@@ -3402,17 +3653,18 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT
)
def test_no_fks(self):
- foobars_with_many_columns, bars, Bar, foobars, Foo, foos = (self.tables.foobars_with_many_columns,
- self.tables.bars,
- self.classes.Bar,
- self.tables.foobars,
- self.classes.Foo,
- self.tables.foos)
+ foobars_with_many_columns, bars, Bar, foobars, Foo, foos = (
+ self.tables.foobars_with_many_columns,
+ self.tables.bars,
+ self.classes.Bar,
+ self.tables.foobars,
+ self.classes.Foo,
+ self.tables.foos)
mapper(Foo, foos, properties={
'bars': relationship(Bar, secondary=foobars,
- primaryjoin=foos.c.id==foobars.c.fid,
- secondaryjoin=foobars.c.bid==bars.c.id)})
+ primaryjoin=foos.c.id == foobars.c.fid,
+ secondaryjoin=foobars.c.bid == bars.c.id)})
mapper(Bar, bars)
sa.orm.configure_mappers()
eq_(
@@ -3426,12 +3678,13 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT
sa.orm.clear_mappers()
mapper(Foo, foos, properties={
- 'bars': relationship(Bar,
- secondary=foobars_with_many_columns,
- primaryjoin=foos.c.id ==
- foobars_with_many_columns.c.fid,
- secondaryjoin=foobars_with_many_columns.c.bid ==
- bars.c.id)})
+ 'bars': relationship(
+ Bar,
+ secondary=foobars_with_many_columns,
+ primaryjoin=foos.c.id ==
+ foobars_with_many_columns.c.fid,
+ secondaryjoin=foobars_with_many_columns.c.bid ==
+ bars.c.id)})
mapper(Bar, bars)
sa.orm.configure_mappers()
eq_(
@@ -3445,17 +3698,17 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT
def test_local_col_setup(self):
foobars_with_fks, bars, Bar, Foo, foos = (
- self.tables.foobars_with_fks,
- self.tables.bars,
- self.classes.Bar,
- self.classes.Foo,
- self.tables.foos)
+ self.tables.foobars_with_fks,
+ self.tables.bars,
+ self.classes.Bar,
+ self.classes.Foo,
+ self.tables.foos)
# ensure m2m backref is set up with correct annotations
# [ticket:2578]
mapper(Foo, foos, properties={
'bars': relationship(Bar, secondary=foobars_with_fks, backref="foos")
- })
+ })
mapper(Bar, bars)
sa.orm.configure_mappers()
eq_(
@@ -3467,65 +3720,66 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT
set([bars.c.id])
)
-
-
def test_bad_primaryjoin(self):
- foobars_with_fks, bars, Bar, foobars, Foo, foos = (self.tables.foobars_with_fks,
- self.tables.bars,
- self.classes.Bar,
- self.tables.foobars,
- self.classes.Foo,
- self.tables.foos)
+ foobars_with_fks, bars, Bar, foobars, Foo, foos = (
+ self.tables.foobars_with_fks,
+ self.tables.bars,
+ self.classes.Bar,
+ self.tables.foobars,
+ self.classes.Foo,
+ self.tables.foos)
mapper(Foo, foos, properties={
'bars': relationship(Bar,
- secondary=foobars,
- primaryjoin=foos.c.id > foobars.c.fid,
- secondaryjoin=foobars.c.bid<=bars.c.id)})
+ secondary=foobars,
+ primaryjoin=foos.c.id > foobars.c.fid,
+ secondaryjoin=foobars.c.bid <= bars.c.id)})
mapper(Bar, bars)
self._assert_raises_no_equality(
- configure_mappers,
- 'foos.id > foobars.fid',
- "Foo.bars",
- "primary")
+ configure_mappers,
+ 'foos.id > foobars.fid',
+ "Foo.bars",
+ "primary")
sa.orm.clear_mappers()
mapper(Foo, foos, properties={
- 'bars': relationship(Bar,
- secondary=foobars_with_fks,
- primaryjoin=foos.c.id > foobars_with_fks.c.fid,
- secondaryjoin=foobars_with_fks.c.bid<=bars.c.id)})
+ 'bars': relationship(
+ Bar,
+ secondary=foobars_with_fks,
+ primaryjoin=foos.c.id > foobars_with_fks.c.fid,
+ secondaryjoin=foobars_with_fks.c.bid <= bars.c.id)})
mapper(Bar, bars)
self._assert_raises_no_equality(
- configure_mappers,
- 'foos.id > foobars_with_fks.fid',
- "Foo.bars",
- "primary")
+ configure_mappers,
+ 'foos.id > foobars_with_fks.fid',
+ "Foo.bars",
+ "primary")
sa.orm.clear_mappers()
mapper(Foo, foos, properties={
- 'bars': relationship(Bar,
- secondary=foobars_with_fks,
- primaryjoin=foos.c.id > foobars_with_fks.c.fid,
- secondaryjoin=foobars_with_fks.c.bid<=bars.c.id,
- viewonly=True)})
+ 'bars': relationship(
+ Bar,
+ secondary=foobars_with_fks,
+ primaryjoin=foos.c.id > foobars_with_fks.c.fid,
+ secondaryjoin=foobars_with_fks.c.bid <= bars.c.id,
+ viewonly=True)})
mapper(Bar, bars)
sa.orm.configure_mappers()
def test_bad_secondaryjoin(self):
foobars, bars, Foo, Bar, foos = (self.tables.foobars,
- self.tables.bars,
- self.classes.Foo,
- self.classes.Bar,
- self.tables.foos)
+ self.tables.bars,
+ self.classes.Foo,
+ self.classes.Bar,
+ self.tables.foos)
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- secondary=foobars,
- primaryjoin=foos.c.id == foobars.c.fid,
- secondaryjoin=foobars.c.bid <= bars.c.id,
- foreign_keys=[foobars.c.fid])})
+ 'bars': relationship(Bar,
+ secondary=foobars,
+ primaryjoin=foos.c.id == foobars.c.fid,
+ secondaryjoin=foobars.c.bid <= bars.c.id,
+ foreign_keys=[foobars.c.fid])})
mapper(Bar, bars)
self._assert_raises_no_relevant_fks(
configure_mappers,
@@ -3536,17 +3790,17 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT
def test_no_equated_secondaryjoin(self):
foobars, bars, Foo, Bar, foos = (self.tables.foobars,
- self.tables.bars,
- self.classes.Foo,
- self.classes.Bar,
- self.tables.foos)
+ self.tables.bars,
+ self.classes.Foo,
+ self.classes.Bar,
+ self.tables.foos)
mapper(Foo, foos, properties={
- 'bars':relationship(Bar,
- secondary=foobars,
- primaryjoin=foos.c.id == foobars.c.fid,
- secondaryjoin=foobars.c.bid <= bars.c.id,
- foreign_keys=[foobars.c.fid, foobars.c.bid])})
+ 'bars': relationship(Bar,
+ secondary=foobars,
+ primaryjoin=foos.c.id == foobars.c.fid,
+ secondaryjoin=foobars.c.bid <= bars.c.id,
+ foreign_keys=[foobars.c.fid, foobars.c.bid])})
mapper(Bar, bars)
self._assert_raises_no_equality(
@@ -3556,6 +3810,7 @@ class InvalidRelationshipEscalationTestM2M(_RelationshipErrors, fixtures.MappedT
"secondary"
)
+
class ActiveHistoryFlagTest(_fixtures.FixtureTest):
run_inserts = None
run_deletes = None
@@ -3572,27 +3827,27 @@ class ActiveHistoryFlagTest(_fixtures.FixtureTest):
setattr(obj, attrname, newvalue)
eq_(
attributes.get_history(obj, attrname),
- ([newvalue,], (), [oldvalue,])
+ ([newvalue, ], (), [oldvalue, ])
)
def test_column_property_flag(self):
User, users = self.classes.User, self.tables.users
mapper(User, users, properties={
- 'name':column_property(users.c.name,
- active_history=True)
+ 'name': column_property(users.c.name,
+ active_history=True)
})
u1 = User(name='jack')
self._test_attribute(u1, 'name', 'ed')
def test_relationship_property_flag(self):
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(Address, addresses, properties={
- 'user':relationship(User, active_history=True)
+ 'user': relationship(User, active_history=True)
})
mapper(User, users)
u1 = User(name='jack')
@@ -3604,27 +3859,30 @@ class ActiveHistoryFlagTest(_fixtures.FixtureTest):
Order, orders = self.classes.Order, self.tables.orders
class MyComposite(object):
+
def __init__(self, description, isopen):
self.description = description
self.isopen = isopen
+
def __composite_values__(self):
return [self.description, self.isopen]
+
def __eq__(self, other):
return isinstance(other, MyComposite) and \
other.description == self.description
mapper(Order, orders, properties={
- 'composite':composite(
- MyComposite,
- orders.c.description,
- orders.c.isopen,
- active_history=True)
+ 'composite': composite(
+ MyComposite,
+ orders.c.description,
+ orders.c.isopen,
+ active_history=True)
})
o1 = Order(composite=MyComposite('foo', 1))
self._test_attribute(o1, "composite", MyComposite('bar', 1))
-
class RelationDeprecationTest(fixtures.MappedTest):
+
"""test usage of the old 'relation' function."""
run_inserts = 'once'
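
For reference, the contract ActiveHistoryFlagTest above checks: with active_history=True on a column_property, relationship, or composite, the prior value is made available at set time, so attributes.get_history() can report it immediately without a flush or reload. A sketch against the User fixture assumed above:

from sqlalchemy.orm import attributes, column_property, mapper

# assuming the users table / User class from the fixtures above:
mapper(User, users, properties={
    'name': column_property(users.c.name, active_history=True)})

u1 = User(name='jack')
u1.name = 'ed'
h = attributes.get_history(u1, 'name')
# the replaced value is present immediately:
assert (h.added, h.unchanged, h.deleted) == (['ed'], (), ['jack'])
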
@@ -3655,34 +3913,32 @@ class RelationDeprecationTest(fixtures.MappedTest):
def fixtures(cls):
return dict(
users_table=(
- ('id', 'name'),
- (1, 'jack'),
- (2, 'ed'),
- (3, 'fred'),
- (4, 'chuck')),
+ ('id', 'name'),
+ (1, 'jack'),
+ (2, 'ed'),
+ (3, 'fred'),
+ (4, 'chuck')),
addresses_table=(
- ('id', 'user_id', 'email_address', 'purpose', 'bounces'),
- (1, 1, 'jack@jack.home', 'Personal', 0),
- (2, 1, 'jack@jack.bizz', 'Work', 1),
- (3, 2, 'ed@foo.bar', 'Personal', 0),
- (4, 3, 'fred@the.fred', 'Personal', 10)))
+ ('id', 'user_id', 'email_address', 'purpose', 'bounces'),
+ (1, 1, 'jack@jack.home', 'Personal', 0),
+ (2, 1, 'jack@jack.bizz', 'Work', 1),
+ (3, 2, 'ed@foo.bar', 'Personal', 0),
+ (4, 3, 'fred@the.fred', 'Personal', 10)))
def test_relation(self):
- addresses_table, User, users_table, Address = (self.tables.addresses_table,
- self.classes.User,
- self.tables.users_table,
- self.classes.Address)
+ addresses_table, User, users_table, Address = (
+ self.tables.addresses_table,
+ self.classes.User,
+ self.tables.users_table,
+ self.classes.Address)
mapper(User, users_table, properties=dict(
addresses=relation(Address, backref='user'),
- ))
+ ))
mapper(Address, addresses_table)
session = create_session()
- ed = session.query(User).filter(User.addresses.any(
+ session.query(User).filter(User.addresses.any(
Address.email_address == 'ed@foo.bar')).one()
-
-
-
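
The only functional change in test_relation() above is dropping an unused local; the query itself is a compact illustration of relationship().any(), which compiles to a correlated EXISTS. Roughly, against the fixtures above:

q = session.query(User).filter(
    User.addresses.any(Address.email_address == 'ed@foo.bar'))
# renders approximately:
# SELECT ... FROM users_table WHERE EXISTS (
#     SELECT 1 FROM addresses_table
#     WHERE users_table.id = addresses_table.user_id
#     AND addresses_table.email_address = :email_address_1)
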
diff --git a/test/orm/test_session.py b/test/orm/test_session.py
index 74a7a7442..96728612d 100644
--- a/test/orm/test_session.py
+++ b/test/orm/test_session.py
@@ -18,194 +18,6 @@ from sqlalchemy.testing import fixtures
from test.orm import _fixtures
from sqlalchemy import event, ForeignKey
-class BindTest(_fixtures.FixtureTest):
- run_inserts = None
-
- def test_mapped_binds(self):
- Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
-
-
- # ensure tables are unbound
- m2 = sa.MetaData()
- users_unbound = users.tometadata(m2)
- addresses_unbound = addresses.tometadata(m2)
-
- mapper(Address, addresses_unbound)
- mapper(User, users_unbound, properties={
- 'addresses': relationship(Address,
- backref=backref("user", cascade="all"),
- cascade="all")})
-
- sess = Session(binds={User: self.metadata.bind,
- Address: self.metadata.bind})
-
- u1 = User(id=1, name='ed')
- sess.add(u1)
- eq_(sess.query(User).filter(User.id == 1).all(),
- [User(id=1, name='ed')])
-
- # test expression binding
-
- sess.execute(users_unbound.insert(), params=dict(id=2,
- name='jack'))
- eq_(sess.execute(users_unbound.select(users_unbound.c.id
- == 2)).fetchall(), [(2, 'jack')])
-
- eq_(sess.execute(users_unbound.select(User.id == 2)).fetchall(),
- [(2, 'jack')])
-
- sess.execute(users_unbound.delete())
- eq_(sess.execute(users_unbound.select()).fetchall(), [])
-
- sess.close()
-
- def test_table_binds(self):
- Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
-
-
- # ensure tables are unbound
- m2 = sa.MetaData()
- users_unbound = users.tometadata(m2)
- addresses_unbound = addresses.tometadata(m2)
-
- mapper(Address, addresses_unbound)
- mapper(User, users_unbound, properties={
- 'addresses': relationship(Address,
- backref=backref("user", cascade="all"),
- cascade="all")})
-
- Session = sessionmaker(binds={users_unbound: self.metadata.bind,
- addresses_unbound: self.metadata.bind})
- sess = Session()
-
- u1 = User(id=1, name='ed')
- sess.add(u1)
- eq_(sess.query(User).filter(User.id == 1).all(),
- [User(id=1, name='ed')])
-
- sess.execute(users_unbound.insert(), params=dict(id=2, name='jack'))
-
- eq_(sess.execute(users_unbound.select(users_unbound.c.id
- == 2)).fetchall(), [(2, 'jack')])
-
- eq_(sess.execute(users_unbound.select(User.id == 2)).fetchall(),
- [(2, 'jack')])
-
- sess.execute(users_unbound.delete())
- eq_(sess.execute(users_unbound.select()).fetchall(), [])
-
- sess.close()
-
- def test_bind_from_metadata(self):
- users, User = self.tables.users, self.classes.User
-
- mapper(User, users)
-
- session = create_session()
- session.execute(users.insert(), dict(name='Johnny'))
-
- assert len(session.query(User).filter_by(name='Johnny').all()) == 1
-
- session.execute(users.delete())
-
- assert len(session.query(User).filter_by(name='Johnny').all()) == 0
- session.close()
-
- def test_bind_arguments(self):
- users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
-
- mapper(User, users)
- mapper(Address, addresses)
-
- e1 = engines.testing_engine()
- e2 = engines.testing_engine()
- e3 = engines.testing_engine()
-
- sess = Session(e3)
- sess.bind_mapper(User, e1)
- sess.bind_mapper(Address, e2)
-
- assert sess.connection().engine is e3
- assert sess.connection(bind=e1).engine is e1
- assert sess.connection(mapper=Address, bind=e1).engine is e1
- assert sess.connection(mapper=Address).engine is e2
- assert sess.connection(clause=addresses.select()).engine is e2
- assert sess.connection(mapper=User,
- clause=addresses.select()).engine is e1
- assert sess.connection(mapper=User,
- clause=addresses.select(),
- bind=e2).engine is e2
-
- sess.close()
-
- @engines.close_open_connections
- def test_bound_connection(self):
- users, User = self.tables.users, self.classes.User
-
- mapper(User, users)
- c = testing.db.connect()
- sess = create_session(bind=c)
- sess.begin()
- transaction = sess.transaction
- u = User(name='u1')
- sess.add(u)
- sess.flush()
- assert transaction._connection_for_bind(testing.db) \
- is transaction._connection_for_bind(c) is c
-
- assert_raises_message(sa.exc.InvalidRequestError,
- 'Session already has a Connection '
- 'associated',
- transaction._connection_for_bind,
- testing.db.connect())
- transaction.rollback()
- assert len(sess.query(User).all()) == 0
- sess.close()
-
- def test_bound_connection_transactional(self):
- User, users = self.classes.User, self.tables.users
-
- mapper(User, users)
- c = testing.db.connect()
-
- sess = create_session(bind=c, autocommit=False)
- u = User(name='u1')
- sess.add(u)
- sess.flush()
- sess.close()
- assert not c.in_transaction()
- assert c.scalar("select count(1) from users") == 0
-
- sess = create_session(bind=c, autocommit=False)
- u = User(name='u2')
- sess.add(u)
- sess.flush()
- sess.commit()
- assert not c.in_transaction()
- assert c.scalar("select count(1) from users") == 1
- c.execute("delete from users")
- assert c.scalar("select count(1) from users") == 0
-
- c = testing.db.connect()
-
- trans = c.begin()
- sess = create_session(bind=c, autocommit=True)
- u = User(name='u3')
- sess.add(u)
- sess.flush()
- assert c.in_transaction()
- trans.commit()
- assert not c.in_transaction()
- assert c.scalar("select count(1) from users") == 1
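
The BindTest block removed above exercised per-entity bind routing on the Session; for reference, the core pattern it covered, with User and Address standing in for any mapped classes (assumed mapped, as in the removed fixtures):

from sqlalchemy import create_engine
from sqlalchemy.orm import Session

e1 = create_engine('sqlite://')
e2 = create_engine('sqlite://')
# each entity routes to its own engine:
sess = Session(binds={User: e1, Address: e2})
assert sess.get_bind(mapper=User) is e1
assert sess.get_bind(mapper=Address) is e2
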
class ExecutionTest(_fixtures.FixtureTest):
run_inserts = None
@@ -392,6 +204,7 @@ class SessionUtilTest(_fixtures.FixtureTest):
sess.flush()
make_transient(u1)
sess.rollback()
+ assert attributes.instance_state(u1).transient
def test_make_transient_to_detached(self):
users, User = self.tables.users, self.classes.User
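
The added assertion pins down that a rollback does not resurrect an object that was made transient after a flush. A self-contained sketch of that contract (toy model, not the fixture above):

from sqlalchemy import Column, Integer, String, create_engine, inspect
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session, make_transient

Base = declarative_base()

class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    name = Column(String(32))

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)

sess = Session(engine)
u1 = User(name='x')
sess.add(u1)
sess.flush()          # u1 is now persistent within the transaction
make_transient(u1)    # strips identity and detaches u1 from sess
sess.rollback()       # must not pull u1 back in as pending
assert inspect(u1).transient
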
@@ -849,7 +662,7 @@ class SessionStateTest(_fixtures.FixtureTest):
go()
eq_(canary, [False])
- def test_deleted_expunged(self):
+ def test_deleted_auto_expunged(self):
users, User = self.tables.users, self.classes.User
mapper(User, users)
@@ -870,6 +683,53 @@ class SessionStateTest(_fixtures.FixtureTest):
assert object_session(u1) is None
+ def test_explicit_expunge_pending(self):
+ users, User = self.tables.users, self.classes.User
+
+ mapper(User, users)
+ sess = Session()
+ u1 = User(name='x')
+ sess.add(u1)
+
+ sess.flush()
+ sess.expunge(u1)
+
+ assert u1 not in sess
+ assert object_session(u1) is None
+
+ sess.rollback()
+
+ assert u1 not in sess
+ assert object_session(u1) is None
+
+ def test_explicit_expunge_deleted(self):
+ users, User = self.tables.users, self.classes.User
+
+ mapper(User, users)
+ sess = Session()
+ sess.add(User(name='x'))
+ sess.commit()
+
+ u1 = sess.query(User).first()
+ sess.delete(u1)
+
+ sess.flush()
+
+ assert was_deleted(u1)
+ assert u1 not in sess
+ assert object_session(u1) is sess
+
+ sess.expunge(u1)
+ assert was_deleted(u1)
+ assert u1 not in sess
+ assert object_session(u1) is None
+
+ sess.rollback()
+ assert was_deleted(u1)
+ assert u1 not in sess
+ assert object_session(u1) is None
+
+
class SessionStateWFixtureTest(_fixtures.FixtureTest):
__backend__ = True
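
Continuing with the same toy model: the two new tests assert that an explicit expunge() now works on pending objects and on flushed-but-uncommitted deleted objects, and that a later rollback leaves them expunged. In sketch form:

from sqlalchemy.orm import object_session

sess = Session(engine)
sess.add(User(name='x'))
sess.commit()

u1 = sess.query(User).first()
sess.delete(u1)
sess.flush()                       # DELETE emitted, not yet committed
assert u1 not in sess              # flushed-deleted objects leave "in"
assert object_session(u1) is sess  # but are still owned by the session

sess.expunge(u1)                   # explicit expunge works here now
assert object_session(u1) is None
sess.rollback()                    # and rollback keeps it expunged
assert object_session(u1) is None
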
@@ -1591,14 +1451,19 @@ class SessionInterface(fixtures.TestBase):
eq_(watchdog, instance_methods,
watchdog.symmetric_difference(instance_methods))
- def _test_class_guards(self, user_arg):
+ def _test_class_guards(self, user_arg, is_class=True):
watchdog = set()
def raises_(method, *args, **kw):
watchdog.add(method)
callable_ = getattr(create_session(), method)
- assert_raises(sa.orm.exc.UnmappedClassError,
- callable_, *args, **kw)
+ if is_class:
+ assert_raises(
+ sa.orm.exc.UnmappedClassError,
+ callable_, *args, **kw)
+ else:
+ assert_raises(
+ sa.exc.NoInspectionAvailable, callable_, *args, **kw)
raises_('connection', mapper=user_arg)
@@ -1621,7 +1486,7 @@ class SessionInterface(fixtures.TestBase):
def test_unmapped_primitives(self):
for prim in ('doh', 123, ('t', 'u', 'p', 'l', 'e')):
self._test_instance_guards(prim)
- self._test_class_guards(prim)
+ self._test_class_guards(prim, is_class=False)
def test_unmapped_class_for_instance(self):
class Unmapped(object):
@@ -1645,7 +1510,7 @@ class SessionInterface(fixtures.TestBase):
self._map_it(Mapped)
self._test_instance_guards(early)
- self._test_class_guards(early)
+ self._test_class_guards(early, is_class=False)
class TLTransactionTest(fixtures.MappedTest):
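
The _test_class_guards change above reflects a behavioral split: passing a primitive where a mapped entity is expected now raises NoInspectionAvailable from the inspection system, while UnmappedClassError stays reserved for actual (but unmapped) classes. A sketch:

from sqlalchemy import exc
from sqlalchemy.orm import Session

sess = Session()
try:
    sess.connection(mapper='doh')   # a primitive, not a mapped entity
except exc.NoInspectionAvailable:
    pass   # inspection fails before any ORM lookup is attempted
# an actual but unmapped class still raises orm.exc.UnmappedClassError
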
diff --git a/test/orm/test_update_delete.py b/test/orm/test_update_delete.py
index 35d527ca8..a3ad37e60 100644
--- a/test/orm/test_update_delete.py
+++ b/test/orm/test_update_delete.py
@@ -1,9 +1,9 @@
from sqlalchemy.testing import eq_, assert_raises, assert_raises_message
from sqlalchemy.testing import fixtures
-from sqlalchemy import Integer, String, ForeignKey, or_, and_, exc, \
- select, func, Boolean, case, text
+from sqlalchemy import Integer, String, ForeignKey, or_, exc, \
+ select, func, Boolean, case, text, column
from sqlalchemy.orm import mapper, relationship, backref, Session, \
- joinedload, aliased
+ joinedload, synonym
from sqlalchemy import testing
from sqlalchemy.testing.schema import Table, Column
@@ -18,7 +18,7 @@ class UpdateDeleteTest(fixtures.MappedTest):
Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('name', String(32)),
- Column('age', Integer))
+ Column('age_int', Integer))
@classmethod
def setup_classes(cls):
@@ -30,10 +30,10 @@ class UpdateDeleteTest(fixtures.MappedTest):
users = cls.tables.users
users.insert().execute([
- dict(id=1, name='john', age=25),
- dict(id=2, name='jack', age=47),
- dict(id=3, name='jill', age=29),
- dict(id=4, name='jane', age=37),
+ dict(id=1, name='john', age_int=25),
+ dict(id=2, name='jack', age_int=47),
+ dict(id=3, name='jill', age_int=29),
+ dict(id=4, name='jane', age_int=37),
])
@classmethod
@@ -41,7 +41,9 @@ class UpdateDeleteTest(fixtures.MappedTest):
User = cls.classes.User
users = cls.tables.users
- mapper(User, users)
+ mapper(User, users, properties={
+ 'age': users.c.age_int
+ })
def test_illegal_eval(self):
User = self.classes.User
@@ -70,14 +72,118 @@ class UpdateDeleteTest(fixtures.MappedTest):
):
assert_raises_message(
exc.InvalidRequestError,
- r"Can't call Query.update\(\) when %s\(\) has been called" % mname,
+ r"Can't call Query.update\(\) when "
+            r"%s\(\) has been called" % mname,
q.update,
{'name': 'ed'})
assert_raises_message(
exc.InvalidRequestError,
- r"Can't call Query.delete\(\) when %s\(\) has been called" % mname,
+ r"Can't call Query.delete\(\) when "
+            r"%s\(\) has been called" % mname,
q.delete)
+ def test_evaluate_clauseelement(self):
+ User = self.classes.User
+
+ class Thing(object):
+ def __clause_element__(self):
+ return User.name.__clause_element__()
+
+ s = Session()
+ jill = s.query(User).get(3)
+ s.query(User).update(
+ {Thing(): 'moonbeam'},
+ synchronize_session='evaluate')
+ eq_(jill.name, 'moonbeam')
+
+ def test_evaluate_invalid(self):
+ User = self.classes.User
+
+ class Thing(object):
+ def __clause_element__(self):
+ return 5
+
+ s = Session()
+
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Invalid expression type: 5",
+ s.query(User).update, {Thing(): 'moonbeam'},
+ synchronize_session='evaluate'
+ )
+
+ def test_evaluate_unmapped_col(self):
+ User = self.classes.User
+
+ s = Session()
+ jill = s.query(User).get(3)
+ s.query(User).update(
+ {column('name'): 'moonbeam'},
+ synchronize_session='evaluate')
+ eq_(jill.name, 'jill')
+ s.expire(jill)
+ eq_(jill.name, 'moonbeam')
+
+ def test_evaluate_synonym_string(self):
+ class Foo(object):
+ pass
+ mapper(Foo, self.tables.users, properties={
+ 'uname': synonym("name", )
+ })
+
+ s = Session()
+ jill = s.query(Foo).get(3)
+ s.query(Foo).update(
+ {'uname': 'moonbeam'},
+ synchronize_session='evaluate')
+ eq_(jill.uname, 'moonbeam')
+
+ def test_evaluate_synonym_attr(self):
+ class Foo(object):
+ pass
+ mapper(Foo, self.tables.users, properties={
+ 'uname': synonym("name", )
+ })
+
+ s = Session()
+ jill = s.query(Foo).get(3)
+ s.query(Foo).update(
+ {Foo.uname: 'moonbeam'},
+ synchronize_session='evaluate')
+ eq_(jill.uname, 'moonbeam')
+
+ def test_evaluate_double_synonym_attr(self):
+ class Foo(object):
+ pass
+ mapper(Foo, self.tables.users, properties={
+ 'uname': synonym("name"),
+ 'ufoo': synonym('uname')
+ })
+
+ s = Session()
+ jill = s.query(Foo).get(3)
+ s.query(Foo).update(
+ {Foo.ufoo: 'moonbeam'},
+ synchronize_session='evaluate')
+ eq_(jill.ufoo, 'moonbeam')
+
+ def test_evaluate_hybrid_attr(self):
+ from sqlalchemy.ext.hybrid import hybrid_property
+
+ class Foo(object):
+ @hybrid_property
+ def uname(self):
+ return self.name
+
+ mapper(Foo, self.tables.users)
+
+ s = Session()
+ jill = s.query(Foo).get(3)
+ s.query(Foo).update(
+ {Foo.uname: 'moonbeam'},
+ synchronize_session='evaluate')
+ eq_(jill.uname, 'moonbeam')
+
def test_delete(self):
User = self.classes.User
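
The new tests above broaden what Query.update() resolves as keys under synchronize_session='evaluate': plain strings, mapped attributes, synonyms (including chained ones), hybrid properties, and arbitrary objects providing __clause_element__(), so long as the expression leads back to a mapped column. Mirroring test_evaluate_clauseelement, reusing the toy User model and engine from the make_transient sketch:

class NameRef(object):
    # any object whose __clause_element__() resolves to a mapped
    # column is accepted as an UPDATE key
    def __clause_element__(self):
        return User.name.__clause_element__()

sess = Session(engine)
sess.add(User(name='jill'))
sess.commit()

jill = sess.query(User).first()
sess.query(User).update({NameRef(): 'moonbeam'},
                        synchronize_session='evaluate')
# 'evaluate' applies the change to in-session objects in Python,
# so no reload is needed:
assert jill.name == 'moonbeam'
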
@@ -116,7 +222,8 @@ class UpdateDeleteTest(fixtures.MappedTest):
sess = Session()
john, jack, jill, jane = sess.query(User).order_by(User.id).all()
- sess.query(User).filter(or_(User.name == 'john', User.name == 'jill')).\
+ sess.query(User).filter(
+ or_(User.name == 'john', User.name == 'jill')).\
delete(synchronize_session='evaluate')
assert john not in sess and jill not in sess
sess.rollback()
@@ -127,7 +234,8 @@ class UpdateDeleteTest(fixtures.MappedTest):
sess = Session()
john, jack, jill, jane = sess.query(User).order_by(User.id).all()
- sess.query(User).filter(or_(User.name == 'john', User.name == 'jill')).\
+ sess.query(User).filter(
+ or_(User.name == 'john', User.name == 'jill')).\
delete(synchronize_session='fetch')
assert john not in sess and jill not in sess
sess.rollback()
@@ -139,7 +247,8 @@ class UpdateDeleteTest(fixtures.MappedTest):
sess = Session()
john, jack, jill, jane = sess.query(User).order_by(User.id).all()
- sess.query(User).filter(or_(User.name == 'john', User.name == 'jill')).\
+ sess.query(User).filter(
+ or_(User.name == 'john', User.name == 'jill')).\
delete(synchronize_session=False)
assert john in sess and jill in sess
@@ -152,7 +261,8 @@ class UpdateDeleteTest(fixtures.MappedTest):
sess = Session()
john, jack, jill, jane = sess.query(User).order_by(User.id).all()
- sess.query(User).filter(or_(User.name == 'john', User.name == 'jill')).\
+ sess.query(User).filter(
+ or_(User.name == 'john', User.name == 'jill')).\
delete(synchronize_session='fetch')
assert john not in sess and jill not in sess
@@ -202,7 +312,8 @@ class UpdateDeleteTest(fixtures.MappedTest):
sess.query(User).filter(User.age > 27).\
update(
- {users.c.age: User.age - 10}, synchronize_session='evaluate')
+ {users.c.age_int: User.age - 10},
+ synchronize_session='evaluate')
eq_([john.age, jack.age, jill.age, jane.age], [25, 27, 19, 27])
eq_(sess.query(User.age).order_by(
User.id).all(), list(zip([25, 27, 19, 27])))
@@ -213,12 +324,25 @@ class UpdateDeleteTest(fixtures.MappedTest):
eq_(sess.query(User.age).order_by(
User.id).all(), list(zip([15, 27, 19, 27])))
+ def test_update_against_table_col(self):
+ User, users = self.classes.User, self.tables.users
+
+ sess = Session()
+ john, jack, jill, jane = sess.query(User).order_by(User.id).all()
+ eq_([john.age, jack.age, jill.age, jane.age], [25, 47, 29, 37])
+ sess.query(User).filter(User.age > 27).\
+ update(
+ {users.c.age_int: User.age - 10},
+ synchronize_session='evaluate')
+ eq_([john.age, jack.age, jill.age, jane.age], [25, 37, 19, 27])
+
def test_update_against_metadata(self):
User, users = self.classes.User, self.tables.users
sess = Session()
- sess.query(users).update({users.c.age: 29}, synchronize_session=False)
+ sess.query(users).update(
+ {users.c.age_int: 29}, synchronize_session=False)
eq_(sess.query(User.age).order_by(
User.id).all(), list(zip([29, 29, 29, 29])))
@@ -229,7 +353,7 @@ class UpdateDeleteTest(fixtures.MappedTest):
john, jack, jill, jane = sess.query(User).order_by(User.id).all()
- sess.query(User).filter(text('age > :x')).params(x=29).\
+ sess.query(User).filter(text('age_int > :x')).params(x=29).\
update({'age': User.age - 10}, synchronize_session='fetch')
eq_([john.age, jack.age, jill.age, jane.age], [25, 37, 29, 27])
@@ -393,7 +517,7 @@ class UpdateDeleteTest(fixtures.MappedTest):
sess.query(User).filter_by(name='j2').\
delete(
- synchronize_session='evaluate')
+ synchronize_session='evaluate')
assert john not in sess
def test_autoflush_before_fetch_delete(self):
@@ -405,7 +529,7 @@ class UpdateDeleteTest(fixtures.MappedTest):
sess.query(User).filter_by(name='j2').\
delete(
- synchronize_session='fetch')
+ synchronize_session='fetch')
assert john not in sess
def test_evaluate_before_update(self):
@@ -447,7 +571,7 @@ class UpdateDeleteTest(fixtures.MappedTest):
sess.query(User).filter_by(name='john').\
filter_by(age=25).\
delete(
- synchronize_session='evaluate')
+ synchronize_session='evaluate')
assert john not in sess
def test_fetch_before_delete(self):
@@ -460,7 +584,7 @@ class UpdateDeleteTest(fixtures.MappedTest):
sess.query(User).filter_by(name='john').\
filter_by(age=25).\
delete(
- synchronize_session='fetch')
+ synchronize_session='fetch')
assert john not in sess
@@ -540,7 +664,8 @@ class UpdateDeleteIgnoresLoadersTest(fixtures.MappedTest):
sess = Session()
john, jack, jill, jane = sess.query(User).order_by(User.id).all()
- sess.query(User).options(joinedload(User.documents)).filter(User.age > 29).\
+ sess.query(User).options(
+ joinedload(User.documents)).filter(User.age > 29).\
update({'age': User.age - 10}, synchronize_session='fetch')
eq_([john.age, jack.age, jill.age, jane.age], [25, 37, 29, 27])
@@ -632,8 +757,7 @@ class UpdateDeleteFromTest(fixtures.MappedTest):
set([
(1, True), (2, None),
(3, None), (4, True),
- (5, True), (6, None),
- ])
+ (5, True), (6, None)])
)
def test_no_eval_against_multi_table_criteria(self):
@@ -666,8 +790,7 @@ class UpdateDeleteFromTest(fixtures.MappedTest):
set([
(1, True), (2, None),
(3, None), (4, True),
- (5, True), (6, None),
- ])
+ (5, True), (6, None)])
)
@testing.requires.update_where_target_in_subquery
@@ -690,8 +813,7 @@ class UpdateDeleteFromTest(fixtures.MappedTest):
set([
(1, True), (2, False),
(3, False), (4, True),
- (5, True), (6, False),
- ])
+ (5, True), (6, False)])
)
@testing.only_on('mysql', 'Multi table update')
@@ -706,8 +828,7 @@ class UpdateDeleteFromTest(fixtures.MappedTest):
filter(User.id == 2).update({
Document.samename: 'd_samename',
User.samename: 'u_samename'
- }, synchronize_session=False
- )
+ }, synchronize_session=False)
eq_(
s.query(User.id, Document.samename, User.samename).
filter(User.id == Document.user_id).
diff --git a/test/profiles.txt b/test/profiles.txt
index 12222b637..dc4d05264 100644
--- a/test/profiles.txt
+++ b/test/profiles.txt
@@ -1,34 +1,28 @@
# /Users/classic/dev/sqlalchemy/test/profiles.txt
# This file is written out on a per-environment basis.
-# For each test in aaa_profiling, the corresponding function and
+# For each test in aaa_profiling, the corresponding function and
# environment is located within this file. If it doesn't exist,
# the test is skipped.
-# If a callcount does exist, it is compared to what we received.
+# If a callcount does exist, it is compared to what we received.
# assertions are raised if the counts do not match.
-#
-# To add a new callcount test, apply the function_call_count
-# decorator and re-run the tests using the --write-profiles
+#
+# To add a new callcount test, apply the function_call_count
+# decorator and re-run the tests using the --write-profiles
# option - this file will be rewritten including the new count.
-#
+#
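
As the header comments above explain, new callcount tests are written with the function_call_count decorator and recorded with --write-profiles. The pattern used throughout test/aaa_profiling, in sketch form (do_work is a placeholder for the code under measurement):

from sqlalchemy.testing import profiling

@profiling.function_call_count()
def test_something():
    # running with --write-profiles records the observed callcount
    # into profiles.txt, keyed by the current environment
    do_work()
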
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_insert
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqlconnector_cextensions 74
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqlconnector_nocextensions 74
test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqldb_cextensions 74
test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqldb_nocextensions 74
test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_cextensions 74
test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_nocextensions 74
test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_cextensions 74
test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_nocextensions 74
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_mysql_mysqlconnector_cextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_mysql_mysqlconnector_nocextensions 77
test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_postgresql_psycopg2_cextensions 77
test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_postgresql_psycopg2_nocextensions 77
test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_sqlite_pysqlite_cextensions 77
test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_sqlite_pysqlite_nocextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_mysql_mysqlconnector_cextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_mysql_mysqlconnector_nocextensions 77
test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_postgresql_psycopg2_cextensions 77
test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_postgresql_psycopg2_nocextensions 77
test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_sqlite_pysqlite_cextensions 77
@@ -36,22 +30,16 @@ test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_sqlite_pysqlite_noc
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_select
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqlconnector_cextensions 152
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqlconnector_nocextensions 152
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_cextensions 152
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_nocextensions 152
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_cextensions 152
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_nocextensions 152
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_cextensions 152
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_nocextensions 152
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_mysql_mysqlconnector_cextensions 165
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_mysql_mysqlconnector_nocextensions 165
test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_postgresql_psycopg2_cextensions 165
test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_postgresql_psycopg2_nocextensions 165
test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_sqlite_pysqlite_cextensions 165
test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_sqlite_pysqlite_nocextensions 165
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_mysql_mysqlconnector_cextensions 165
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_mysql_mysqlconnector_nocextensions 165
test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_postgresql_psycopg2_cextensions 165
test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_postgresql_psycopg2_nocextensions 165
test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_sqlite_pysqlite_cextensions 165
@@ -59,22 +47,16 @@ test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_sqlite_pysqlite_noc
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_select_labels
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqlconnector_cextensions 186
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqlconnector_nocextensions 186
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqldb_cextensions 186
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqldb_nocextensions 186
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_cextensions 186
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_nocextensions 186
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_cextensions 186
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_nocextensions 186
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_mysql_mysqlconnector_cextensions 199
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_mysql_mysqlconnector_nocextensions 199
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_postgresql_psycopg2_cextensions 199
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_postgresql_psycopg2_nocextensions 199
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_sqlite_pysqlite_cextensions 199
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_sqlite_pysqlite_nocextensions 199
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_mysql_mysqlconnector_cextensions 199
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_mysql_mysqlconnector_nocextensions 199
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_postgresql_psycopg2_cextensions 199
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_postgresql_psycopg2_nocextensions 199
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_sqlite_pysqlite_cextensions 199
@@ -82,22 +64,16 @@ test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_sqlite_pysql
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_update
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqlconnector_cextensions 79
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqlconnector_nocextensions 79
test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqldb_cextensions 79
test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqldb_nocextensions 79
test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_cextensions 77
test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_nocextensions 77
test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_cextensions 77
test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_nocextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_mysql_mysqlconnector_cextensions 80
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_mysql_mysqlconnector_nocextensions 80
test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_postgresql_psycopg2_cextensions 78
test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_postgresql_psycopg2_nocextensions 78
test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_sqlite_pysqlite_cextensions 78
test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_sqlite_pysqlite_nocextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_mysql_mysqlconnector_cextensions 80
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_mysql_mysqlconnector_nocextensions 80
test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_postgresql_psycopg2_cextensions 78
test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_postgresql_psycopg2_nocextensions 78
test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_sqlite_pysqlite_cextensions 78
@@ -105,22 +81,16 @@ test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_sqlite_pysqlite_noc
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqlconnector_cextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqlconnector_nocextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_cextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_nocextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_cextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_nocextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_cextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_nocextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_mysql_mysqlconnector_cextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_mysql_mysqlconnector_nocextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_postgresql_psycopg2_cextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_postgresql_psycopg2_nocextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_sqlite_pysqlite_cextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_sqlite_pysqlite_nocextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_mysql_mysqlconnector_cextensions 148
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_mysql_mysqlconnector_nocextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_postgresql_psycopg2_cextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_postgresql_psycopg2_nocextensions 148
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_sqlite_pysqlite_cextensions 148
@@ -134,8 +104,6 @@ test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_postgre
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_postgresql_psycopg2_nocextensions 4265
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_sqlite_pysqlite_cextensions 4265
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_sqlite_pysqlite_nocextensions 4260
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_mysql_mysqlconnector_cextensions 4266
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_mysql_mysqlconnector_nocextensions 4266
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_postgresql_psycopg2_nocextensions 4266
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_sqlite_pysqlite_cextensions 4266
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_sqlite_pysqlite_nocextensions 4266
@@ -150,8 +118,6 @@ test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove
test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_postgresql_psycopg2_nocextensions 6426
test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_sqlite_pysqlite_cextensions 6426
test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_sqlite_pysqlite_nocextensions 6426
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_mysql_mysqlconnector_cextensions 6428
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_mysql_mysqlconnector_nocextensions 6428
test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_postgresql_psycopg2_nocextensions 6428
test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_sqlite_pysqlite_cextensions 6428
test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_sqlite_pysqlite_nocextensions 6428
@@ -166,8 +132,8 @@ test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_postgresql_psycop
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_postgresql_psycopg2_nocextensions 40149
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_sqlite_pysqlite_cextensions 19280
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_sqlite_pysqlite_nocextensions 28297
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_mysql_mysqlconnector_cextensions 107603
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_mysql_mysqlconnector_nocextensions 116606
+
+
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_postgresql_psycopg2_nocextensions 29138
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_sqlite_pysqlite_cextensions 32398
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_sqlite_pysqlite_nocextensions 37327
@@ -182,8 +148,8 @@ test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_postgresql
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_postgresql_psycopg2_nocextensions 30054
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_sqlite_pysqlite_cextensions 27144
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_sqlite_pysqlite_nocextensions 30149
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_mysql_mysqlconnector_cextensions 53281
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_mysql_mysqlconnector_nocextensions 56284
+
+
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_postgresql_psycopg2_nocextensions 29068
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_sqlite_pysqlite_cextensions 32197
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_sqlite_pysqlite_nocextensions 31179
@@ -198,8 +164,8 @@ test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_postgresql_psycopg2_nocextensions 17988
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_sqlite_pysqlite_cextensions 17988
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_sqlite_pysqlite_nocextensions 17988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_mysql_mysqlconnector_cextensions 18988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_mysql_mysqlconnector_nocextensions 18988
+
+
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_postgresql_psycopg2_nocextensions 18988
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_sqlite_pysqlite_cextensions 18988
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_sqlite_pysqlite_nocextensions 18988
@@ -214,8 +180,8 @@ test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_postgresql_psycopg2_nocextensions 122553
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_sqlite_pysqlite_cextensions 162315
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_sqlite_pysqlite_nocextensions 165111
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_mysql_mysqlconnector_cextensions 200102
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_mysql_mysqlconnector_nocextensions 201852
+
+
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_postgresql_psycopg2_nocextensions 125352
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_sqlite_pysqlite_cextensions 169566
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_sqlite_pysqlite_nocextensions 171364
@@ -230,8 +196,8 @@ test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_postgresql_psycopg2_nocextensions 19219
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_cextensions 22288
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_nocextensions 22530
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_mysql_mysqlconnector_cextensions 24956
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_mysql_mysqlconnector_nocextensions 24936
+
+
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_postgresql_psycopg2_nocextensions 19492
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_sqlite_pysqlite_cextensions 23067
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_sqlite_pysqlite_nocextensions 23271
@@ -246,8 +212,8 @@ test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_postgresql_psycopg2_ce
test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_postgresql_psycopg2_nocextensions 1348
test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_cextensions 1601
test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_nocextensions 1626
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_mysql_mysqlconnector_cextensions 2215
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_mysql_mysqlconnector_nocextensions 2230
+
+
test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_postgresql_psycopg2_nocextensions 1355
test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_sqlite_pysqlite_cextensions 1656
test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_sqlite_pysqlite_nocextensions 1671
@@ -262,8 +228,8 @@ test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_postgresql_psycopg2
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_postgresql_psycopg2_nocextensions 117,18
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_sqlite_pysqlite_cextensions 117,18
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_sqlite_pysqlite_nocextensions 117,18
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_mysql_mysqlconnector_cextensions 122,19
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_mysql_mysqlconnector_nocextensions 122,19
+
+
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_postgresql_psycopg2_nocextensions 122,19
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_sqlite_pysqlite_cextensions 122,19
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_sqlite_pysqlite_nocextensions 122,19
@@ -278,8 +244,8 @@ test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_postgresql_psy
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_postgresql_psycopg2_nocextensions 91
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_sqlite_pysqlite_cextensions 91
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_sqlite_pysqlite_nocextensions 91
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_mysql_mysqlconnector_cextensions 78
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_mysql_mysqlconnector_nocextensions 78
+
+
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_postgresql_psycopg2_nocextensions 78
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_sqlite_pysqlite_cextensions 78
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_sqlite_pysqlite_nocextensions 78
@@ -294,8 +260,8 @@ test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_postgresql_ps
test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_postgresql_psycopg2_nocextensions 31
test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_sqlite_pysqlite_cextensions 31
test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_sqlite_pysqlite_nocextensions 31
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_mysql_mysqlconnector_cextensions 24
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_mysql_mysqlconnector_nocextensions 24
+
+
test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_postgresql_psycopg2_nocextensions 24
test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_sqlite_pysqlite_cextensions 24
test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_sqlite_pysqlite_nocextensions 24
@@ -310,8 +276,8 @@ test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_po
test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_postgresql_psycopg2_nocextensions 8
test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_sqlite_pysqlite_cextensions 8
test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_sqlite_pysqlite_nocextensions 8
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_mysql_mysqlconnector_cextensions 9
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_mysql_mysqlconnector_nocextensions 9
+
+
test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_postgresql_psycopg2_nocextensions 9
test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_sqlite_pysqlite_cextensions 9
test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_sqlite_pysqlite_nocextensions 9
@@ -320,22 +286,22 @@ test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.4_po
# TEST: test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqlconnector_cextensions 43
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqlconnector_nocextensions 45
+
+
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqldb_cextensions 43
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqldb_nocextensions 45
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_postgresql_psycopg2_cextensions 43
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_postgresql_psycopg2_nocextensions 45
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_sqlite_pysqlite_cextensions 43
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_sqlite_pysqlite_nocextensions 45
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_mysql_mysqlconnector_cextensions 43
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_mysql_mysqlconnector_nocextensions 43
+
+
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_postgresql_psycopg2_cextensions 43
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_postgresql_psycopg2_nocextensions 43
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_sqlite_pysqlite_cextensions 43
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_sqlite_pysqlite_nocextensions 43
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_mysql_mysqlconnector_cextensions 43
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_mysql_mysqlconnector_nocextensions 43
+
+
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_postgresql_psycopg2_cextensions 43
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_postgresql_psycopg2_nocextensions 43
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_sqlite_pysqlite_cextensions 43
@@ -343,22 +309,22 @@ test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute
# TEST: test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqlconnector_cextensions 78
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqlconnector_nocextensions 80
+
+
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqldb_cextensions 78
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqldb_nocextensions 80
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_postgresql_psycopg2_cextensions 78
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_postgresql_psycopg2_nocextensions 80
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_sqlite_pysqlite_cextensions 78
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_sqlite_pysqlite_nocextensions 80
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_mysql_mysqlconnector_cextensions 78
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_mysql_mysqlconnector_nocextensions 78
+
+
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_postgresql_psycopg2_cextensions 78
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_postgresql_psycopg2_nocextensions 78
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_sqlite_pysqlite_cextensions 78
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_sqlite_pysqlite_nocextensions 78
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_mysql_mysqlconnector_cextensions 78
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_mysql_mysqlconnector_nocextensions 78
+
+
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_postgresql_psycopg2_cextensions 78
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_postgresql_psycopg2_nocextensions 78
test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_sqlite_pysqlite_cextensions 78
@@ -366,22 +332,22 @@ test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_
# TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqlconnector_cextensions 15
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqlconnector_nocextensions 15
+
+
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqldb_cextensions 15
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqldb_nocextensions 15
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_postgresql_psycopg2_cextensions 15
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_postgresql_psycopg2_nocextensions 15
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_sqlite_pysqlite_cextensions 15
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_sqlite_pysqlite_nocextensions 15
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_mysql_mysqlconnector_cextensions 16
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_mysql_mysqlconnector_nocextensions 16
+
+
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_postgresql_psycopg2_cextensions 16
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_postgresql_psycopg2_nocextensions 16
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_sqlite_pysqlite_cextensions 16
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_sqlite_pysqlite_nocextensions 16
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_mysql_mysqlconnector_cextensions 16
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_mysql_mysqlconnector_nocextensions 16
+
+
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_postgresql_psycopg2_cextensions 16
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_postgresql_psycopg2_nocextensions 16
test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_sqlite_pysqlite_cextensions 16
@@ -389,22 +355,22 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4
# TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_string
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqlconnector_cextensions 92959
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqlconnector_nocextensions 107979
+
+
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_cextensions 514
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_nocextensions 15534
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_cextensions 20501
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_nocextensions 35521
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_cextensions 457
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_nocextensions 15477
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_mysql_mysqlconnector_cextensions 109136
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_mysql_mysqlconnector_nocextensions 123136
+
+
test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_postgresql_psycopg2_cextensions 489
test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_postgresql_psycopg2_nocextensions 14489
test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_sqlite_pysqlite_cextensions 462
test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_sqlite_pysqlite_nocextensions 14462
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_mysql_mysqlconnector_cextensions 79876
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_mysql_mysqlconnector_nocextensions 93876
+
+
test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_postgresql_psycopg2_cextensions 489
test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_postgresql_psycopg2_nocextensions 14489
test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_sqlite_pysqlite_cextensions 462
@@ -412,22 +378,22 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_sqlite_pysqlite_
# TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_unicode
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqlconnector_cextensions 92959
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqlconnector_nocextensions 107979
+
+
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_cextensions 514
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_nocextensions 45534
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_cextensions 20501
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_nocextensions 35521
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_cextensions 457
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_nocextensions 15477
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_mysql_mysqlconnector_cextensions 109136
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_mysql_mysqlconnector_nocextensions 123136
+
+
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_postgresql_psycopg2_cextensions 489
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_postgresql_psycopg2_nocextensions 14489
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_sqlite_pysqlite_cextensions 462
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_sqlite_pysqlite_nocextensions 14462
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_mysql_mysqlconnector_cextensions 79876
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_mysql_mysqlconnector_nocextensions 93876
+
+
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_postgresql_psycopg2_cextensions 489
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_postgresql_psycopg2_nocextensions 14489
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_sqlite_pysqlite_cextensions 462
diff --git a/test/requirements.py b/test/requirements.py
index 21dd2913e..daa20d05a 100644
--- a/test/requirements.py
+++ b/test/requirements.py
@@ -308,6 +308,17 @@ class DefaultRequirements(SuiteRequirements):
)
@property
+ def temp_table_names(self):
+ """target dialect supports listing of temporary table names"""
+
+ return only_on(['sqlite', 'oracle'])
+
+ @property
+ def temporary_views(self):
+ """target database supports temporary views"""
+ return only_on(['sqlite', 'postgresql'])
+
+ @property
def update_nowait(self):
"""Target database must support SELECT...FOR UPDATE NOWAIT"""
return skip_if(["firebird", "mssql", "mysql", "sqlite", "sybase"],
@@ -421,6 +432,12 @@ class DefaultRequirements(SuiteRequirements):
no_support('sybase', 'FIXME: guessing, needs confirmation'),
no_support('mssql+pymssql', 'no FreeTDS support'),
LambdaPredicate(
+ lambda config: against(config, "mysql+mysqlconnector") and
+ config.db.dialect._mysqlconnector_version_info > (2, 0) and
+ util.py2k,
+ "bug in mysqlconnector 2.0"
+ ),
+ LambdaPredicate(
lambda config: against(config, 'mssql+pyodbc') and
config.db.dialect.freetds and
config.db.dialect.freetds_driver_version < "0.91",
@@ -443,7 +460,7 @@ class DefaultRequirements(SuiteRequirements):
after an insert() construct executes.
"""
return fails_on_everything_except('mysql',
- 'sqlite+pysqlite',
+ 'sqlite+pysqlite', 'sqlite+pysqlcipher',
'sybase', 'mssql')
@property
@@ -460,7 +477,7 @@ class DefaultRequirements(SuiteRequirements):
"""
return skip_if('mssql+pymssql', 'crashes on pymssql') + \
fails_on_everything_except('mysql',
- 'sqlite+pysqlite')
+ 'sqlite+pysqlite', 'sqlite+pysqlcipher')
@property
def sane_multi_rowcount(self):
@@ -717,6 +734,14 @@ class DefaultRequirements(SuiteRequirements):
)
@property
+ def postgresql_test_dblink(self):
+ return skip_if(
+ lambda config: not config.file_config.has_option(
+ 'sqla_testing', 'postgres_test_db_link'),
+ "postgres_test_db_link option not specified in config"
+ )
+
+ @property
def percent_schema_names(self):
return skip_if(
[
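
The two requirement properties added above (temp_table_names, temporary_views) gate new reflection tests by backend. A minimal sketch of how a suite test would consume them, assuming the standard @testing.requires accessor and the Inspector.get_temp_table_names() method this series introduces; the class name is illustrative:

    from sqlalchemy import inspect, testing
    from sqlalchemy.testing import fixtures

    class TempTableNamesTest(fixtures.TestBase):
        __backend__ = True

        @testing.requires.temp_table_names
        def test_get_temp_table_names(self):
            # only runs where only_on(['sqlite', 'oracle']) matches
            insp = inspect(testing.db)
            assert isinstance(insp.get_temp_table_names(), list)
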
diff --git a/test/sql/test_compiler.py b/test/sql/test_compiler.py
index 4f8ced72c..bfafed599 100644
--- a/test/sql/test_compiler.py
+++ b/test/sql/test_compiler.py
@@ -238,6 +238,22 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
checkparams=params
)
+ def test_limit_offset_select_literal_binds(self):
+ stmt = select([1]).limit(5).offset(6)
+ self.assert_compile(
+ stmt,
+ "SELECT 1 LIMIT 5 OFFSET 6",
+ literal_binds=True
+ )
+
+ def test_limit_offset_compound_select_literal_binds(self):
+ stmt = select([1]).union(select([2])).limit(5).offset(6)
+ self.assert_compile(
+ stmt,
+ "SELECT 1 UNION SELECT 2 LIMIT 5 OFFSET 6",
+ literal_binds=True
+ )
+
def test_select_precol_compile_ordering(self):
s1 = select([column('x')]).select_from(text('a')).limit(5).as_scalar()
s2 = select([s1]).limit(10)
@@ -2169,6 +2185,27 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
"SELECT x + foo() OVER () AS anon_1"
)
+ # test a reference to a label that's in the referenced selectable;
+ # this resolves
+ expr = (table1.c.myid + 5).label('sum')
+ stmt = select([expr]).alias()
+ self.assert_compile(
+ select([stmt.c.sum, func.row_number().over(order_by=stmt.c.sum)]),
+ "SELECT anon_1.sum, row_number() OVER (ORDER BY anon_1.sum) "
+ "AS anon_2 FROM (SELECT mytable.myid + :myid_1 AS sum "
+ "FROM mytable) AS anon_1"
+ )
+
+ # test a reference to a label that's at the same level as the OVER
+ # in the columns clause; doesn't resolve
+ expr = (table1.c.myid + 5).label('sum')
+ self.assert_compile(
+ select([expr, func.row_number().over(order_by=expr)]),
+ "SELECT mytable.myid + :myid_1 AS sum, "
+ "row_number() OVER "
+ "(ORDER BY mytable.myid + :myid_1) AS anon_1 FROM mytable"
+ )
+
def test_date_between(self):
import datetime
table = Table('dt', metadata,
@@ -2399,6 +2436,23 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
dialect=dialect
)
+ def test_statement_hints(self):
+
+ stmt = select([table1.c.myid]).\
+ with_statement_hint("test hint one").\
+ with_statement_hint("test hint two", 'mysql')
+
+ self.assert_compile(
+ stmt,
+ "SELECT mytable.myid FROM mytable test hint one",
+ )
+
+ self.assert_compile(
+ stmt,
+ "SELECT mytable.myid FROM mytable test hint one test hint two",
+ dialect='mysql'
+ )
+
def test_literal_as_text_fromstring(self):
self.assert_compile(
and_(text("a"), text("b")),
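
The test_statement_hints test above exercises Select.with_statement_hint(), which appends a hint to the statement as a whole, optionally keyed to a single dialect. A compile-only sketch of the same API outside the test harness; the table and hint text are illustrative:

    from sqlalchemy import select, table, column

    mytable = table('mytable', column('myid'))

    stmt = select([mytable.c.myid]).\
        with_statement_hint("test hint one").\
        with_statement_hint("test hint two", 'mysql')

    # the default dialect renders only the unqualified hint:
    #   SELECT mytable.myid FROM mytable test hint one
    # the mysql dialect appends both, as the second assert_compile shows
    print(stmt)
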
diff --git a/test/sql/test_defaults.py b/test/sql/test_defaults.py
index abce600df..10e557b76 100644
--- a/test/sql/test_defaults.py
+++ b/test/sql/test_defaults.py
@@ -14,6 +14,7 @@ from sqlalchemy.dialects import sqlite
from sqlalchemy.testing import fixtures
from sqlalchemy.util import u, b
from sqlalchemy import util
+import itertools
t = f = f2 = ts = currenttime = metadata = default_generator = None
@@ -1278,3 +1279,67 @@ class UnicodeDefaultsTest(fixtures.TestBase):
"foobar", Unicode(32),
default=default
)
+
+
+class InsertFromSelectTest(fixtures.TestBase):
+ __backend__ = True
+
+ def _fixture(self):
+ data = Table(
+ 'data', self.metadata,
+ Column('x', Integer),
+ Column('y', Integer)
+ )
+ data.create()
+ testing.db.execute(data.insert(), {'x': 2, 'y': 5}, {'x': 7, 'y': 12})
+ return data
+
+ @testing.provide_metadata
+ def test_insert_from_select_override_defaults(self):
+ data = self._fixture()
+
+ table = Table('sometable', self.metadata,
+ Column('x', Integer),
+ Column('foo', Integer, default=12),
+ Column('y', Integer))
+
+ table.create()
+
+ sel = select([data.c.x, data.c.y])
+
+ ins = table.insert().\
+ from_select(["x", "y"], sel)
+ testing.db.execute(ins)
+
+ eq_(
+ testing.db.execute(table.select().order_by(table.c.x)).fetchall(),
+ [(2, 12, 5), (7, 12, 12)]
+ )
+
+ @testing.provide_metadata
+ def test_insert_from_select_fn_defaults(self):
+ data = self._fixture()
+
+ counter = itertools.count(1)
+
+ def foo(ctx):
+ return next(counter)
+
+ table = Table('sometable', self.metadata,
+ Column('x', Integer),
+ Column('foo', Integer, default=foo),
+ Column('y', Integer))
+
+ table.create()
+
+ sel = select([data.c.x, data.c.y])
+
+ ins = table.insert().\
+ from_select(["x", "y"], sel)
+ testing.db.execute(ins)
+
+ # counter is only called once!
+ eq_(
+ testing.db.execute(table.select().order_by(table.c.x)).fetchall(),
+ [(2, 1, 5), (7, 1, 12)]
+ )
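
The round-trip test above pins down the key property of INSERT...FROM SELECT with defaults: a Python-side default function fires once per statement, not once per selected row. A runnable sketch of the same behavior against an in-memory SQLite engine; the engine URL and table names are illustrative:

    import itertools
    from sqlalchemy import (create_engine, MetaData, Table, Column,
                            Integer, select)

    engine = create_engine('sqlite://')
    metadata = MetaData()
    counter = itertools.count(1)

    data = Table('data', metadata,
                 Column('x', Integer), Column('y', Integer))
    dest = Table('dest', metadata,
                 Column('x', Integer),
                 Column('foo', Integer, default=lambda ctx: next(counter)),
                 Column('y', Integer))
    metadata.create_all(engine)

    engine.execute(data.insert(), [{'x': 2, 'y': 5}, {'x': 7, 'y': 12}])
    engine.execute(
        dest.insert().from_select(['x', 'y'],
                                  select([data.c.x, data.c.y])))

    # both rows get foo == 1: the default was evaluated exactly once
    print(engine.execute(select([dest]).order_by(dest.c.x)).fetchall())
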
diff --git a/test/sql/test_functions.py b/test/sql/test_functions.py
index 9b7649e63..ec8d9b5c0 100644
--- a/test/sql/test_functions.py
+++ b/test/sql/test_functions.py
@@ -1,7 +1,8 @@
from sqlalchemy.testing import eq_
import datetime
from sqlalchemy import func, select, Integer, literal, DateTime, Table, \
- Column, Sequence, MetaData, extract, Date, String, bindparam
+ Column, Sequence, MetaData, extract, Date, String, bindparam, \
+ literal_column
from sqlalchemy.sql import table, column
from sqlalchemy import sql, util
from sqlalchemy.sql.compiler import BIND_TEMPLATES
@@ -15,6 +16,13 @@ from sqlalchemy.testing import fixtures, AssertsCompiledSQL, engines
from sqlalchemy.dialects import sqlite, postgresql, mysql, oracle
+table1 = table('mytable',
+ column('myid', Integer),
+ column('name', String),
+ column('description', String),
+ )
+
+
class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = 'default'
@@ -367,6 +375,108 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
expr = func.rows("foo").alias('bar')
assert len(expr.c)
+ def test_funcfilter_empty(self):
+ self.assert_compile(
+ func.count(1).filter(),
+ "count(:param_1)"
+ )
+
+ def test_funcfilter_criterion(self):
+ self.assert_compile(
+ func.count(1).filter(
+ table1.c.name != None
+ ),
+ "count(:param_1) FILTER (WHERE mytable.name IS NOT NULL)"
+ )
+
+ def test_funcfilter_compound_criterion(self):
+ self.assert_compile(
+ func.count(1).filter(
+ table1.c.name == None,
+ table1.c.myid > 0
+ ),
+ "count(:param_1) FILTER (WHERE mytable.name IS NULL AND "
+ "mytable.myid > :myid_1)"
+ )
+
+ def test_funcfilter_label(self):
+ self.assert_compile(
+ select([func.count(1).filter(
+ table1.c.description != None
+ ).label('foo')]),
+ "SELECT count(:param_1) FILTER (WHERE mytable.description "
+ "IS NOT NULL) AS foo FROM mytable"
+ )
+
+ def test_funcfilter_fromobj_fromfunc(self):
+ # test from_obj generation.
+ # from func:
+ self.assert_compile(
+ select([
+ func.max(table1.c.name).filter(
+ literal_column('description') != None
+ )
+ ]),
+ "SELECT max(mytable.name) FILTER (WHERE description "
+ "IS NOT NULL) AS anon_1 FROM mytable"
+ )
+
+ def test_funcfilter_fromobj_fromcriterion(self):
+ # from criterion:
+ self.assert_compile(
+ select([
+ func.count(1).filter(
+ table1.c.name == 'name'
+ )
+ ]),
+ "SELECT count(:param_1) FILTER (WHERE mytable.name = :name_1) "
+ "AS anon_1 FROM mytable"
+ )
+
+ def test_funcfilter_chaining(self):
+ # test chaining:
+ self.assert_compile(
+ select([
+ func.count(1).filter(
+ table1.c.name == 'name'
+ ).filter(
+ table1.c.description == 'description'
+ )
+ ]),
+ "SELECT count(:param_1) FILTER (WHERE "
+ "mytable.name = :name_1 AND mytable.description = :description_1) "
+ "AS anon_1 FROM mytable"
+ )
+
+ def test_funcfilter_windowing_orderby(self):
+ # test filtered windowing:
+ self.assert_compile(
+ select([
+ func.rank().filter(
+ table1.c.name > 'foo'
+ ).over(
+ order_by=table1.c.name
+ )
+ ]),
+ "SELECT rank() FILTER (WHERE mytable.name > :name_1) "
+ "OVER (ORDER BY mytable.name) AS anon_1 FROM mytable"
+ )
+
+ def test_funcfilter_windowing_orderby_partitionby(self):
+ self.assert_compile(
+ select([
+ func.rank().filter(
+ table1.c.name > 'foo'
+ ).over(
+ order_by=table1.c.name,
+ partition_by=['description']
+ )
+ ]),
+ "SELECT rank() FILTER (WHERE mytable.name > :name_1) "
+ "OVER (PARTITION BY mytable.description ORDER BY mytable.name) "
+ "AS anon_1 FROM mytable"
+ )
+
class ExecuteTest(fixtures.TestBase):
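
The FunctionFilter tests above cover the new FunctionElement.filter() method, which emits the SQL-standard "FILTER (WHERE ...)" aggregate clause (supported by PostgreSQL 9.4 and up, among others) and composes with over() for filtered window functions. A compile-only sketch; table and column names are illustrative:

    from sqlalchemy import select, func, table, column, Integer, String

    mytable = table('mytable',
                    column('myid', Integer),
                    column('name', String))

    stmt = select([
        func.count(mytable.c.myid).filter(mytable.c.name != None)
    ])
    # SELECT count(mytable.myid) FILTER (WHERE mytable.name IS NOT NULL)
    # AS anon_1 FROM mytable
    print(stmt)

    # chaining onto over() produces a filtered window function:
    stmt = select([
        func.rank().filter(mytable.c.name > 'foo').over(
            order_by=mytable.c.name)
    ])
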
diff --git a/test/sql/test_generative.py b/test/sql/test_generative.py
index 013ba8082..6044cecb0 100644
--- a/test/sql/test_generative.py
+++ b/test/sql/test_generative.py
@@ -539,6 +539,11 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL):
expr2 = CloningVisitor().traverse(expr)
assert str(expr) == str(expr2)
+ def test_funcfilter(self):
+ expr = func.count(1).filter(t1.c.col1 > 1)
+ expr2 = CloningVisitor().traverse(expr)
+ assert str(expr) == str(expr2)
+
def test_adapt_union(self):
u = union(
t1.select().where(t1.c.col1 == 4),
diff --git a/test/sql/test_insert.py b/test/sql/test_insert.py
index 232c5758b..bd4eaa3e2 100644
--- a/test/sql/test_insert.py
+++ b/test/sql/test_insert.py
@@ -183,7 +183,7 @@ class InsertTest(_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL):
checkparams={"name_1": "foo"}
)
- def test_insert_from_select_select_no_defaults(self):
+ def test_insert_from_select_no_defaults(self):
metadata = MetaData()
table = Table('sometable', metadata,
Column('id', Integer, primary_key=True),
@@ -191,7 +191,7 @@ class InsertTest(_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL):
table1 = self.tables.mytable
sel = select([table1.c.myid]).where(table1.c.name == 'foo')
ins = table.insert().\
- from_select(["id"], sel)
+ from_select(["id"], sel, include_defaults=False)
self.assert_compile(
ins,
"INSERT INTO sometable (id) SELECT mytable.myid "
@@ -199,6 +199,84 @@ class InsertTest(_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL):
checkparams={"name_1": "foo"}
)
+ def test_insert_from_select_with_sql_defaults(self):
+ metadata = MetaData()
+ table = Table('sometable', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('foo', Integer, default=func.foobar()))
+ table1 = self.tables.mytable
+ sel = select([table1.c.myid]).where(table1.c.name == 'foo')
+ ins = table.insert().\
+ from_select(["id"], sel)
+ self.assert_compile(
+ ins,
+ "INSERT INTO sometable (id, foo) SELECT "
+ "mytable.myid, foobar() AS foobar_1 "
+ "FROM mytable WHERE mytable.name = :name_1",
+ checkparams={"name_1": "foo"}
+ )
+
+ def test_insert_from_select_with_python_defaults(self):
+ metadata = MetaData()
+ table = Table('sometable', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('foo', Integer, default=12))
+ table1 = self.tables.mytable
+ sel = select([table1.c.myid]).where(table1.c.name == 'foo')
+ ins = table.insert().\
+ from_select(["id"], sel)
+ self.assert_compile(
+ ins,
+ "INSERT INTO sometable (id, foo) SELECT "
+ "mytable.myid, :foo AS anon_1 "
+ "FROM mytable WHERE mytable.name = :name_1",
+ # value filled in at execution time
+ checkparams={"name_1": "foo", "foo": None}
+ )
+
+ def test_insert_from_select_override_defaults(self):
+ metadata = MetaData()
+ table = Table('sometable', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('foo', Integer, default=12))
+ table1 = self.tables.mytable
+ sel = select(
+ [table1.c.myid, table1.c.myid.label('q')]).where(
+ table1.c.name == 'foo')
+ ins = table.insert().\
+ from_select(["id", "foo"], sel)
+ self.assert_compile(
+ ins,
+ "INSERT INTO sometable (id, foo) SELECT "
+ "mytable.myid, mytable.myid AS q "
+ "FROM mytable WHERE mytable.name = :name_1",
+ checkparams={"name_1": "foo"}
+ )
+
+ def test_insert_from_select_fn_defaults(self):
+ metadata = MetaData()
+
+ def foo(ctx):
+ return 12
+
+ table = Table('sometable', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('foo', Integer, default=foo))
+ table1 = self.tables.mytable
+ sel = select(
+ [table1.c.myid]).where(
+ table1.c.name == 'foo')
+ ins = table.insert().\
+ from_select(["id"], sel)
+ self.assert_compile(
+ ins,
+ "INSERT INTO sometable (id, foo) SELECT "
+ "mytable.myid, :foo AS anon_1 "
+ "FROM mytable WHERE mytable.name = :name_1",
+ # value filled in at execution time
+ checkparams={"name_1": "foo", "foo": None}
+ )
+
def test_insert_mix_select_values_exception(self):
table1 = self.tables.mytable
sel = select([table1.c.myid, table1.c.name]).where(
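
As the renamed test_insert_from_select_no_defaults shows, Insert.from_select() now pulls in column defaults unless told otherwise; the new include_defaults=False flag restores the old rendering. A compile-only sketch of the two forms, with illustrative table names:

    from sqlalchemy import MetaData, Table, Column, Integer, select

    metadata = MetaData()
    t = Table('t', metadata,
              Column('id', Integer, primary_key=True),
              Column('foo', Integer, default=12))
    src = Table('src', metadata, Column('id', Integer))
    sel = select([src.c.id])

    # INSERT INTO t (id, foo) SELECT src.id, :foo AS anon_1 FROM src
    print(t.insert().from_select(['id'], sel))

    # INSERT INTO t (id) SELECT src.id FROM src
    print(t.insert().from_select(['id'], sel, include_defaults=False))
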
diff --git a/test/sql/test_metadata.py b/test/sql/test_metadata.py
index a209cdd7a..21eed3abd 100644
--- a/test/sql/test_metadata.py
+++ b/test/sql/test_metadata.py
@@ -16,7 +16,7 @@ from sqlalchemy import testing
from sqlalchemy.testing import ComparesTables, AssertsCompiledSQL
from sqlalchemy.testing import eq_, is_, mock
from contextlib import contextmanager
-
+from sqlalchemy import util
class MetaDataTest(fixtures.TestBase, ComparesTables):
@@ -679,6 +679,86 @@ class ToMetaDataTest(fixtures.TestBase, ComparesTables):
eq_(str(table_c.join(table2_c).onclause),
'myschema.mytable.myid = myschema.othertable.myid')
+ def test_change_name_retain_metadata(self):
+ meta = MetaData()
+
+ table = Table('mytable', meta,
+ Column('myid', Integer, primary_key=True),
+ Column('name', String(40), nullable=True),
+ Column('description', String(30),
+ CheckConstraint("description='hi'")),
+ UniqueConstraint('name'),
+ schema='myschema',
+ )
+
+ table2 = table.tometadata(table.metadata, name='newtable')
+ table3 = table.tometadata(table.metadata, schema='newschema',
+ name='newtable')
+
+ assert table.metadata is table2.metadata
+ assert table.metadata is table3.metadata
+ eq_((table.name, table2.name, table3.name),
+ ('mytable', 'newtable', 'newtable'))
+ eq_((table.key, table2.key, table3.key),
+ ('myschema.mytable', 'myschema.newtable', 'newschema.newtable'))
+
+ def test_change_name_change_metadata(self):
+ meta = MetaData()
+ meta2 = MetaData()
+
+ table = Table('mytable', meta,
+ Column('myid', Integer, primary_key=True),
+ Column('name', String(40), nullable=True),
+ Column('description', String(30),
+ CheckConstraint("description='hi'")),
+ UniqueConstraint('name'),
+ schema='myschema',
+ )
+
+ table2 = table.tometadata(meta2, name='newtable')
+
+ assert table.metadata is not table2.metadata
+ eq_((table.name, table2.name),
+ ('mytable', 'newtable'))
+ eq_((table.key, table2.key),
+ ('myschema.mytable', 'myschema.newtable'))
+
+ def test_change_name_selfref_fk_moves(self):
+ meta = MetaData()
+
+ referenced = Table('ref', meta,
+ Column('id', Integer, primary_key=True),
+ )
+ table = Table('mytable', meta,
+ Column('id', Integer, primary_key=True),
+ Column('parent_id', ForeignKey('mytable.id')),
+ Column('ref_id', ForeignKey('ref.id'))
+ )
+
+ table2 = table.tometadata(table.metadata, name='newtable')
+ assert table.metadata is table2.metadata
+ assert table2.c.ref_id.references(referenced.c.id)
+ assert table2.c.parent_id.references(table2.c.id)
+
+ def test_change_name_selfref_fk_moves_w_schema(self):
+ meta = MetaData()
+
+ referenced = Table('ref', meta,
+ Column('id', Integer, primary_key=True),
+ )
+ table = Table('mytable', meta,
+ Column('id', Integer, primary_key=True),
+ Column('parent_id', ForeignKey('mytable.id')),
+ Column('ref_id', ForeignKey('ref.id'))
+ )
+
+ table2 = table.tometadata(
+ table.metadata, name='newtable', schema='newschema')
+ ref2 = referenced.tometadata(table.metadata, schema='newschema')
+ assert table.metadata is table2.metadata
+ assert table2.c.ref_id.references(ref2.c.id)
+ assert table2.c.parent_id.references(table2.c.id)
+
def _assert_fk(self, t2, schema, expected, referred_schema_fn=None):
m2 = MetaData()
existing_schema = t2.schema
@@ -2126,7 +2206,7 @@ class ColumnDefinitionTest(AssertsCompiledSQL, fixtures.TestBase):
assert_raises_message(
exc.ArgumentError,
- "Column object already assigned to Table 't'",
+ "Column object 'x' already assigned to Table 't'",
Table, 'q', MetaData(), c)
def test_incomplete_key(self):
@@ -2707,7 +2787,7 @@ class DialectKWArgTest(fixtures.TestBase):
lambda arg: "goofy_%s" % arg):
with self._fixture():
idx = Index('a', 'b')
- idx.kwargs[u'participating_x'] = 7
+ idx.kwargs[util.u('participating_x')] = 7
eq_(
list(idx.dialect_kwargs),
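
The new tometadata() tests above exercise its name and schema arguments: a table can now be copied under a new name, even into its own MetaData, with self-referential foreign keys retargeted to the copy. A minimal sketch of that behavior:

    from sqlalchemy import MetaData, Table, Column, Integer, ForeignKey

    meta = MetaData()
    t = Table('mytable', meta,
              Column('id', Integer, primary_key=True),
              Column('parent_id', ForeignKey('mytable.id')))

    t2 = t.tometadata(meta, name='newtable')

    assert t2.name == 'newtable'
    assert t2.key == 'newtable'
    # the self-referential FK follows the rename:
    assert t2.c.parent_id.references(t2.c.id)
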
diff --git a/test/sql/test_operators.py b/test/sql/test_operators.py
index 5c401845b..e8ad88511 100644
--- a/test/sql/test_operators.py
+++ b/test/sql/test_operators.py
@@ -1,4 +1,4 @@
-from sqlalchemy.testing import fixtures, eq_, is_
+from sqlalchemy.testing import fixtures, eq_, is_, is_not_
from sqlalchemy import testing
from sqlalchemy.testing import assert_raises_message
from sqlalchemy.sql import column, desc, asc, literal, collate, null, true, false
@@ -778,6 +778,25 @@ class ConjunctionTest(fixtures.TestBase, testing.AssertsCompiledSQL):
"SELECT x WHERE NOT NULL"
)
+ def test_constant_non_singleton(self):
+ is_not_(null(), null())
+ is_not_(false(), false())
+ is_not_(true(), true())
+
+ def test_constant_render_distinct(self):
+ self.assert_compile(
+ select([null(), null()]),
+ "SELECT NULL AS anon_1, NULL AS anon_2"
+ )
+ self.assert_compile(
+ select([true(), true()]),
+ "SELECT true AS anon_1, true AS anon_2"
+ )
+ self.assert_compile(
+ select([false(), false()]),
+ "SELECT false AS anon_1, false AS anon_2"
+ )
+
class OperatorPrecedenceTest(fixtures.TestBase, testing.AssertsCompiledSQL):
__dialect__ = 'default'
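
The two new tests above document that null(), false() and true() construct distinct objects rather than returning shared singletons, so repeated occurrences in a SELECT label separately. A quick sketch:

    from sqlalchemy import select, null, true, false

    assert null() is not null()
    assert true() is not true()
    assert false() is not false()

    # SELECT NULL AS anon_1, NULL AS anon_2
    print(select([null(), null()]))
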
diff --git a/test/sql/test_query.py b/test/sql/test_query.py
index 430c3fe7c..2f13486eb 100644
--- a/test/sql/test_query.py
+++ b/test/sql/test_query.py
@@ -81,11 +81,10 @@ class QueryTest(fixtures.TestBase):
assert_raises_message(
exc.StatementError,
- r"A value is required for bind parameter 'user_name', in "
+ r"\(sqlalchemy.exc.InvalidRequestError\) A value is required for "
+ "bind parameter 'user_name', in "
"parameter group 2 "
- "\(original cause: (sqlalchemy.exc.)?InvalidRequestError: A "
- "value is required for bind parameter 'user_name', in "
- "parameter group 2\) u?'INSERT INTO query_users",
+ r"\[SQL: u?'INSERT INTO query_users",
users.insert().execute,
{'user_id': 7, 'user_name': 'jack'},
{'user_id': 8, 'user_name': 'ed'},
@@ -295,9 +294,6 @@ class QueryTest(fixtures.TestBase):
l.append(row)
self.assert_(len(l) == 3)
- @testing.fails_if(
- lambda: util.py3k and testing.against('mysql+mysqlconnector'),
- "bug in mysqlconnector")
@testing.requires.subqueries
def test_anonymous_rows(self):
users.insert().execute(
@@ -509,9 +505,6 @@ class QueryTest(fixtures.TestBase):
lambda: row[accessor]
)
- @testing.fails_if(
- lambda: util.py3k and testing.against('mysql+mysqlconnector'),
- "bug in mysqlconnector")
@testing.requires.boolean_col_expressions
def test_or_and_as_columns(self):
true, false = literal(True), literal(False)
@@ -570,9 +563,6 @@ class QueryTest(fixtures.TestBase):
):
eq_(expr.execute().fetchall(), result)
- @testing.fails_if(
- lambda: util.py3k and testing.against('mysql+mysqlconnector'),
- "bug in mysqlconnector")
@testing.requires.mod_operator_as_percent_sign
@testing.emits_warning('.*now automatically escapes.*')
def test_percents_in_text(self):
@@ -623,9 +613,6 @@ class QueryTest(fixtures.TestBase):
c = testing.db.connect()
assert c.execute(s, id=7).fetchall()[0]['user_id'] == 7
- @testing.fails_if(
- lambda: util.py3k and testing.against('mysql+mysqlconnector'),
- "bug in mysqlconnector")
def test_repeated_bindparams(self):
"""Tests that a BindParam can be used more than once.
@@ -1319,9 +1306,6 @@ class QueryTest(fixtures.TestBase):
# Null values are not outside any set
assert len(r) == 0
- @testing.fails_if(
- lambda: util.py3k and testing.against('mysql+mysqlconnector'),
- "bug in mysqlconnector")
@testing.emits_warning('.*empty sequence.*')
@testing.fails_on('firebird', "uses sql-92 rules")
@testing.fails_on('sybase', "uses sql-92 rules")
@@ -1348,9 +1332,6 @@ class QueryTest(fixtures.TestBase):
r = s.execute(search_key=None).fetchall()
assert len(r) == 0
- @testing.fails_if(
- lambda: util.py3k and testing.against('mysql+mysqlconnector'),
- "bug in mysqlconnector")
@testing.emits_warning('.*empty sequence.*')
def test_literal_in(self):
"""similar to test_bind_in but use a bind with a value."""
@@ -2510,9 +2491,6 @@ class OperatorTest(fixtures.TestBase):
metadata.drop_all()
# TODO: seems like more tests warranted for this setup.
- @testing.fails_if(
- lambda: util.py3k and testing.against('mysql+mysqlconnector'),
- "bug in mysqlconnector")
def test_modulo(self):
eq_(
select([flds.c.intcol % 3],
diff --git a/test/sql/test_selectable.py b/test/sql/test_selectable.py
index a3b2b0e93..99d0cbe76 100644
--- a/test/sql/test_selectable.py
+++ b/test/sql/test_selectable.py
@@ -5,6 +5,7 @@ from sqlalchemy.testing import eq_, assert_raises, \
from sqlalchemy import *
from sqlalchemy.testing import fixtures, AssertsCompiledSQL, \
AssertsExecutionResults
+from sqlalchemy.sql import elements
from sqlalchemy import testing
from sqlalchemy.sql import util as sql_util, visitors, expression
from sqlalchemy import exc
@@ -1934,6 +1935,29 @@ class AnnotationsTest(fixtures.TestBase):
assert (c2 == 5).left._annotations == {"foo": "bar", "bat": "hoho"}
+class ReprTest(fixtures.TestBase):
+ def test_ensure_repr_elements(self):
+ for obj in [
+ elements.Cast(1, 2),
+ elements.TypeClause(String()),
+ elements.ColumnClause('x'),
+ elements.BindParameter('q'),
+ elements.Null(),
+ elements.True_(),
+ elements.False_(),
+ elements.ClauseList(),
+ elements.BooleanClauseList.and_(),
+ elements.Tuple(),
+ elements.Case([]),
+ elements.Extract('foo', column('x')),
+ elements.UnaryExpression(column('x')),
+ elements.Grouping(column('x')),
+ elements.Over(func.foo()),
+ elements.Label('q', column('x')),
+ ]:
+ repr(obj)
+
+
class WithLabelsTest(fixtures.TestBase):
def _assert_labels_warning(self, s):
diff --git a/tox.ini b/tox.ini
index 304bfd632..668590611 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,10 +1,8 @@
[tox]
-envlist = full
+envlist = full,py26,py27,py33,py34
[testenv]
deps=pytest
- flake8
- coverage
mock
sitepackages=True
@@ -12,7 +10,6 @@ usedevelop=True
commands=
python -m pytest {posargs}
-envdir=pytest
[testenv:full]
@@ -21,22 +18,23 @@ envdir=pytest
setenv=
DISABLE_SQLALCHEMY_CEXT=1
+# see also .coveragerc
+deps=coverage
commands=
- python -m pytest \
- --cov=lib/sqlalchemy \
- --exclude-tag memory-intensive \
- --exclude-tag timing-intensive \
- -k "not aaa_profiling" \
- {posargs}
- python -m coverage xml --include=lib/sqlalchemy/*
+ python -m pytest --cov=sqlalchemy --cov-report term --cov-report xml \
+ --exclude-tag memory-intensive \
+ --exclude-tag timing-intensive \
+ -k "not aaa_profiling" \
+ {posargs}
+
[testenv:pep8]
+deps=flake8
commands = python -m flake8 {posargs}
[flake8]
-
show-source = True
-ignore = E711,E712,E721,F841,F811
+ignore = E711,E712,E721
exclude=.venv,.git,.tox,dist,doc,*egg,build