author     Grzegorz Makarewicz <mak@trisoft.com.pl>  2015-10-02 00:23:57 +0200
committer  Grzegorz Makarewicz <mak@trisoft.com.pl>  2015-10-02 00:23:57 +0200
commit     a6b78827fc12fdd65ee3f920b881ce398e99ee6c (patch)
tree       112cb2ac537bd3a5e191f270b3827611e7ff9aa3
parent     b341b988649633f75e750f0919bad77b2720799d (diff)
parent     ac08920284935e7e7519ce77ba369703390155dc (diff)
Merge remote-tracking branch 'upstream/master'
-rw-r--r--  doc/build/changelog/changelog_09.rst | 39
-rw-r--r--  doc/build/changelog/changelog_10.rst | 150
-rw-r--r--  doc/build/changelog/changelog_11.rst | 368
-rw-r--r--  doc/build/changelog/index.rst | 4
-rw-r--r--  doc/build/changelog/migration_11.rst | 884
-rw-r--r--  doc/build/conf.py | 6
-rw-r--r--  doc/build/core/ddl.rst | 240
-rw-r--r--  doc/build/core/events.rst | 4
-rw-r--r--  doc/build/core/sqlelement.rst | 12
-rw-r--r--  doc/build/core/tutorial.rst | 35
-rw-r--r--  doc/build/core/type_api.rst | 4
-rw-r--r--  doc/build/core/type_basics.rst | 3
-rw-r--r--  doc/build/dialects/postgresql.rst | 7
-rw-r--r--  doc/build/faq/connections.rst | 81
-rw-r--r--  doc/build/faq/sessions.rst | 71
-rw-r--r--  doc/build/glossary.rst | 23
-rw-r--r--  doc/build/index.rst | 2
-rw-r--r--  doc/build/intro.rst | 59
-rw-r--r--  doc/build/orm/events.rst | 10
-rw-r--r--  doc/build/orm/examples.rst | 2
-rw-r--r--  doc/build/orm/inheritance.rst | 6
-rw-r--r--  doc/build/orm/persistence_techniques.rst | 106
-rw-r--r--  doc/build/orm/relationship_persistence.rst | 122
-rw-r--r--  doc/build/orm/session.rst | 1
-rw-r--r--  doc/build/orm/session_events.rst | 433
-rw-r--r--  doc/build/orm/session_state_management.rst | 108
-rw-r--r--  doc/build/orm/session_transaction.rst | 9
-rw-r--r--  doc/build/orm/tutorial.rst | 67
-rw-r--r--  examples/versioned_history/history_meta.py | 8
-rw-r--r--  examples/versioned_history/test_versioning.py | 65
-rw-r--r--  lib/sqlalchemy/__init__.py | 5
-rw-r--r--  lib/sqlalchemy/dialects/mssql/base.py | 106
-rw-r--r--  lib/sqlalchemy/dialects/mysql/base.py | 5
-rw-r--r--  lib/sqlalchemy/dialects/oracle/cx_oracle.py | 7
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/__init__.py | 17
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/array.py | 306
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/base.py | 519
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/ext.py (renamed from lib/sqlalchemy/dialects/postgresql/constraints.py) | 76
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/hstore.py | 278
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/json.py | 400
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/psycopg2.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/sqlite/base.py | 33
-rw-r--r--  lib/sqlalchemy/dialects/sybase/base.py | 15
-rw-r--r--  lib/sqlalchemy/engine/__init__.py | 25
-rw-r--r--  lib/sqlalchemy/engine/base.py | 6
-rw-r--r--  lib/sqlalchemy/engine/result.py | 23
-rw-r--r--  lib/sqlalchemy/event/attr.py | 14
-rw-r--r--  lib/sqlalchemy/ext/baked.py | 20
-rw-r--r--  lib/sqlalchemy/ext/hybrid.py | 2
-rw-r--r--  lib/sqlalchemy/orm/attributes.py | 5
-rw-r--r--  lib/sqlalchemy/orm/events.py | 637
-rw-r--r--  lib/sqlalchemy/orm/identity.py | 37
-rw-r--r--  lib/sqlalchemy/orm/loading.py | 20
-rw-r--r--  lib/sqlalchemy/orm/mapper.py | 61
-rw-r--r--  lib/sqlalchemy/orm/persistence.py | 106
-rw-r--r--  lib/sqlalchemy/orm/properties.py | 2
-rw-r--r--  lib/sqlalchemy/orm/query.py | 92
-rw-r--r--  lib/sqlalchemy/orm/relationships.py | 38
-rw-r--r--  lib/sqlalchemy/orm/session.py | 248
-rw-r--r--  lib/sqlalchemy/orm/state.py | 122
-rw-r--r--  lib/sqlalchemy/orm/strategies.py | 8
-rw-r--r--  lib/sqlalchemy/orm/strategy_options.py | 2
-rw-r--r--  lib/sqlalchemy/orm/util.py | 9
-rw-r--r--  lib/sqlalchemy/pool.py | 5
-rw-r--r--  lib/sqlalchemy/sql/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/sql/compiler.py | 100
-rw-r--r--  lib/sqlalchemy/sql/crud.py | 1
-rw-r--r--  lib/sqlalchemy/sql/default_comparator.py | 35
-rw-r--r--  lib/sqlalchemy/sql/elements.py | 450
-rw-r--r--  lib/sqlalchemy/sql/expression.py | 14
-rw-r--r--  lib/sqlalchemy/sql/functions.py | 196
-rw-r--r--  lib/sqlalchemy/sql/operators.py | 58
-rw-r--r--  lib/sqlalchemy/sql/selectable.py | 20
-rw-r--r--  lib/sqlalchemy/sql/sqltypes.py | 306
-rw-r--r--  lib/sqlalchemy/sql/type_api.py | 91
-rw-r--r--  lib/sqlalchemy/sql/util.py | 7
-rw-r--r--  lib/sqlalchemy/testing/__init__.py | 3
-rw-r--r--  lib/sqlalchemy/testing/assertions.py | 10
-rw-r--r--  lib/sqlalchemy/testing/distutils_run.py | 11
-rw-r--r--  lib/sqlalchemy/testing/exclusions.py | 6
-rw-r--r--  lib/sqlalchemy/testing/provision.py | 31
-rw-r--r--  lib/sqlalchemy/testing/requirements.py | 26
-rw-r--r--  lib/sqlalchemy/testing/suite/test_reflection.py | 8
-rw-r--r--  lib/sqlalchemy/testing/suite/test_select.py | 124
-rw-r--r--  lib/sqlalchemy/types.py | 6
-rw-r--r--  lib/sqlalchemy/util/langhelpers.py | 6
-rw-r--r--  setup.py | 173
-rw-r--r--  test/aaa_profiling/test_compiler.py | 4
-rw-r--r--  test/base/test_utils.py | 30
-rw-r--r--  test/dialect/mssql/test_compiler.py | 2
-rw-r--r--  test/dialect/mssql/test_query.py | 16
-rw-r--r--  test/dialect/mssql/test_reflection.py | 38
-rw-r--r--  test/dialect/mssql/test_types.py | 4
-rw-r--r--  test/dialect/mysql/test_query.py | 55
-rw-r--r--  test/dialect/postgresql/test_compiler.py | 52
-rw-r--r--  test/dialect/postgresql/test_reflection.py | 1
-rw-r--r--  test/dialect/postgresql/test_types.py | 556
-rw-r--r--  test/dialect/test_sqlite.py | 45
-rw-r--r--  test/engine/test_pool.py | 95
-rw-r--r--  test/ext/test_baked.py | 49
-rw-r--r--  test/orm/test_bulk.py | 55
-rw-r--r--  test/orm/test_events.py | 537
-rw-r--r--  test/orm/test_hasparent.py | 4
-rw-r--r--  test/orm/test_lazy_relations.py | 75
-rw-r--r--  test/orm/test_load_on_fks.py | 3
-rw-r--r--  test/orm/test_mapper.py | 1248
-rw-r--r--  test/orm/test_options.py | 12
-rw-r--r--  test/orm/test_query.py | 145
-rw-r--r--  test/orm/test_session.py | 105
-rw-r--r--  test/orm/test_transaction.py | 8
-rw-r--r--  test/orm/test_unitofworkv2.py | 212
-rw-r--r--  test/profiles.txt | 58
-rw-r--r--  test/requirements.py | 29
-rw-r--r--  test/sql/test_compiler.py | 75
-rw-r--r--  test/sql/test_defaults.py | 37
-rw-r--r--  test/sql/test_functions.py | 145
-rw-r--r--  test/sql/test_insert.py | 31
-rw-r--r--  test/sql/test_insert_exec.py | 445
-rw-r--r--  test/sql/test_operators.py | 464
-rw-r--r--  test/sql/test_query.py | 1326
-rw-r--r--  test/sql/test_resultset.py | 1136
-rw-r--r--  test/sql/test_selectable.py | 24
-rw-r--r--  test/sql/test_types.py | 192
123 files changed, 11336 insertions, 3841 deletions
diff --git a/doc/build/changelog/changelog_09.rst b/doc/build/changelog/changelog_09.rst
index 2d2964ba4..be8872975 100644
--- a/doc/build/changelog/changelog_09.rst
+++ b/doc/build/changelog/changelog_09.rst
@@ -12,7 +12,46 @@
:start-line: 5
.. changelog::
+ :version: 0.9.11
+
+ .. change::
+ :tags: bug, oracle, py3k
+ :tickets: 3491
+ :versions: 1.1.0b1, 1.0.9
+
+ Fixed support for cx_Oracle version 5.2, which was tripping
+ up SQLAlchemy's version detection under Python 3 and inadvertently
+ not using the correct unicode mode for Python 3. This would cause
+ issues such as bound variables mis-interpreted as NULL and rows
+ silently not being returned.
+
+ .. change::
+ :tags: bug, engine
+ :tickets: 3497
+ :versions: 1.0.8
+
+ Fixed critical issue whereby the pool "checkout" event handler
+ may be called against a stale connection without the "connect"
+ event handler having been called, in the case where the pool
+ attempted to reconnect after being invalidated and failed; the stale
+ connection would remain present and would be used on a subsequent
+ attempt. This issue has a greater impact in the 1.0 series subsequent
+ to 1.0.2, as it also delivers a blanked-out ``.info`` dictionary to
+ the event handler; prior to 1.0.2 the ``.info`` dictionary is still
+ the previous one.
+
+.. changelog::
:version: 0.9.10
+ :released: July 22, 2015
+
+ .. change::
+ :tags: bug, sqlite
+ :tickets: 3495
+ :versions: 1.0.8
+
+      Fixed bug in the SQLite dialect where UNIQUE constraints whose names
+      included non-alphabetic characters, like dots or spaces, would not
+      be reflected with their name.
.. change::
:tags: feature, sql
diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst
index 48626a525..ef125eecf 100644
--- a/doc/build/changelog/changelog_10.rst
+++ b/doc/build/changelog/changelog_10.rst
@@ -16,7 +16,157 @@
:start-line: 5
.. changelog::
+ :version: 1.0.9
+
+ .. change::
+ :tags: feature, orm
+ :versions: 1.1.0b1
+ :pullreq: github:201
+
+ Added new method :meth:`.Query.one_or_none`; same as
+ :meth:`.Query.one` but returns None if no row found. Pull request
+ courtesy esiegerman.
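+
+      For example (``User`` and ``session`` here are hypothetical)::
+
+          user = session.query(User).filter(User.name == 'ed').one_or_none()
+
+          # "user" is the object if exactly one row matched, None if no rows
+          # matched; MultipleResultsFound is still raised for multiple rows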
+
+ .. change::
+ :tags: bug, orm
+ :versions: 1.1.0b1
+ :tickets: 3539
+
+ Fixed rare TypeError which could occur when stringifying certain
+ kinds of internal column loader options within internal logging.
+
+ .. change::
+ :tags: bug, orm
+ :versions: 1.1.0b1
+ :tickets: 3525
+
+ Fixed bug in :meth:`.Session.bulk_save_objects` where a mapped
+ column that had some kind of "fetch on update" value and was not
+ locally present in the given object would cause an AttributeError
+ within the operation.
+
+ .. change::
+ :tags: bug, sql
+ :versions: 1.1.0b1
+ :tickets: 3520
+
+ Fixed regression in 1.0-released default-processor for multi-VALUES
+ insert statement, :ticket:`3288`, where the column type for the
+ default-holding column would not be propagated to the compiled
+ statement in the case where the default was being used,
+ leading to bind-level type handlers not being invoked.
+
+ .. change::
+ :tags: bug, examples
+ :versions: 1.1.0b1
+
+ Fixed two issues in the "history_meta" example where history tracking
+ could encounter empty history, and where a column keyed to an alternate
+ attribute name would fail to track properly. Fixes courtesy
+ Alex Fraser.
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 3510
+ :versions: 1.1.0b1
+
+ Fixed 1.0 regression where the "noload" loader strategy would fail
+ to function for a many-to-one relationship. The loader used an
+ API to place "None" into the dictionary which no longer actually
+ writes a value; this is a side effect of :ticket:`3061`.
+
+ .. change::
+ :tags: bug, sybase
+ :tickets: 3508, 3509
+ :versions: 1.1.0b1
+
+ Fixed two issues regarding Sybase reflection, allowing tables
+ without primary keys to be reflected as well as ensured that
+ a SQL statement involved in foreign key detection is pre-fetched up
+ front to avoid driver issues upon nested queries. Fixes here
+ courtesy Eugene Zapolsky; note that we cannot currently test
+ Sybase to locally verify these changes.
+
+ .. change::
+ :tags: bug, postgresql
+ :pullreq: github:190
+ :versions: 1.1.0b1
+
+ An adjustment to the new Postgresql feature of reflecting storage
+ options and USING of :ticket:`3455` released in 1.0.6,
+ to disable the feature for Postgresql versions < 8.2 where the
+ ``reloptions`` column is not provided; this allows Amazon Redshift
+ to again work as it is based on an 8.0.x version of Postgresql.
+ Fix courtesy Pete Hollobon.
+
+
+.. changelog::
+ :version: 1.0.8
+ :released: July 22, 2015
+
+ .. change::
+ :tags: bug, misc
+ :tickets: 3494
+
+ Fixed an issue where a particular base class within utils
+      didn't implement ``__slots__``, which meant that all subclasses
+      of that class didn't either, negating the rationale for ``__slots__``
+      to be in use.  This didn't cause any issue except on IronPython,
+      which apparently does not implement ``__slots__`` behavior compatibly
+      with CPython.
+
+
+.. changelog::
:version: 1.0.7
+ :released: July 20, 2015
+
+ .. change::
+ :tags: feature, sql
+ :tickets: 3459
+ :pullreq: bitbucket:56
+
+ Added a :meth:`.ColumnElement.cast` method which performs the same
+ purpose as the standalone :func:`.cast` function. Pull request
+ courtesy Sebastian Bank.
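+
+      For example (``mytable`` here is a hypothetical table)::
+
+          stmt = select([mytable.c.value.cast(Integer)])
+
+          # equivalent to the standalone function form:
+          # stmt = select([cast(mytable.c.value, Integer)])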
+
+ .. change::
+ :tags: bug, engine
+ :tickets: 3481
+
+ Fixed regression where new methods on :class:`.ResultProxy` used
+ by the ORM :class:`.Query` object (part of the performance
+ enhancements of :ticket:`3175`) would not raise the "this result
+ does not return rows" exception in the case where the driver
+ (typically MySQL) fails to generate cursor.description correctly;
+ an AttributeError against NoneType would be raised instead.
+
+ .. change::
+ :tags: bug, engine
+ :tickets: 3483
+
+ Fixed regression where :meth:`.ResultProxy.keys` would return
+ un-adjusted internal symbol names for "anonymous" labels, which
+ are the "foo_1" types of labels we see generated for SQL functions
+ without labels and similar. This was a side effect of the
+ performance enhancements implemented as part of #918.
+
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 3490
+
+      Fixed bug where coercion of a literal ``True`` or ``False`` constant
+ in conjunction with :func:`.and_` or :func:`.or_` would fail
+ with an AttributeError.
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 3485
+
+ Fixed potential issue where a custom subclass
+ of :class:`.FunctionElement` or other column element that incorrectly
+ states 'None' or any other invalid object as the ``.type``
+      attribute will now raise an informative exception instead of
+      failing with a recursion overflow.
.. change::
:tags: bug, sql
diff --git a/doc/build/changelog/changelog_11.rst b/doc/build/changelog/changelog_11.rst
new file mode 100644
index 000000000..e376fe191
--- /dev/null
+++ b/doc/build/changelog/changelog_11.rst
@@ -0,0 +1,368 @@
+
+
+==============
+1.1 Changelog
+==============
+
+.. changelog_imports::
+
+ .. include:: changelog_10.rst
+ :start-line: 5
+
+ .. include:: changelog_09.rst
+ :start-line: 5
+
+ .. include:: changelog_08.rst
+ :start-line: 5
+
+ .. include:: changelog_07.rst
+ :start-line: 5
+
+.. changelog::
+ :version: 1.1.0b1
+
+ .. change::
+ :tags: change, sqlite
+ :pullreq: github:198
+
+      Added support to the SQLite dialect for the
+      :meth:`.Inspector.get_schema_names` method;
+      pull request courtesy Brian Van Klaveren. Also repaired support
+ for creation of indexes with schemas as well as reflection of
+ foreign key constraints in schema-bound tables.
+
+ .. seealso::
+
+ :ref:`change_sqlite_schemas`
+
+ .. change::
+ :tags: change, mssql
+ :tickets: 3434
+
+ The ``legacy_schema_aliasing`` flag, introduced in version 1.0.5
+ as part of :ticket:`3424` to allow disabling of the MSSQL dialect's
+ attempts to create aliases for schema-qualified tables, now defaults
+ to False; the old behavior is now disabled unless explicitly turned on.
+
+ .. seealso::
+
+ :ref:`change_3434`
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 3250
+
+ Added a new type-level modifier :meth:`.TypeEngine.evaluates_none`
+ which indicates to the ORM that a positive set of None should be
+ persisted as the value NULL, instead of omitting the column from
+ the INSERT statement. This feature is used both as part of the
+ implementation for :ticket:`3514` as well as a standalone feature
+ available on any type.
+
+ .. seealso::
+
+ :ref:`change_3250`
+
+ .. change::
+ :tags: bug, postgresql
+ :tickets: 2729
+
+ The use of a :class:`.postgresql.ARRAY` object that refers
+ to a :class:`.types.Enum` or :class:`.postgresql.ENUM` subtype
+ will now emit the expected "CREATE TYPE" and "DROP TYPE" DDL when
+ the type is used within a "CREATE TABLE" or "DROP TABLE".
+
+ .. seealso::
+
+ :ref:`change_2729`
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 3531
+
+ The :func:`.type_coerce` construct is now a fully fledged Core
+ expression element which is late-evaluated at compile time. Previously,
+ the function was only a conversion function which would handle different
+ expression inputs by returning either a :class:`.Label` of a column-oriented
+ expression or a copy of a given :class:`.BindParameter` object,
+ which in particular prevented the operation from being logically
+ maintained when an ORM-level expression transformation would convert
+ a column to a bound parameter (e.g. for lazy loading).
+
+ .. seealso::
+
+ :ref:`change_3531`
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 3526
+
+ Internal calls to "bookkeeping" functions within
+ :meth:`.Session.bulk_save_objects` and related bulk methods have
+ been scaled back to the extent that this functionality is not
+ currently used, e.g. checks for column default values to be
+ fetched after an INSERT or UPDATE statement.
+
+ .. change::
+ :tags: feature, orm
+ :tickets: 2677
+
+ The :class:`.SessionEvents` suite now includes events to allow
+ unambiguous tracking of all object lifecycle state transitions
+ in terms of the :class:`.Session` itself, e.g. pending,
+ transient, persistent, detached. The state of the object
+ within each event is also defined.
+
+ .. seealso::
+
+ :ref:`change_2677`
+
+ .. change::
+ :tags: feature, orm
+ :tickets: 2677
+
+ Added a new session lifecycle state :term:`deleted`. This new state
+ represents an object that has been deleted from the :term:`persistent`
+ state and will move to the :term:`detached` state once the transaction
+ is committed. This resolves the long-standing issue that objects
+ which were deleted existed in a gray area between persistent and
+ detached. The :attr:`.InstanceState.persistent` accessor will
+ **no longer** report on a deleted object as persistent; the
+ :attr:`.InstanceState.deleted` accessor will instead be True for
+ these objects, until they become detached.
+
+ .. seealso::
+
+ :ref:`change_2677`
+
+ .. change::
+ :tags: change, orm
+ :tickets: 2677
+
+ The :paramref:`.Session.weak_identity_map` parameter is deprecated.
+ See the new recipe at :ref:`session_referencing_behavior` for
+ an event-based approach to maintaining strong identity map behavior.
+
+ .. seealso::
+
+ :ref:`change_2677`
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 2919
+
+ The :class:`.TypeDecorator` type extender will now work in conjunction
+ with a :class:`.SchemaType` implementation, typically :class:`.Enum`
+ or :class:`.Boolean` with regards to ensuring that the per-table
+ events are propagated from the implementation type to the outer type.
+ These events are used
+ to ensure that the constraints or Postgresql types (e.g. ENUM)
+ are correctly created (and possibly dropped) along with the parent
+ table.
+
+ .. seealso::
+
+ :ref:`change_2919`
+
+ .. change::
+ :tags: feature, sql
+ :tickets: 1370
+
+ Added support for "set-aggregate" functions of the form
+ ``<function> WITHIN GROUP (ORDER BY <criteria>)``, using the
+ method :meth:`.FunctionElement.within_group`. A series of common
+ set-aggregate functions with return types derived from the set have
+ been added. This includes functions like :class:`.percentile_cont`,
+ :class:`.dense_rank` and others.
+
+ .. seealso::
+
+ :ref:`change_3132`
+
+ .. change::
+ :tags: feature, sql, postgresql
+ :tickets: 3132
+
+ Added support for the SQL-standard function :class:`.array_agg`,
+ which automatically returns an :class:`.Array` of the correct type
+ and supports index / slice operations, as well as
+ :func:`.postgresql.array_agg`, which returns a :class:`.postgresql.ARRAY`
+ with additional comparison features. As arrays are only
+      supported on Postgresql at the moment, these functions only actually
+      work on Postgresql. Also added a new construct
+ :class:`.postgresql.aggregate_order_by` in support of PG's
+ "ORDER BY" extension.
+
+ .. seealso::
+
+ :ref:`change_3132`
+
+ .. change::
+ :tags: feature, sql
+ :tickets: 3516
+
+      Added a new type to Core: :class:`.types.Array`. This is the
+ base of the PostgreSQL :class:`.ARRAY` type, and is now part of Core
+ to begin supporting various SQL-standard array-supporting features
+ including some functions and eventual support for native arrays
+ on other databases that have an "array" concept, such as DB2 or Oracle.
+ Additionally, new operators :func:`.expression.any_` and
+ :func:`.expression.all_` have been added. These support not just
+ array constructs on Postgresql, but also subqueries that are usable
+ on MySQL (but sadly not on Postgresql).
+
+ .. seealso::
+
+ :ref:`change_3516`
+
+ .. change::
+ :tags: feature, orm
+ :tickets: 3321
+
+ Added new checks for the common error case of passing mapped classes
+ or mapped instances into contexts where they are interpreted as
+ SQL bound parameters; a new exception is raised for this.
+
+ .. seealso::
+
+ :ref:`change_3321`
+
+ .. change::
+ :tags: bug, postgresql
+ :tickets: 3499
+
+ The "hashable" flag on special datatypes such as :class:`.postgresql.ARRAY`,
+ :class:`.postgresql.JSON` and :class:`.postgresql.HSTORE` is now
+ set to False, which allows these types to be fetchable in ORM
+ queries that include entities within the row.
+
+ .. seealso::
+
+ :ref:`change_3499`
+
+ :ref:`change_3499_postgresql`
+
+ .. change::
+ :tags: bug, postgresql
+ :tickets: 3487
+
+ The Postgresql :class:`.postgresql.ARRAY` type now supports multidimensional
+ indexed access, e.g. expressions such as ``somecol[5][6]`` without
+ any need for explicit casts or type coercions, provided
+ that the :paramref:`.postgresql.ARRAY.dimensions` parameter is set to the
+ desired number of dimensions.
+
+ .. seealso::
+
+ :ref:`change_3503`
+
+ .. change::
+ :tags: bug, postgresql
+ :tickets: 3503
+
+ The return type for the :class:`.postgresql.JSON` and :class:`.postgresql.JSONB`
+ when using indexed access has been fixed to work like Postgresql itself,
+ and returns an expression that itself is of type :class:`.postgresql.JSON`
+ or :class:`.postgresql.JSONB`. Previously, the accessor would return
+ :class:`.NullType` which disallowed subsequent JSON-like operators to be
+ used.
+
+ .. seealso::
+
+ :ref:`change_3503`
+
+ .. change::
+ :tags: bug, postgresql
+ :tickets: 3503
+
+ The :class:`.postgresql.JSON`, :class:`.postgresql.JSONB` and
+ :class:`.postgresql.HSTORE` datatypes now allow full control over the
+ return type from an indexed textual access operation, either ``column[someindex].astext``
+ for a JSON type or ``column[someindex]`` for an HSTORE type,
+ via the :paramref:`.postgresql.JSON.astext_type` and
+ :paramref:`.postgresql.HSTORE.text_type` parameters.
+
+ .. seealso::
+
+ :ref:`change_3503`
+
+
+ .. change::
+ :tags: bug, postgresql
+ :tickets: 3503
+
+ The :attr:`.postgresql.JSON.Comparator.astext` modifier no longer
+ calls upon :meth:`.ColumnElement.cast` implicitly, as PG's JSON/JSONB
+ types allow cross-casting between each other as well. Code that
+ makes use of :meth:`.ColumnElement.cast` on JSON indexed access,
+ e.g. ``col[someindex].cast(Integer)``, will need to be changed
+ to call :attr:`.postgresql.JSON.Comparator.astext` explicitly.
+
+ .. seealso::
+
+ :ref:`change_3503_cast`
+
+
+ .. change::
+ :tags: bug, orm, postgresql
+ :tickets: 3514
+
+ Additional fixes have been made regarding the value of ``None``
+ in conjunction with the Postgresql :class:`.JSON` type. When
+ the :paramref:`.JSON.none_as_null` flag is left at its default
+      value of ``False``, the ORM will now correctly insert the JSON
+ "'null'" string into the column whenever the value on the ORM
+ object is set to the value ``None`` or when the value ``None``
+ is used with :meth:`.Session.bulk_insert_mappings`,
+ **including** if the column has a default or server default on it.
+
+ .. seealso::
+
+ :ref:`change_3514`
+
+ :ref:`change_3250`
+
+ .. change::
+ :tags: feature, postgresql
+ :tickets: 3514
+
+ Added a new constant :attr:`.postgresql.JSON.NULL`, indicating
+ that the JSON NULL value should be used for a value
+ regardless of other settings.
+
+ .. seealso::
+
+ :ref:`change_3514_jsonnull`
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 2528
+
+ The behavior of the :func:`.union` construct and related constructs
+ such as :meth:`.Query.union` now handle the case where the embedded
+ SELECT statements need to be parenthesized due to the fact that they
+ include LIMIT, OFFSET and/or ORDER BY. These queries **do not work
+ on SQLite**, and will fail on that backend as they did before, but
+ should now work on all other backends.
+
+ .. seealso::
+
+ :ref:`change_2528`
+
+ .. change::
+ :tags: bug, mssql
+ :tickets: 3504
+
+ Fixed issue where the SQL Server dialect would reflect a string-
+ or other variable-length column type with unbounded length
+ by assigning the token ``"max"`` to the
+ length attribute of the string. While using the ``"max"`` token
+ explicitly is supported by the SQL Server dialect, it isn't part
+ of the normal contract of the base string types, and instead the
+ length should just be left as None. The dialect now assigns the
+ length to None on reflection of the type so that the type behaves
+ normally in other contexts.
+
+ .. seealso::
+
+        :ref:`change_3504`
\ No newline at end of file
diff --git a/doc/build/changelog/index.rst b/doc/build/changelog/index.rst
index 8c5be99b8..a9f294e87 100644
--- a/doc/build/changelog/index.rst
+++ b/doc/build/changelog/index.rst
@@ -12,7 +12,7 @@ Current Migration Guide
.. toctree::
:titlesonly:
- migration_10
+ migration_11
Change logs
-----------
@@ -20,6 +20,7 @@ Change logs
.. toctree::
:titlesonly:
+ changelog_11
changelog_10
changelog_09
changelog_08
@@ -38,6 +39,7 @@ Older Migration Guides
.. toctree::
:titlesonly:
+ migration_10
migration_09
migration_08
migration_07
diff --git a/doc/build/changelog/migration_11.rst b/doc/build/changelog/migration_11.rst
new file mode 100644
index 000000000..78f77e694
--- /dev/null
+++ b/doc/build/changelog/migration_11.rst
@@ -0,0 +1,884 @@
+==============================
+What's New in SQLAlchemy 1.1?
+==============================
+
+.. admonition:: About this Document
+
+ This document describes changes between SQLAlchemy version 1.0,
+ at the moment the current release series of SQLAlchemy,
+ and SQLAlchemy version 1.1, which is the current development
+ series of SQLAlchemy.
+
+ As the 1.1 series is under development, issues that are targeted
+ at this series can be seen under the
+ `1.1 milestone <https://bitbucket.org/zzzeek/sqlalchemy/issues?milestone=1.1>`_.
+ Please note that the set of issues within the milestone is not fixed;
+ some issues may be moved to later milestones in order to allow
+ for a timely release.
+
+ Document last updated: September 28, 2015
+
+Introduction
+============
+
+This guide introduces what's new in SQLAlchemy version 1.1,
+and also documents changes which affect users migrating
+their applications from the 1.0 series of SQLAlchemy to 1.1.
+
+Please carefully review the sections on behavioral changes for
+potentially backwards-incompatible changes in behavior.
+
+Platform / Installer Changes
+============================
+
+Setuptools is now required for install
+--------------------------------------
+
+SQLAlchemy's ``setup.py`` file has for many years supported operation
+both with Setuptools installed and without, supporting a "fallback" mode
+that uses straight Distutils. As a Setuptools-less Python environment is
+now unheard of, and in order to support the featureset of Setuptools
+more fully, in particular to support py.test's integration with it,
+``setup.py`` now depends on Setuptools fully.
+
+.. seealso::
+
+ :ref:`installation`
+
+:ticket:`3489`
+
+Enabling / Disabling C Extension builds is only via environment variable
+------------------------------------------------------------------------
+
+The C extensions build by default during install whenever possible.
+To disable C extension builds, the ``DISABLE_SQLALCHEMY_CEXT`` environment
+variable was made available as of SQLAlchemy 0.8.6 / 0.9.4. The previous
+approach of using the ``--without-cextensions`` argument has been removed,
+as it relies on deprecated features of setuptools.
+
+.. seealso::
+
+ :ref:`c_extensions`
+
+:ticket:`3500`
+
+
+New Features and Improvements - ORM
+===================================
+
+.. _change_2677:
+
+New Session lifecycle events
+----------------------------
+
+The :class:`.Session` has long supported events that allow some degree
+of tracking of state changes to objects, including
+:meth:`.SessionEvents.before_attach`, :meth:`.SessionEvents.after_attach`,
+and :meth:`.SessionEvents.before_flush`. The Session documentation also
+documents major object states at :ref:`session_object_states`. However,
+there has never been system of tracking objects specifically as they
+pass through these transitions. Additionally, the status of "deleted" objects
+has historically been murky as the objects act somewhere between
+the "persistent" and "detached" states.
+
+To clean up this area and allow the realm of session state transition
+to be fully transparent, a new series of events have been added that
+are intended to cover every possible way that an object might transition
+between states, and additionally the "deleted" status has been given
+its own official state name within the realm of session object states.
+
+New State Transition Events
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Transitions between all states of an object such as :term:`persistent`,
+:term:`pending` and others can now be intercepted in terms of a
+session-level event intended to cover a specific transition.
+Transitions as objects move into a :class:`.Session`, move out of a
+:class:`.Session`, and even all the transitions which occur when the
+transaction is rolled back using :meth:`.Session.rollback`
+are explicitly present in the interface of :class:`.SessionEvents`.
+
+In total, there are **ten new events**. A summary of these events is in a
+newly written documentation section :ref:`session_lifecycle_events`.
+
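+For example, the moment at which an object moves from "persistent" to
+"deleted" within a flush can be intercepted directly (a minimal sketch;
+listening on the :class:`.Session` class here applies to all sessions)::
+
+    from sqlalchemy import event
+    from sqlalchemy.orm import Session
+
+    @event.listens_for(Session, "persistent_to_deleted")
+    def receive_persistent_to_deleted(session, instance):
+        # invoked during flush, as the DELETE for "instance" proceeds
+        print("deleted within this session: %r" % instance)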
+
+New Object State "deleted" is added, deleted objects no longer "persistent"
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The :term:`persistent` state of an object in the :class:`.Session` has
+always been documented as an object that has a valid database identity;
+however in the case of objects that were deleted within a flush, they
+have always been in a grey area where they are not really "detached"
+from the :class:`.Session` yet, because they can still be restored
+within a rollback, but are not really "persistent" because their database
+identity has been deleted and they aren't present in the identity map.
+
+To resolve this grey area given the new events, a new object state
+:term:`deleted` is introduced. This state exists between the "persistent" and
+"detached" states. An object that is marked for deletion via
+:meth:`.Session.delete` remains in the "persistent" state until a flush
+proceeds; at that point, it is removed from the identity map, moves
+to the "deleted" state, and the :meth:`.SessionEvents.persistent_to_deleted`
+hook is invoked. If the :class:`.Session` object's transaction is rolled
+back, the object is restored as persistent; the
+:meth:`.SessionEvents.deleted_to_persistent` transition is called. Otherwise
+if the :class:`.Session` object's transaction is committed,
+the :meth:`.SessionEvents.deleted_to_detached` transition is invoked.
+
+Additionally, the :attr:`.InstanceState.persistent` accessor **no longer returns
+True** for an object that is in the new "deleted" state; instead, the
+:attr:`.InstanceState.deleted` accessor has been enhanced to reliably
+report on this new state. When the object is detached, the :attr:`.InstanceState.deleted`
+accessor returns False and the :attr:`.InstanceState.detached` accessor is True
+instead. To determine if an object was deleted either in the current
+transaction or in a previous transaction, use the
+:attr:`.InstanceState.was_deleted` accessor.
+
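+The distinction can be observed with the runtime inspection API (a minimal
+sketch; ``User`` and ``session`` are assumed to be an ordinary mapped class
+and :class:`.Session`)::
+
+    from sqlalchemy import inspect
+
+    user = session.query(User).first()
+    session.delete(user)
+    session.flush()
+
+    state = inspect(user)
+    state.persistent    # False - no longer considered persistent
+    state.deleted       # True, until the transaction is committed
+    state.was_deleted   # True, in this or a previous transaction
+
+    session.commit()
+    inspect(user).detached   # True - the object is now detached
+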
+Strong Identity Map is Deprecated
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+One of the inspirations for the new series of transition events was to enable
+leak-proof tracking of objects as they move in and out of the identity map,
+so that a "strong reference" may be maintained mirroring the object
+moving in and out of this map. With this new capability, there is no longer
+any need for the :paramref:`.Session.weak_identity_map` parameter and the
+corresponding :class:`.StrongIdentityMap` object. This option has remained
+in SQLAlchemy for many years as the "strong-referencing" behavior used to be
+the only behavior available, and many applications were written to assume
+this behavior. It has long been recommended that strong-reference tracking
+of objects not be an intrinsic job of the :class:`.Session` and instead
+be an application-level construct built as needed by the application; the
+new event model allows even the exact behavior of the strong identity map
+to be replicated. See :ref:`session_referencing_behavior` for a new
+recipe illustrating how to replace the strong identity map.
+
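+The following is a condensed sketch of such a recipe, holding strong
+references to all persistent objects inside ``session.info`` (event names
+here are assumed from the new 1.1 lifecycle event suite; the complete
+recipe is at :ref:`session_referencing_behavior`)::
+
+    from sqlalchemy import event
+
+    def strong_reference_session(session):
+        @event.listens_for(session, "pending_to_persistent")
+        @event.listens_for(session, "deleted_to_persistent")
+        @event.listens_for(session, "detached_to_persistent")
+        @event.listens_for(session, "loaded_as_persistent")
+        def strong_ref_object(sess, instance):
+            # hold a strong reference while the object is persistent
+            sess.info.setdefault('refs', set()).add(instance)
+
+        @event.listens_for(session, "persistent_to_detached")
+        @event.listens_for(session, "persistent_to_deleted")
+        @event.listens_for(session, "persistent_to_transient")
+        def deref_object(sess, instance):
+            # release the reference once the object leaves "persistent"
+            sess.info.setdefault('refs', set()).discard(instance)
+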
+:ticket:`2677`
+
+.. _change_3499:
+
+Changes regarding "unhashable" types
+------------------------------------
+
+The :class:`.Query` object has a well-known behavior of "deduping"
+returned rows that contain at least one ORM-mapped entity (e.g., a
+full mapped object, as opposed to individual column values). The
+primary purpose of this is so that the handling of entities works
+smoothly in conjunction with the identity map, including to
+accommodate for the duplicate entities normally represented within
+joined eager loading, as well as when joins are used for the purposes
+of filtering on additional columns.
+
+This deduplication relies upon the hashability of the elements within
+the row. With the introduction of Postgresql's special types like
+:class:`.postgresql.ARRAY`, :class:`.postgresql.HSTORE` and
+:class:`.postgresql.JSON`, the experience of types within rows being
+unhashable and encountering problems here is more prevalent than
+it was previously.
+
+In fact, SQLAlchemy has since version 0.8 included a flag on datatypes that
+are noted as "unhashable"; however, this flag was not used consistently
+on built-in types. As described in :ref:`change_3499_postgresql`, this
+flag is now set consistently for all of Postgresql's "structural" types.
+
+The "unhashable" flag is also set on the :class:`.NullType` type,
+as :class:`.NullType` is used to refer to any expression of unknown
+type.
+
+Additionally, the treatment of a so-called "unhashable" type is slightly
+different from how it has been in previous releases; internally we are using
+the ``id()`` function to get a "hash value" from these structures, just
+as we would any ordinary mapped object. This replaces the previous
+approach which applied a counter to the object.
+
+:ticket:`3499`
+
+.. _change_3321:
+
+Specific checks added for passing mapped classes, instances as SQL literals
+---------------------------------------------------------------------------
+
+The typing system now has specific checks for passing of SQLAlchemy
+"inspectable" objects in contexts where they would otherwise be handled as
+literal values. Any SQLAlchemy built-in object that is legal to pass as a
+SQL value includes a method ``__clause_element__()`` which provides a
+valid SQL expression for that object. For SQLAlchemy objects that
+don't provide this, such as mapped classes, mappers, and mapped
+instances, a more informative error message is emitted rather than
+allowing the DBAPI to receive the object and fail later. An example
+is illustrated below, where a string-based attribute ``User.name`` is
+compared to a full instance of ``User()``, rather than against a
+string value::
+
+ >>> some_user = User()
+ >>> q = s.query(User).filter(User.name == some_user)
+ ...
+ sqlalchemy.exc.ArgumentError: Object <__main__.User object at 0x103167e90> is not legal as a SQL literal value
+
+The exception is now immediate when the comparison is made between
+``User.name == some_user``. Previously, a comparison like the above
+would produce a SQL expression that would only fail once resolved
+into a DBAPI execution call; the mapped ``User`` object would
+ultimately become a bound parameter that would be rejected by the
+DBAPI.
+
+Note that in the above example, the expression fails because
+``User.name`` is a string-based (e.g. column oriented) attribute.
+The change does *not* impact the usual case of comparing a many-to-one
+relationship attribute to an object, which is handled distinctly::
+
+ >>> # Address.user refers to the User mapper, so
+ >>> # this is of course still OK!
+ >>> q = s.query(Address).filter(Address.user == some_user)
+
+
+:ticket:`3321`
+
+.. _change_3250:
+
+New options allowing explicit persistence of NULL over a default
+----------------------------------------------------------------
+
+Related to the new JSON-NULL support added to Postgresql as part of
+:ref:`change_3514`, the base :class:`.TypeEngine` class now supports
+a method :meth:`.TypeEngine.evaluates_none` which allows a positive set
+of the ``None`` value on an attribute to be persisted as NULL, rather than
+omitting the column from the INSERT statement, which has the effect of using
+the column-level default. This allows a mapper-level
+configuration of the existing object-level technique of assigning
+:func:`.sql.null` to the attribute.
+
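+A minimal sketch (``Base`` and the column default here are illustrative)::
+
+    from sqlalchemy import Column, Integer, String
+
+    class Widget(Base):
+        __tablename__ = 'widget'
+
+        id = Column(Integer, primary_key=True)
+
+        # an attribute explicitly set to None is persisted as NULL,
+        # rather than being omitted from the INSERT (which would
+        # otherwise cause the default to take effect)
+        data = Column(
+            String(50).evaluates_none(), nullable=True,
+            default="default value")
+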
+.. seealso::
+
+ :ref:`session_forcing_null`
+
+:ticket:`3250`
+
+New Features and Improvements - Core
+====================================
+
+
+.. _change_2528:
+
+A UNION or similar of SELECTs with LIMIT/OFFSET/ORDER BY now parenthesizes the embedded selects
+-----------------------------------------------------------------------------------------------
+
+A behavior that, like others, was long constrained by SQLite's lack of
+capabilities has now been enhanced to work on all supporting backends.
+We refer to a query that
+is a UNION of SELECT statements that themselves contain row-limiting or ordering
+features which include LIMIT, OFFSET, and/or ORDER BY::
+
+ (SELECT x FROM table1 ORDER BY y LIMIT 1) UNION
+ (SELECT x FROM table2 ORDER BY y LIMIT 2)
+
+The above query requires parentheses around each sub-select in order to
+group the sub-results correctly. Production of the above statement in
+SQLAlchemy Core looks like::
+
+ stmt1 = select([table1.c.x]).order_by(table1.c.y).limit(1)
+    stmt2 = select([table2.c.x]).order_by(table2.c.y).limit(2)
+
+ stmt = union(stmt1, stmt2)
+
+Previously, the above construct would not produce parenthesization for the
+inner SELECT statements, producing a query that fails on all backends.
+
+The above formats will **continue to fail on SQLite**; additionally, the format
+that includes ORDER BY but no LIMIT/OFFSET will **continue to fail on Oracle**.
+This is not a backwards-incompatible change, because the queries fail without
+the parentheses as well; with the fix, the queries at least work on all other
+databases.
+
+In all cases, in order to produce a UNION of limited SELECT statements that
+also works on SQLite and in all cases on Oracle, the
+subqueries must be a SELECT of an ALIAS::
+
+ stmt1 = select([table1.c.x]).order_by(table1.c.y).limit(1).alias().select()
+ stmt2 = select([table2.c.x]).order_by(table2.c.y).limit(2).alias().select()
+
+ stmt = union(stmt1, stmt2)
+
+This workaround works on all SQLAlchemy versions. In the ORM, it looks like::
+
+ stmt1 = session.query(Model1).order_by(Model1.y).limit(1).subquery().select()
+ stmt2 = session.query(Model2).order_by(Model2.y).limit(1).subquery().select()
+
+ stmt = session.query(Model1).from_statement(stmt1.union(stmt2))
+
+The behavior here has many parallels to the "join rewriting" behavior
+introduced in SQLAlchemy 0.9 in :ref:`feature_joins_09`; however in this case
+we have opted not to add new rewriting behavior to accommodate this
+case for SQLite.
+The existing rewriting behavior is very complicated already, and the case of
+UNIONs with parenthesized SELECT statements is much less common than the
+"right-nested-join" use case of that feature.
+
+:ticket:`2528`
+
+.. _change_3516:
+
+Array support added to Core; new ANY and ALL operators
+------------------------------------------------------
+
+Along with the enhancements made to the Postgresql :class:`.ARRAY`
+type described in :ref:`change_3503`, the base class of :class:`.ARRAY`
+itself has been moved to Core in a new class :class:`.types.Array`.
+
+Arrays are part of the SQL standard, as are several array-oriented functions
+such as ``array_agg()`` and ``unnest()``. In support of these constructs
+for not just PostgreSQL but also potentially for other array-capable backends
+in the future such as DB2, the majority of array logic for SQL expressions
+is now in Core. The :class:`.Array` type still **only works on
+Postgresql**, however it can be used directly, supporting special array
+use cases such as indexed access, as well as support for the ANY and ALL
+operators::
+
+ mytable = Table("mytable", metadata,
+ Column("data", Array(Integer, dimensions=2))
+ )
+
+ expr = mytable.c.data[5][6]
+
+ expr = mytable.c.data[5].any(12)
+
+In support of ANY and ALL, the :class:`.Array` type retains the same
+:meth:`.Array.Comparator.any` and :meth:`.Array.Comparator.all` methods
+from the PostgreSQL type, but also exports these operations to new
+standalone operator functions :func:`.sql.expression.any_` and
+:func:`.sql.expression.all_`. These two functions work in more
+of the traditional SQL way, allowing a right-side expression form such
+as::
+
+ from sqlalchemy import any_, all_
+
+ select([mytable]).where(12 == any_(mytable.c.data[5]))
+
+For the PostgreSQL-specific operators "contains", "contained_by", and
+"overlaps", one should continue to use the :class:`.postgresql.ARRAY`
+type directly, which provides all functionality of the :class:`.Array`
+type as well.
+
+The :func:`.sql.expression.any_` and :func:`.sql.expression.all_` operators
+are open-ended at the Core level, however their interpretation by backend
+databases is limited. On the Postgresql backend, the two operators
+**only accept array values**. Whereas on the MySQL backend, they
+**only accept subquery values**. On MySQL, one can use an expression
+such as::
+
+ from sqlalchemy import any_, all_
+
+ subq = select([mytable.c.value])
+ select([mytable]).where(12 > any_(subq))
+
+
+:ticket:`3516`
+
+.. _change_3132:
+
+New Function features, "WITHIN GROUP", array_agg and set aggregate functions
+----------------------------------------------------------------------------
+
+With the new :class:`.Array` type we can also implement a pre-typed
+function for the ``array_agg()`` SQL function that returns an array,
+which is now available using :class:`.array_agg`::
+
+ from sqlalchemy import func
+ stmt = select([func.array_agg(table.c.value)])
+
+A Postgresql element for an aggregate ORDER BY is also added via
+:class:`.postgresql.aggregate_order_by`::
+
+ from sqlalchemy.dialects.postgresql import aggregate_order_by
+ expr = func.array_agg(aggregate_order_by(table.c.a, table.c.b.desc()))
+ stmt = select([expr])
+
+Producing::
+
+ SELECT array_agg(table1.a ORDER BY table1.b DESC) AS array_agg_1 FROM table1
+
+The PG dialect itself also provides an :func:`.postgresql.array_agg` wrapper to
+ensure the :class:`.postgresql.ARRAY` type::
+
+ from sqlalchemy.dialects.postgresql import array_agg
+ stmt = select([array_agg(table.c.value).contains('foo')])
+
+
+Additionally, functions like ``percentile_cont()``, ``percentile_disc()``,
+``rank()``, ``dense_rank()`` and others that require an ordering via
+``WITHIN GROUP (ORDER BY <expr>)`` are now available via the
+:meth:`.FunctionElement.within_group` modifier::
+
+ from sqlalchemy import func
+ stmt = select([
+ department.c.id,
+ func.percentile_cont(0.5).within_group(
+ department.c.salary.desc()
+ )
+ ])
+
+The above statement would produce SQL similar to::
+
+ SELECT department.id, percentile_cont(0.5)
+ WITHIN GROUP (ORDER BY department.salary DESC)
+
+Placeholders with correct return types are now provided for these functions,
+and include :class:`.percentile_cont`, :class:`.percentile_disc`,
+:class:`.rank`, :class:`.dense_rank`, :class:`.mode`, :class:`.percent_rank`,
+and :class:`.cume_dist`.
+
+:ticket:`3132` :ticket:`1370`
+
+.. _change_2919:
+
+TypeDecorator now works with Enum, Boolean, "schema" types automatically
+------------------------------------------------------------------------
+
+The :class:`.SchemaType` types, which include types such as :class:`.Enum`
+and :class:`.Boolean` that, in addition to corresponding to a database
+type, also generate either a CHECK constraint or, in the case of Postgresql
+ENUM, a new CREATE TYPE statement, will now work automatically with
+:class:`.TypeDecorator` recipes. Previously, a :class:`.TypeDecorator` for
+an :class:`.postgresql.ENUM` had to look like this::
+
+ # old way
+ class MyEnum(TypeDecorator, SchemaType):
+ impl = postgresql.ENUM('one', 'two', 'three', name='myenum')
+
+ def _set_table(self, table):
+ self.impl._set_table(table)
+
+The :class:`.TypeDecorator` now propagates those additional events so it
+can be done like any other type::
+
+ # new way
+ class MyEnum(TypeDecorator):
+ impl = postgresql.ENUM('one', 'two', 'three', name='myenum')
+
+
+:ticket:`2919`
+
+.. _change_3531:
+
+The type_coerce function is now a persistent SQL element
+--------------------------------------------------------
+
+The :func:`.expression.type_coerce` function previously would return
+an object either of type :class:`.BindParameter` or :class:`.Label`, depending
+on the input. An effect this would have was that in the case where expression
+transformations were used, such as the conversion of an element from a
+:class:`.Column` to a :class:`.BindParameter` that's critical to ORM-level
+lazy loading, the type coercion information would not be used since it would
+have been lost already.
+
+To improve this behavior, the function now returns a persistent
+:class:`.TypeCoerce` container around the given expression, which itself
+remains unaffected; this construct is evaluated explicitly by the
+SQL compiler. This allows for the coercion of the inner expression
+to be maintained no matter how the statement is modified, including if
+the contained element is replaced with a different one, as is common
+within the ORM's lazy loading feature.
+
+The test case illustrating the effect makes use of a heterogeneous
+primaryjoin condition in conjunction with custom types and lazy loading.
+Given a custom type that applies CAST on both the column-expression and
+bind-expression sides::
+
+ class StringAsInt(TypeDecorator):
+ impl = String
+
+ def column_expression(self, col):
+ return cast(col, Integer)
+
+ def bind_expression(self, value):
+ return cast(value, String)
+
+Then, a mapping where we are equating a string "id" column on one
+table to an integer "id" column on the other::
+
+ class Person(Base):
+ __tablename__ = 'person'
+ id = Column(StringAsInt, primary_key=True)
+
+ pets = relationship(
+ 'Pets',
+ primaryjoin=(
+ 'foreign(Pets.person_id)'
+ '==cast(type_coerce(Person.id, Integer), Integer)'
+ )
+ )
+
+ class Pets(Base):
+ __tablename__ = 'pets'
+ id = Column('id', Integer, primary_key=True)
+ person_id = Column('person_id', Integer)
+
+Above, in the :paramref:`.relationship.primaryjoin` expression, we are
+using :func:`.type_coerce` to handle bound parameters passed via
+lazyloading as integers, since we already know these will come from
+our ``StringAsInt`` type which maintains the value as an integer in
+Python. We are then using :func:`.cast` so that as a SQL expression,
+the VARCHAR "id" column will be CAST to an integer for a regular non-
+converted join as with :meth:`.Query.join` or :func:`.orm.joinedload`.
+That is, a joinedload of ``.pets`` looks like::
+
+ SELECT person.id AS person_id, pets_1.id AS pets_1_id,
+ pets_1.person_id AS pets_1_person_id
+ FROM person
+ LEFT OUTER JOIN pets AS pets_1
+ ON pets_1.person_id = CAST(person.id AS INTEGER)
+
+Without the CAST in the ON clause of the join, strongly-typed databases
+such as Postgresql will refuse to implicitly compare the integer and fail.
+
+The lazyload case of ``.pets`` relies upon replacing
+the ``Person.id`` column at load time with a bound parameter, which receives
+a Python-loaded value. This replacement is specifically where the intent
+of our :func:`.type_coerce` function would be lost. Prior to the change,
+this lazy load comes out as::
+
+ SELECT pets.id AS pets_id, pets.person_id AS pets_person_id
+ FROM pets
+ WHERE pets.person_id = CAST(CAST(%(param_1)s AS VARCHAR) AS INTEGER)
+ {'param_1': 5}
+
+Where above, we see that our in-Python value of ``5`` is CAST first
+to a VARCHAR, then back to an INTEGER in SQL; a double CAST which works,
+but is nevertheless not what we asked for.
+
+With the change, the :func:`.type_coerce` function maintains a wrapper
+even after the column is swapped out for a bound parameter, and the query now
+looks like::
+
+ SELECT pets.id AS pets_id, pets.person_id AS pets_person_id
+ FROM pets
+ WHERE pets.person_id = CAST(%(param_1)s AS INTEGER)
+ {'param_1': 5}
+
+Where our outer CAST that's in our primaryjoin still takes effect, but the
+needless CAST that's part of the ``StringAsInt`` custom type is removed
+as intended by the :func:`.type_coerce` function.
+
+
+:ticket:`3531`
+
+
+Key Behavioral Changes - ORM
+============================
+
+
+Key Behavioral Changes - Core
+=============================
+
+
+Dialect Improvements and Changes - Postgresql
+=============================================
+
+.. _change_3499_postgresql:
+
+ARRAY and JSON types now correctly specify "unhashable"
+-------------------------------------------------------
+
+As described in :ref:`change_3499`, the ORM relies upon being able to
+produce a hash function for column values when a query's selected entities
+mixes full ORM entities with column expressions. The ``hashable=False``
+flag is now correctly set on all of PG's "data structure" types, including
+:class:`.ARRAY` and :class:`.JSON`. The :class:`.JSONB` and :class:`.HSTORE`
+types already included this flag. For :class:`.ARRAY`,
+this is conditional based on the :paramref:`.postgresql.ARRAY.as_tuple`
+flag; however, it should no longer be necessary to set this flag
+in order to have an array value present in a composed ORM row.
+
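+For example, a query that mixes a full entity with an ARRAY-typed column
+(a hypothetical ``Widget`` mapping with a ``tags`` column of type
+:class:`.postgresql.ARRAY`) no longer requires special steps::
+
+    # previously this could fail with "unhashable type: 'list'" during
+    # the ORM's row deduplication; it now works as expected
+    rows = session.query(Widget, Widget.tags).all()
+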
+.. seealso::
+
+ :ref:`change_3499`
+
+ :ref:`change_3503`
+
+:ticket:`3499`
+
+.. _change_3503:
+
+Correct SQL Types are Established from Indexed Access of ARRAY, JSON, HSTORE
+-----------------------------------------------------------------------------
+
+For all three of :class:`~.postgresql.ARRAY`, :class:`~.postgresql.JSON` and :class:`.HSTORE`,
+the SQL type assigned to the expression returned by indexed access, e.g.
+``col[someindex]``, should be correct in all cases.
+
+This includes:
+
+* The SQL type assigned to indexed access of an :class:`~.postgresql.ARRAY` takes into
+ account the number of dimensions configured. An :class:`~.postgresql.ARRAY` with three
+ dimensions will return a SQL expression with a type of :class:`~.postgresql.ARRAY` of
+ one less dimension. Given a column with type ``ARRAY(Integer, dimensions=3)``,
+ we can now perform this expression::
+
+ int_expr = col[5][6][7] # returns an Integer expression object
+
+ Previously, the indexed access to ``col[5]`` would return an expression of
+ type :class:`.Integer` where we could no longer perform indexed access
+ for the remaining dimensions, unless we used :func:`.cast` or :func:`.type_coerce`.
+
+* The :class:`~.postgresql.JSON` and :class:`~.postgresql.JSONB` types now mirror what Postgresql
+ itself does for indexed access. This means that all indexed access for
+ a :class:`~.postgresql.JSON` or :class:`~.postgresql.JSONB` type returns an expression that itself
+ is *always* :class:`~.postgresql.JSON` or :class:`~.postgresql.JSONB` itself, unless the
+ :attr:`~.postgresql.JSON.Comparator.astext` modifier is used. This means that whether
+ the indexed access of the JSON structure ultimately refers to a string,
+ list, number, or other JSON structure, Postgresql always considers it
+ to be JSON itself unless it is explicitly cast differently. Like
+ the :class:`~.postgresql.ARRAY` type, this means that it is now straightforward
+ to produce JSON expressions with multiple levels of indexed access::
+
+ json_expr = json_col['key1']['attr1'][5]
+
+* The "textual" type that is returned by indexed access of :class:`.HSTORE`
+ as well as the "textual" type that is returned by indexed access of
+ :class:`~.postgresql.JSON` and :class:`~.postgresql.JSONB` in conjunction with the
+ :attr:`~.postgresql.JSON.Comparator.astext` modifier is now configurable; it defaults
+ to :class:`.Text` in both cases but can be set to a user-defined
+ type using the :paramref:`.postgresql.JSON.astext_type` or
+ :paramref:`.postgresql.HSTORE.text_type` parameters.
+
+.. seealso::
+
+ :ref:`change_3503_cast`
+
+:ticket:`3499`
+:ticket:`3487`
+
+.. _change_3503_cast:
+
+The JSON cast() operation now requires ``.astext`` is called explicitly
+------------------------------------------------------------------------
+
+As part of the changes in :ref:`change_3503`, the workings of the
+:meth:`.ColumnElement.cast` operator on :class:`.postgresql.JSON` and
+:class:`.postgresql.JSONB` no longer implicitly invoke the
+:attr:`.JSON.Comparator.astext` modifier; Postgresql's JSON/JSONB types
+support CAST operations to each other without the "astext" aspect.
+
+This means that in most cases, an application that was doing this::
+
+ expr = json_col['somekey'].cast(Integer)
+
+Will now need to change to this::
+
+ expr = json_col['somekey'].astext.cast(Integer)
+
+
+
+.. _change_3514:
+
+Postgresql JSON "null" is inserted as expected with ORM operations, regardless of column default present
+-----------------------------------------------------------------------------------------------------------
+
+The :class:`.JSON` type has a flag :paramref:`.JSON.none_as_null` which
+when set to True indicates that the Python value ``None`` should translate
+into a SQL NULL rather than a JSON NULL value. This flag defaults to False,
+which means that the column should *never* insert SQL NULL or fall back
+to a default unless the :func:`.null` constant were used. However, this would
+fail in the ORM under two circumstances. One is when the column also contained
+a default or server_default value: a positive value of ``None`` on the mapped
+attribute would still result in the column-level default being triggered,
+replacing the ``None`` value::
+
+ obj = MyObject(json_value=None)
+ session.add(obj)
+    session.commit()  # would fire off default / server_default, not encode JSON "null"
+
+The other is when the :meth:`.Session.bulk_insert_mappings`
+method were used, ``None`` would be ignored in all cases::
+
+ session.bulk_insert_mappings(
+ MyObject,
+ [{"json_value": None}]) # would insert SQL NULL and/or trigger defaults
+
+The :class:`.JSON` type now implements the
+:attr:`.TypeEngine.should_evaluate_none` flag,
+indicating that ``None`` should not be ignored here; it is configured
+automatically based on the value of :paramref:`.JSON.none_as_null`.
+Thanks to :ticket:`3061`, we can differentiate when the value ``None`` is actively
+set by the user versus when it was never set at all.
+
+If the attribute is not set at all, then column level defaults *will*
+fire off and/or SQL NULL will be inserted as expected, as was the behavior
+previously. Below, the two variants are illustrated::
+
+ obj = MyObject(json_value=None)
+ session.add(obj)
+ session.commit() # *will not* fire off column defaults, will insert JSON 'null'
+
+ obj = MyObject()
+ session.add(obj)
+ session.commit() # *will* fire off column defaults, and/or insert SQL NULL
+
+:ticket:`3514`
+
+.. seealso::
+
+ :ref:`change_3250`
+
+ :ref:`change_3514_jsonnull`
+
+.. _change_3514_jsonnull:
+
+New JSON.NULL Constant Added
+----------------------------
+
+To ensure that an application can always have full control at the value level
+of whether a :class:`.postgresql.JSON` or :class:`.postgresql.JSONB` column
+should receive a SQL NULL or JSON ``"null"`` value, the constant
+:attr:`.postgresql.JSON.NULL` has been added, which in conjunction with
+:func:`.null` can be used to determine fully between SQL NULL and
+JSON ``"null"``, regardless of what :paramref:`.JSON.none_as_null` is set
+to::
+
+ from sqlalchemy import null
+ from sqlalchemy.dialects.postgresql import JSON
+
+ obj1 = MyObject(json_value=null()) # will *always* insert SQL NULL
+ obj2 = MyObject(json_value=JSON.NULL) # will *always* insert JSON string "null"
+
+ session.add_all([obj1, obj2])
+ session.commit()
+
+.. seealso::
+
+ :ref:`change_3514`
+
+:ticket:`3514`
+
+.. _change_2729:
+
+ARRAY with ENUM will now emit CREATE TYPE for the ENUM
+------------------------------------------------------
+
+A table definition like the following will now emit CREATE TYPE
+as expected::
+
+ enum = Enum(
+ 'manager', 'place_admin', 'carwash_admin',
+ 'parking_admin', 'service_admin', 'tire_admin',
+ 'mechanic', 'carwasher', 'tire_mechanic', name="work_place_roles")
+
+ class WorkPlacement(Base):
+ __tablename__ = 'work_placement'
+ id = Column(Integer, primary_key=True)
+ roles = Column(ARRAY(enum))
+
+
+ e = create_engine("postgresql://scott:tiger@localhost/test", echo=True)
+ Base.metadata.create_all(e)
+
+emits::
+
+ CREATE TYPE work_place_roles AS ENUM (
+ 'manager', 'place_admin', 'carwash_admin', 'parking_admin',
+ 'service_admin', 'tire_admin', 'mechanic', 'carwasher',
+ 'tire_mechanic')
+
+ CREATE TABLE work_placement (
+ id SERIAL NOT NULL,
+ roles work_place_roles[],
+ PRIMARY KEY (id)
+ )
+
+
+:ticket:`2729`
+
+Dialect Improvements and Changes - MySQL
+=============================================
+
+
+Dialect Improvements and Changes - SQLite
+=============================================
+
+.. _change_sqlite_schemas:
+
+Improved Support for Remote Schemas
+------------------------------------
+
+The SQLite dialect now implements :meth:`.Inspector.get_schema_names`
+and additionally has improved support for tables and indexes that are
+created and reflected from a remote schema, which in SQLite is a
+database that is assigned a name via the ``ATTACH`` statement; previously,
+the ``CREATE INDEX`` DDL didn't work correctly for a schema-bound table.
+Additionally, the :meth:`.Inspector.get_foreign_keys` method will now indicate
+the given schema in the results. Cross-schema foreign keys aren't supported.
+
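+The following is a minimal sketch of how this might be used; the file, schema,
+and table names here are hypothetical::
+
+    from sqlalchemy import create_engine, inspect
+
+    engine = create_engine("sqlite:///main.db")
+
+    with engine.connect() as conn:
+        # attach a second database file under the schema name "remote"
+        conn.execute("ATTACH DATABASE 'remote.db' AS remote")
+
+        insp = inspect(conn)
+
+        # the attached database now appears as a schema name
+        print(insp.get_schema_names())  # e.g. ['main', 'remote']
+
+        # reflection against the remote schema, such as foreign keys,
+        # now reports the schema in its results
+        print(insp.get_foreign_keys("some_table", schema="remote"))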
+
+Dialect Improvements and Changes - SQL Server
+=============================================
+
+.. _change_3504:
+
+String / varlength types no longer represent "max" explicitly on reflection
+---------------------------------------------------------------------------
+
+When reflecting a type such as :class:`.String`, :class:`.Text`, etc.
+which includes a length, an "un-lengthed" type under SQL Server would
+copy the "length" parameter as the value ``"max"``::
+
+ >>> from sqlalchemy import create_engine, inspect
+ >>> engine = create_engine('mssql+pyodbc://scott:tiger@ms_2008', echo=True)
+ >>> engine.execute("create table s (x varchar(max), y varbinary(max))")
+ >>> insp = inspect(engine)
+ >>> for col in insp.get_columns("s"):
+ ... print col['type'].__class__, col['type'].length
+ ...
+ <class 'sqlalchemy.sql.sqltypes.VARCHAR'> max
+ <class 'sqlalchemy.dialects.mssql.base.VARBINARY'> max
+
+The "length" parameter in the base types is expected to be an integer value
+or None only; None indicates unbounded length which the SQL Server dialect
+interprets as "max". The fix then is so that these lengths come
+out as None, so that the type objects work in non-SQL Server contexts::
+
+ >>> for col in insp.get_columns("s"):
+ ... print col['type'].__class__, col['type'].length
+ ...
+ <class 'sqlalchemy.sql.sqltypes.VARCHAR'> None
+ <class 'sqlalchemy.dialects.mssql.base.VARBINARY'> None
+
+Applications which may have been relying on a direct comparison of the "length"
+value to the string "max" should consider the value of ``None`` to mean
+the same thing.
+
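+For example, a check that previously compared against the string ``"max"``
+may be adapted as in this hypothetical sketch::
+
+    col = insp.get_columns("s")[0]
+
+    # previously the reflected length came back as the string "max";
+    # it is now None for unbounded types
+    is_unbounded = col['type'].length is None
+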
+:ticket:`3504`
+
+.. _change_3434:
+
+The legacy_schema_aliasing flag is now set to False
+---------------------------------------------------
+
+SQLAlchemy 1.0.5 introduced the ``legacy_schema_aliasing`` flag to the
+MSSQL dialect, allowing so-called "legacy mode" aliasing to be turned off.
+This aliasing attempts to turn schema-qualified tables into aliases;
+given a table such as::
+
+ account_table = Table(
+ 'account', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('info', String(100)),
+ schema="customer_schema"
+ )
+
+The legacy mode of behavior will attempt to turn a schema-qualified table
+name into an alias::
+
+ >>> eng = create_engine("mssql+pymssql://mydsn", legacy_schema_aliasing=True)
+ >>> print(account_table.select().compile(eng))
+ SELECT account_1.id, account_1.info
+ FROM customer_schema.account AS account_1
+
+However, this aliasing has been shown to be unnecessary and in many cases
+produces incorrect SQL.
+
+In SQLAlchemy 1.1, the ``legacy_schema_aliasing`` flag now defaults to
+False, disabling this mode of behavior and allowing the MSSQL dialect to behave
+normally with schema-qualified tables. For applications which may rely
+on this behavior, set the flag back to True.
+
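+With the flag left at its default of ``False``, the schema-qualified table
+renders directly; the output below is an approximate sketch::
+
+    >>> eng = create_engine("mssql+pymssql://mydsn")
+    >>> print(account_table.select().compile(eng))
+    SELECT customer_schema.account.id, customer_schema.account.info
+    FROM customer_schema.account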
+
+:ticket:`3434`
+
+Dialect Improvements and Changes - Oracle
+=============================================
diff --git a/doc/build/conf.py b/doc/build/conf.py
index fa9be2d25..e19078a87 100644
--- a/doc/build/conf.py
+++ b/doc/build/conf.py
@@ -136,11 +136,11 @@ copyright = u'2007-2015, the SQLAlchemy authors and contributors'
# built documents.
#
# The short X.Y version.
-version = "1.0"
+version = "1.1"
# The full version, including alpha/beta/rc tags.
-release = "1.0.6"
+release = "1.1.0b1"
-release_date = "June 25, 2015"
+release_date = "not released"
site_base = os.environ.get("RTD_SITE_BASE", "http://www.sqlalchemy.org")
site_adapter_template = "docs_adapter.mako"
diff --git a/doc/build/core/ddl.rst b/doc/build/core/ddl.rst
index 0ba2f2806..820ba7b84 100644
--- a/doc/build/core/ddl.rst
+++ b/doc/build/core/ddl.rst
@@ -20,85 +20,100 @@ required, SQLAlchemy offers two techniques which can be used to add any DDL
based on any condition, either accompanying the standard generation of tables
or by itself.
-.. _schema_ddl_sequences:
-
-Controlling DDL Sequences
--------------------------
+Custom DDL
+----------
-The ``sqlalchemy.schema`` package contains SQL expression constructs that
-provide DDL expressions. For example, to produce a ``CREATE TABLE`` statement:
+Custom DDL phrases are most easily achieved using the
+:class:`~sqlalchemy.schema.DDL` construct. This construct works like all the
+other DDL elements except it accepts a string which is the text to be emitted:
.. sourcecode:: python+sql
- from sqlalchemy.schema import CreateTable
- {sql}engine.execute(CreateTable(mytable))
- CREATE TABLE mytable (
- col1 INTEGER,
- col2 INTEGER,
- col3 INTEGER,
- col4 INTEGER,
- col5 INTEGER,
- col6 INTEGER
- ){stop}
+ event.listen(
+ metadata,
+ "after_create",
+ DDL("ALTER TABLE users ADD CONSTRAINT "
+ "cst_user_name_length "
+ " CHECK (length(user_name) >= 8)")
+ )
-Above, the :class:`~sqlalchemy.schema.CreateTable` construct works like any
-other expression construct (such as ``select()``, ``table.insert()``, etc.). A
-full reference of available constructs is in :ref:`schema_api_ddl`.
+A more comprehensive method of creating libraries of DDL constructs is to use
+custom compilation - see :ref:`sqlalchemy.ext.compiler_toplevel` for
+details.
-The DDL constructs all extend a common base class which provides the
-capability to be associated with an individual
-:class:`~sqlalchemy.schema.Table` or :class:`~sqlalchemy.schema.MetaData`
-object, to be invoked upon create/drop events. Consider the example of a table
-which contains a CHECK constraint:
-.. sourcecode:: python+sql
+.. _schema_ddl_sequences:
+
+Controlling DDL Sequences
+-------------------------
- users = Table('users', metadata,
- Column('user_id', Integer, primary_key=True),
- Column('user_name', String(40), nullable=False),
- CheckConstraint('length(user_name) >= 8',name="cst_user_name_length")
- )
+The :class:`~.schema.DDL` construct introduced previously also has the
+ability to be invoked conditionally based on inspection of the
+database. This feature is available using the :meth:`.DDLElement.execute_if`
+method. For example, if we wanted to create a trigger but only on
+the Postgresql backend, we could invoke this as::
- {sql}users.create(engine)
- CREATE TABLE users (
- user_id SERIAL NOT NULL,
- user_name VARCHAR(40) NOT NULL,
- PRIMARY KEY (user_id),
- CONSTRAINT cst_user_name_length CHECK (length(user_name) >= 8)
- ){stop}
+ mytable = Table(
+ 'mytable', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', String(50))
+ )
-The above table contains a column "user_name" which is subject to a CHECK
-constraint that validates that the length of the string is at least eight
-characters. When a ``create()`` is issued for this table, DDL for the
-:class:`~sqlalchemy.schema.CheckConstraint` will also be issued inline within
-the table definition.
+ trigger = DDL(
+ "CREATE TRIGGER dt_ins BEFORE INSERT ON mytable "
+ "FOR EACH ROW BEGIN SET NEW.data='ins'; END"
+ )
-The :class:`~sqlalchemy.schema.CheckConstraint` construct can also be
-constructed externally and associated with the
-:class:`~sqlalchemy.schema.Table` afterwards::
+ event.listen(
+ mytable,
+ 'after_create',
+ trigger.execute_if(dialect='postgresql')
+ )
+
+The :paramref:`.DDLElement.execute_if.dialect` keyword also accepts a tuple
+of string dialect names::
- constraint = CheckConstraint('length(user_name) >= 8',name="cst_user_name_length")
- users.append_constraint(constraint)
+ event.listen(
+ mytable,
+ "after_create",
+ trigger.execute_if(dialect=('postgresql', 'mysql'))
+ )
+ event.listen(
+ mytable,
+ "before_drop",
+ trigger.execute_if(dialect=('postgresql', 'mysql'))
+ )
-So far, the effect is the same. However, if we create DDL elements
-corresponding to the creation and removal of this constraint, and associate
-them with the :class:`.Table` as events, these new events
-will take over the job of issuing DDL for the constraint. Additionally, the
-constraint will be added via ALTER:
+The :meth:`.DDLElement.execute_if` method can also work against a callable
+function that will receive the database connection in use. In the
+example below, we use this to conditionally create a CHECK constraint,
+first looking within the Postgresql catalogs to see if it exists:
.. sourcecode:: python+sql
- from sqlalchemy import event
+ def should_create(ddl, target, connection, **kw):
+ row = connection.execute(
+ "select conname from pg_constraint where conname='%s'" %
+ ddl.element.name).scalar()
+ return not bool(row)
+
+ def should_drop(ddl, target, connection, **kw):
+ return not should_create(ddl, target, connection, **kw)
event.listen(
users,
"after_create",
- AddConstraint(constraint)
+ DDL(
+ "ALTER TABLE users ADD CONSTRAINT "
+ "cst_user_name_length CHECK (length(user_name) >= 8)"
+ ).execute_if(callable_=should_create)
)
event.listen(
users,
"before_drop",
- DropConstraint(constraint)
+ DDL(
+ "ALTER TABLE users DROP CONSTRAINT cst_user_name_length"
+ ).execute_if(callable_=should_drop)
)
{sql}users.create(engine)
@@ -108,61 +123,67 @@ constraint will be added via ALTER:
PRIMARY KEY (user_id)
)
+ select conname from pg_constraint where conname='cst_user_name_length'
ALTER TABLE users ADD CONSTRAINT cst_user_name_length CHECK (length(user_name) >= 8){stop}
{sql}users.drop(engine)
+ select conname from pg_constraint where conname='cst_user_name_length'
ALTER TABLE users DROP CONSTRAINT cst_user_name_length
DROP TABLE users{stop}
-The real usefulness of the above becomes clearer once we illustrate the
-:meth:`.DDLElement.execute_if` method. This method returns a modified form of
-the DDL callable which will filter on criteria before responding to a
-received event. It accepts a parameter ``dialect``, which is the string
-name of a dialect or a tuple of such, which will limit the execution of the
-item to just those dialects. It also accepts a ``callable_`` parameter which
-may reference a Python callable which will be invoked upon event reception,
-returning ``True`` or ``False`` indicating if the event should proceed.
-
-If our :class:`~sqlalchemy.schema.CheckConstraint` was only supported by
-Postgresql and not other databases, we could limit its usage to just that dialect::
+Using the built-in DDLElement Classes
+--------------------------------------
- event.listen(
- users,
- 'after_create',
- AddConstraint(constraint).execute_if(dialect='postgresql')
- )
- event.listen(
- users,
- 'before_drop',
- DropConstraint(constraint).execute_if(dialect='postgresql')
- )
+The ``sqlalchemy.schema`` package contains SQL expression constructs that
+provide DDL expressions. For example, to produce a ``CREATE TABLE`` statement:
-Or to any set of dialects::
+.. sourcecode:: python+sql
- event.listen(
- users,
- "after_create",
- AddConstraint(constraint).execute_if(dialect=('postgresql', 'mysql'))
- )
- event.listen(
- users,
- "before_drop",
- DropConstraint(constraint).execute_if(dialect=('postgresql', 'mysql'))
- )
+ from sqlalchemy.schema import CreateTable
+ {sql}engine.execute(CreateTable(mytable))
+ CREATE TABLE mytable (
+ col1 INTEGER,
+ col2 INTEGER,
+ col3 INTEGER,
+ col4 INTEGER,
+ col5 INTEGER,
+ col6 INTEGER
+ ){stop}
-When using a callable, the callable is passed the ddl element, the
-:class:`.Table` or :class:`.MetaData`
-object whose "create" or "drop" event is in progress, and the
-:class:`.Connection` object being used for the
-operation, as well as additional information as keyword arguments. The
-callable can perform checks, such as whether or not a given item already
-exists. Below we define ``should_create()`` and ``should_drop()`` callables
-that check for the presence of our named constraint:
+Above, the :class:`~sqlalchemy.schema.CreateTable` construct works like any
+other expression construct (such as ``select()``, ``table.insert()``, etc.).
+All of SQLAlchemy's DDL oriented constructs are subclasses of
+the :class:`.DDLElement` base class; this is the base of all the
+objects corresponding to CREATE and DROP as well as ALTER,
+not only in SQLAlchemy but in Alembic Migrations as well.
+A full reference of available constructs is in :ref:`schema_api_ddl`.
+
+User-defined DDL constructs may also be created as subclasses of
+:class:`.DDLElement` itself. The documentation in
+:ref:`sqlalchemy.ext.compiler_toplevel` has several examples of this.
+
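+As a brief illustration, the following is a sketch in the spirit of those
+examples, defining a hypothetical ``CreateView`` element along with a
+compilation rule for it::
+
+    from sqlalchemy.ext.compiler import compiles
+    from sqlalchemy.schema import DDLElement
+
+    class CreateView(DDLElement):
+        """A hypothetical DDL element emitting CREATE VIEW for a selectable."""
+
+        def __init__(self, name, selectable):
+            self.name = name
+            self.selectable = selectable
+
+    @compiles(CreateView)
+    def _visit_create_view(element, compiler, **kw):
+        # render the SELECT using the DDL compiler's underlying SQL compiler
+        return "CREATE VIEW %s AS %s" % (
+            element.name,
+            compiler.sql_compiler.process(element.selectable)
+        )
+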
+The event-driven DDL system described in the previous section
+:ref:`schema_ddl_sequences` is available with other :class:`.DDLElement`
+objects as well. However, when dealing with the built-in constructs
+such as :class:`.CreateIndex`, :class:`.CreateSequence`, etc., the event
+system is of **limited** use, as methods like :meth:`.Table.create` and
+:meth:`.MetaData.create_all` will invoke these constructs unconditionally.
+In a future SQLAlchemy release, the DDL event system, including conditional
+execution, will be taken into account for built-in constructs that currently
+invoke in all cases.
+
+We can illustrate an event-driven
+example with the :class:`.AddConstraint` and :class:`.DropConstraint`
+constructs, as the event-driven system will work for CHECK and UNIQUE
+constraints, using these as we did in our previous example of
+:meth:`.DDLElement.execute_if`:
.. sourcecode:: python+sql
def should_create(ddl, target, connection, **kw):
- row = connection.execute("select conname from pg_constraint where conname='%s'" % ddl.element.name).scalar()
+ row = connection.execute(
+ "select conname from pg_constraint where conname='%s'" %
+ ddl.element.name).scalar()
return not bool(row)
def should_drop(ddl, target, connection, **kw):
@@ -194,26 +215,12 @@ that check for the presence of our named constraint:
ALTER TABLE users DROP CONSTRAINT cst_user_name_length
DROP TABLE users{stop}
-Custom DDL
-----------
-
-Custom DDL phrases are most easily achieved using the
-:class:`~sqlalchemy.schema.DDL` construct. This construct works like all the
-other DDL elements except it accepts a string which is the text to be emitted:
-
-.. sourcecode:: python+sql
-
- event.listen(
- metadata,
- "after_create",
- DDL("ALTER TABLE users ADD CONSTRAINT "
- "cst_user_name_length "
- " CHECK (length(user_name) >= 8)")
- )
-
-A more comprehensive method of creating libraries of DDL constructs is to use
-custom compilation - see :ref:`sqlalchemy.ext.compiler_toplevel` for
-details.
+While the above example is against the built-in :class:`.AddConstraint`
+and :class:`.DropConstraint` objects, the main usefulness of DDL events
+for now remains focused on the use of the :class:`.DDL` construct itself,
+as well as with user-defined subclasses of :class:`.DDLElement` that aren't
+already part of the :meth:`.MetaData.create_all`, :meth:`.Table.create`,
+and corresponding "drop" processes.
.. _schema_api_ddl:
@@ -233,6 +240,7 @@ DDL Expression Constructs API
:members:
:undoc-members:
+.. autoclass:: _CreateDropBase
.. autoclass:: CreateTable
:members:
diff --git a/doc/build/core/events.rst b/doc/build/core/events.rst
index d19b910b1..451cb9460 100644
--- a/doc/build/core/events.rst
+++ b/doc/build/core/events.rst
@@ -11,10 +11,6 @@ ORM events are described in :ref:`orm_event_toplevel`.
.. autoclass:: sqlalchemy.event.base.Events
:members:
-.. versionadded:: 0.7
- The event system supersedes the previous system of "extension", "listener",
- and "proxy" classes.
-
Connection Pool Events
-----------------------
diff --git a/doc/build/core/sqlelement.rst b/doc/build/core/sqlelement.rst
index 44a969dbb..cf52a0166 100644
--- a/doc/build/core/sqlelement.rst
+++ b/doc/build/core/sqlelement.rst
@@ -9,8 +9,12 @@ constructs is the :class:`.ClauseElement`, which is the base for several
sub-branches. The :class:`.ColumnElement` class is the fundamental unit
used to construct any kind of typed SQL expression.
+.. autofunction:: all_
+
.. autofunction:: and_
+.. autofunction:: any_
+
.. autofunction:: asc
.. autofunction:: between
@@ -65,6 +69,8 @@ used to construct any kind of typed SQL expression.
.. autofunction:: type_coerce
+.. autofunction:: within_group
+
.. autoclass:: BinaryExpression
:members:
@@ -129,9 +135,15 @@ used to construct any kind of typed SQL expression.
.. autoclass:: Tuple
:members:
+.. autoclass:: WithinGroup
+ :members:
+
.. autoclass:: sqlalchemy.sql.elements.True_
:members:
+.. autoclass:: TypeCoerce
+ :members:
+
.. autoclass:: sqlalchemy.sql.operators.custom_op
:members:
diff --git a/doc/build/core/tutorial.rst b/doc/build/core/tutorial.rst
index cc2a97625..c15279236 100644
--- a/doc/build/core/tutorial.rst
+++ b/doc/build/core/tutorial.rst
@@ -50,13 +50,13 @@ Version Check
=============
-A quick check to verify that we are on at least **version 1.0** of SQLAlchemy:
+A quick check to verify that we are on at least **version 1.1** of SQLAlchemy:
.. sourcecode:: pycon+sql
>>> import sqlalchemy
>>> sqlalchemy.__version__ # doctest:+SKIP
- 1.0.0
+ 1.1.0
Connecting
==========
@@ -754,8 +754,8 @@ method calls is called :term:`method chaining`.
.. _sqlexpression_text:
-Using Text
-===========
+Using Textual SQL
+=================
Our last example really became a handful to type. Going from what one
understands to be a textual SQL expression into a Python construct which
@@ -794,7 +794,27 @@ construct using the :meth:`~.TextClause.bindparams` method; if we are
using datatypes that need special handling as they are received in Python,
or we'd like to compose our :func:`~.expression.text` object into a larger
expression, we may also wish to use the :meth:`~.TextClause.columns` method
-in order to specify column return types and names.
+in order to specify column return types and names:
+
+.. sourcecode:: pycon+sql
+
+ >>> s = text(
+ ... "SELECT users.fullname || ', ' || addresses.email_address AS title "
+ ... "FROM users, addresses "
+ ... "WHERE users.id = addresses.user_id "
+ ... "AND users.name BETWEEN :x AND :y "
+ ... "AND (addresses.email_address LIKE :e1 "
+ ... "OR addresses.email_address LIKE :e2)")
+ >>> s = s.columns(title=String)
+ >>> s = s.bindparams(x='m', y='z', e1='%@aol.com', e2='%@msn.com')
+ >>> conn.execute(s).fetchall() # doctest:+NORMALIZE_WHITESPACE
+ SELECT users.fullname || ', ' || addresses.email_address AS title
+ FROM users, addresses
+ WHERE users.id = addresses.user_id AND users.name BETWEEN ? AND ? AND
+ (addresses.email_address LIKE ? OR addresses.email_address LIKE ?)
+ ('m', 'z', '%@aol.com', '%@msn.com')
+ {stop}[(u'Wendy Williams, wendy@aol.com',)]
+
:func:`~.expression.text` can also be used freely within a
:func:`~.expression.select` object, which accepts :func:`~.expression.text`
@@ -841,6 +861,11 @@ need to refer to any pre-established :class:`.Table` metadata:
the less flexibility and ability for manipulation/transformation
the statement will have.
+.. seealso::
+
+ :ref:`orm_tutorial_literal_sql` - integrating ORM-level queries with
+ :func:`.text`
+
.. versionchanged:: 1.0.0
The :func:`.select` construct emits warnings when string SQL
fragments are coerced to :func:`.text`, and :func:`.text` should
diff --git a/doc/build/core/type_api.rst b/doc/build/core/type_api.rst
index 88da4939e..7f0b68b64 100644
--- a/doc/build/core/type_api.rst
+++ b/doc/build/core/type_api.rst
@@ -11,9 +11,11 @@ Base Type API
.. autoclass:: Concatenable
:members:
- :inherited-members:
+.. autoclass:: Indexable
+ :members:
+
.. autoclass:: NullType
diff --git a/doc/build/core/type_basics.rst b/doc/build/core/type_basics.rst
index 1ff1baac2..ec3c14dd6 100644
--- a/doc/build/core/type_basics.rst
+++ b/doc/build/core/type_basics.rst
@@ -38,6 +38,9 @@ database column type available on the target database when issuing a
type is emitted in ``CREATE TABLE``, such as ``VARCHAR`` see `SQL
Standard Types`_ and the other sections of this chapter.
+.. autoclass:: Array
+ :members:
+
.. autoclass:: BigInteger
:members:
diff --git a/doc/build/dialects/postgresql.rst b/doc/build/dialects/postgresql.rst
index e5d8d51bc..7e2a20ef7 100644
--- a/doc/build/dialects/postgresql.rst
+++ b/doc/build/dialects/postgresql.rst
@@ -24,15 +24,18 @@ construction arguments, are as follows:
.. currentmodule:: sqlalchemy.dialects.postgresql
+.. autoclass:: aggregate_order_by
+
.. autoclass:: array
.. autoclass:: ARRAY
:members: __init__, Comparator
+.. autofunction:: array_agg
-.. autoclass:: Any
+.. autofunction:: Any
-.. autoclass:: All
+.. autofunction:: All
.. autoclass:: BIT
:members: __init__
diff --git a/doc/build/faq/connections.rst b/doc/build/faq/connections.rst
index 81a8678b4..658b4f785 100644
--- a/doc/build/faq/connections.rst
+++ b/doc/build/faq/connections.rst
@@ -136,3 +136,84 @@ when :meth:`.Connection.close` is called::
conn.detach() # detaches the DBAPI connection from the connection pool
conn.connection.<go nuts>
conn.close() # connection is closed for real, the pool replaces it with a new connection
+
+How do I use engines / connections / sessions with Python multiprocessing, or os.fork()?
+----------------------------------------------------------------------------------------
+
+The key goal with multiple Python processes is to prevent any database connections
+from being shared across processes. Depending on the specifics of the driver and OS,
+the issues that arise here range from non-working connections to socket connections that
+are used by multiple processes concurrently, leading to broken messaging (the latter
+case is typically the most common).
+
+The SQLAlchemy :class:`.Engine` object refers to a connection pool of existing
+database connections. So when this object is replicated to a child process,
+the goal is to ensure that no database connections are carried over. There
+are three general approaches to this:
+
+1. Disable pooling using :class:`.NullPool`. This is the simplest, one-shot
+   approach, which prevents the :class:`.Engine` from using any connection
+   more than once.
+
+2. Call :meth:`.Engine.dispose` on any given :class:`.Engine` as soon as one is
+   within the new process. In Python multiprocessing, constructs such as
+   ``multiprocessing.Pool`` include "initializer" hooks which are a place
+   where this can be performed; otherwise, at the point where ``os.fork()`` is
+   called or where the ``Process`` object begins the child fork, a single call
+   to :meth:`.Engine.dispose` will ensure any remaining connections are
+   discarded (a combined sketch of approaches 1 and 2 appears below, following
+   this list).
+
+3. An event handler can be applied to the connection pool that tests for connections
+ being shared across process boundaries, and invalidates them. This looks like
+ the following::
+
+ import os
+ import warnings
+
+ from sqlalchemy import event
+ from sqlalchemy import exc
+
+ def add_engine_pidguard(engine):
+ """Add multiprocessing guards.
+
+ Forces a connection to be reconnected if it is detected
+ as having been shared to a sub-process.
+
+ """
+
+ @event.listens_for(engine, "connect")
+ def connect(dbapi_connection, connection_record):
+ connection_record.info['pid'] = os.getpid()
+
+ @event.listens_for(engine, "checkout")
+ def checkout(dbapi_connection, connection_record, connection_proxy):
+ pid = os.getpid()
+ if connection_record.info['pid'] != pid:
+ # substitute log.debug() or similar here as desired
+ warnings.warn(
+ "Parent process %(orig)s forked (%(newproc)s) with an open "
+ "database connection, "
+ "which is being discarded and recreated." %
+ {"newproc": pid, "orig": connection_record.info['pid']})
+ connection_record.connection = connection_proxy.connection = None
+ raise exc.DisconnectionError(
+ "Connection record belongs to pid %s, "
+ "attempting to check out in pid %s" %
+ (connection_record.info['pid'], pid)
+ )
+
+   These events are applied to an :class:`.Engine` as soon as it's created::
+
+ engine = create_engine("...")
+
+ add_engine_pidguard(engine)
+
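+For approaches 1 and 2, a minimal combined sketch is below; the database URL
+and table name are hypothetical::
+
+    import multiprocessing
+
+    from sqlalchemy import create_engine
+    from sqlalchemy.pool import NullPool
+
+    # approach 1 would be:
+    #   engine = create_engine("postgresql://scott:tiger@localhost/test",
+    #                          poolclass=NullPool)
+    # so that no connection is ever reused and none can leak across a fork.
+
+    # approach 2: keep normal pooling, but discard any inherited connections
+    # within each worker process using an initializer hook
+    engine = create_engine("postgresql://scott:tiger@localhost/test")
+
+    def init_worker():
+        # runs once in each child process; connections copied over from
+        # the parent are discarded and the pool starts fresh
+        engine.dispose()
+
+    def do_work(n):
+        with engine.connect() as conn:
+            return conn.scalar("SELECT count(*) FROM mytable")
+
+    if __name__ == '__main__':
+        pool = multiprocessing.Pool(4, initializer=init_worker)
+        print(pool.map(do_work, range(4)))
+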
+The above strategies will accommodate the case of an :class:`.Engine`
+being shared among processes. However, for the case of a transaction-active
+:class:`.Session` or :class:`.Connection` being shared, there's no automatic
+fix for this; an application needs to ensure a new child process only
+initiates new :class:`.Connection` objects and transactions, as well as ORM
+:class:`.Session` objects. For a :class:`.Session` object, technically
+this is only needed if the session is currently transaction-bound; however,
+the scope of a single :class:`.Session` is in any case intended to be
+kept within a single call stack (e.g. not a global object, not
+shared between processes or threads).
diff --git a/doc/build/faq/sessions.rst b/doc/build/faq/sessions.rst
index e3aae00ce..2e4bdd4c8 100644
--- a/doc/build/faq/sessions.rst
+++ b/doc/build/faq/sessions.rst
@@ -417,6 +417,77 @@ The recipe `ExpireRelationshipOnFKChange <http://www.sqlalchemy.org/trac/wiki/Us
in order to coordinate the setting of foreign key attributes with many-to-one
relationships.
+.. _faq_walk_objects:
+
+How do I walk all objects that are related to a given object?
+-------------------------------------------------------------
+
+An object that has other objects related to it will correspond to the
+:func:`.relationship` constructs set up between mappers. This code fragment will
+iterate through all related objects, accounting for cycles as well::
+
+    from collections import deque
+
+    from sqlalchemy import inspect
+
+
+    def walk(obj):
+        to_visit = deque([obj])
+
+        seen = set()
+
+        while to_visit:
+            obj = to_visit.popleft()
+            if obj in seen:
+                continue
+            seen.add(obj)
+            yield obj
+            insp = inspect(obj)
+            for relationship in insp.mapper.relationships:
+                related = getattr(obj, relationship.key)
+                if relationship.uselist:
+                    to_visit.extend(related)
+                elif related is not None:
+                    to_visit.append(related)
+
+The function can be demonstrated as follows::
+
+ Base = declarative_base()
+
+
+ class A(Base):
+ __tablename__ = 'a'
+ id = Column(Integer, primary_key=True)
+ bs = relationship("B", backref="a")
+
+
+ class B(Base):
+ __tablename__ = 'b'
+ id = Column(Integer, primary_key=True)
+ a_id = Column(ForeignKey('a.id'))
+ c_id = Column(ForeignKey('c.id'))
+ c = relationship("C", backref="bs")
+
+
+ class C(Base):
+ __tablename__ = 'c'
+ id = Column(Integer, primary_key=True)
+
+
+ a1 = A(bs=[B(), B(c=C())])
+
+
+ for obj in walk(a1):
+ print obj
+
+Output::
+
+ <__main__.A object at 0x10303b190>
+ <__main__.B object at 0x103025210>
+ <__main__.B object at 0x10303b0d0>
+ <__main__.C object at 0x103025490>
+
+
+
Is there a way to automagically have only unique keywords (or other kinds of objects) without doing a query for the keyword and getting a reference to the row containing that keyword?
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
diff --git a/doc/build/glossary.rst b/doc/build/glossary.rst
index c0ecee84b..9c1395f14 100644
--- a/doc/build/glossary.rst
+++ b/doc/build/glossary.rst
@@ -1019,7 +1019,7 @@ Glossary
http://en.wikipedia.org/wiki/Unique_key#Defining_unique_keys
transient
- This describes one of the four major object states which
+ This describes one of the major object states which
an object can have within a :term:`session`; a transient object
is a new object that doesn't have any database identity
and has not been associated with a session yet. When the
@@ -1031,7 +1031,7 @@ Glossary
:ref:`session_object_states`
pending
- This describes one of the four major object states which
+ This describes one of the major object states which
an object can have within a :term:`session`; a pending object
is a new object that doesn't have any database identity,
but has been recently associated with a session. When
@@ -1042,8 +1042,23 @@ Glossary
:ref:`session_object_states`
+ deleted
+ This describes one of the major object states which
+ an object can have within a :term:`session`; a deleted object
+ is an object that was formerly persistent and has had a
+ DELETE statement emitted to the database within a flush
+ to delete its row. The object will move to the :term:`detached`
+ state once the session's transaction is committed; alternatively,
+ if the session's transaction is rolled back, the DELETE is
+ reverted and the object moves back to the :term:`persistent`
+ state.
+
+ .. seealso::
+
+ :ref:`session_object_states`
+
persistent
- This describes one of the four major object states which
+ This describes one of the major object states which
an object can have within a :term:`session`; a persistent object
is an object that has a database identity (i.e. a primary key)
and is currently associated with a session. Any object
@@ -1058,7 +1073,7 @@ Glossary
:ref:`session_object_states`
detached
- This describes one of the four major object states which
+ This describes one of the major object states which
an object can have within a :term:`session`; a detached object
is an object that has a database identity (i.e. a primary key)
but is not associated with any session. An object that
diff --git a/doc/build/index.rst b/doc/build/index.rst
index 1990df8e2..a28dfca82 100644
--- a/doc/build/index.rst
+++ b/doc/build/index.rst
@@ -14,7 +14,7 @@ A high level view and getting set up.
:doc:`Overview <intro>` |
:ref:`Installation Guide <installation>` |
:doc:`Frequently Asked Questions <faq/index>` |
-:doc:`Migration from 0.9 <changelog/migration_10>` |
+:doc:`Migration from 1.0 <changelog/migration_11>` |
:doc:`Glossary <glossary>` |
:doc:`Changelog catalog <changelog/index>`
diff --git a/doc/build/intro.rst b/doc/build/intro.rst
index 3231bfe9c..ca5662f03 100644
--- a/doc/build/intro.rst
+++ b/doc/build/intro.rst
@@ -84,18 +84,14 @@ releases as well, depending on the state of Jython itself.
Supported Installation Methods
-------------------------------
-SQLAlchemy supports installation using standard Python "distutils" or
-"setuptools" methodologies. An overview of potential setups is as follows:
-
-* **Plain Python Distutils** - SQLAlchemy can be installed with a clean
- Python install using the services provided via `Python Distutils <http://docs.python.org/distutils/>`_,
- using the ``setup.py`` script. The C extensions as well as Python 3 builds are supported.
-* **Setuptools or Distribute** - When using `setuptools <http://pypi.python.org/pypi/setuptools/>`_,
- SQLAlchemy can be installed via ``setup.py`` or ``easy_install``, and the C
- extensions are supported.
-* **pip** - `pip <http://pypi.python.org/pypi/pip/>`_ is an installer that
- rides on top of ``setuptools`` or ``distribute``, replacing the usage
- of ``easy_install``. It is often preferred for its simpler mode of usage.
+SQLAlchemy installation is via standard Python methodologies that are
+based on `setuptools <http://pypi.python.org/pypi/setuptools/>`_, either
+by referring to ``setup.py`` directly or by using
+`pip <http://pypi.python.org/pypi/pip/>`_ or other setuptools-compatible
+approaches.
+
+.. versionchanged:: 1.1 setuptools is now required by the setup.py file;
+ plain distutils installs are no longer supported.
Install via pip
---------------
@@ -108,7 +104,7 @@ downloaded from Pypi and installed in one step::
This command will download the latest **released** version of SQLAlchemy from the `Python
Cheese Shop <http://pypi.python.org/pypi/SQLAlchemy>`_ and install it to your system.
-In order to install the latest **prerelease** version, such as ``1.0.0b1``,
+In order to install the latest **prerelease** version, such as ``1.1.0b1``,
pip requires that the ``--pre`` flag be used::
pip install --pre SQLAlchemy
@@ -124,6 +120,8 @@ Otherwise, you can install from the distribution using the ``setup.py`` script::
python setup.py install
+.. _c_extensions:
+
Installing the C Extensions
----------------------------------
@@ -131,10 +129,6 @@ SQLAlchemy includes C extensions which provide an extra speed boost for
dealing with result sets. The extensions are supported on both the 2.xx
and 3.xx series of cPython.
-.. versionchanged:: 0.9.0
-
- The C extensions now compile on Python 3 as well as Python 2.
-
``setup.py`` will automatically build the extensions if an appropriate platform is
detected. If the build of the C extensions fails, due to missing compiler or
other issue, the setup process will output a warning message, and re-run the
@@ -146,26 +140,11 @@ use case for this is either for special testing circumstances, or in the rare
case of compatibility/build issues not overcome by the usual "rebuild"
mechanism::
- # *** only in SQLAlchemy 0.9.4 / 0.8.6 or greater ***
export DISABLE_SQLALCHEMY_CEXT=1; python setup.py install
-.. versionadded:: 0.9.4,0.8.6 Support for disabling the build of
- C extensions using the ``DISABLE_SQLALCHEMY_CEXT`` environment variable
- has been added. This allows control of C extension building whether or not
- setuptools is available, and additionally works around the fact that
- setuptools will possibly be **removing support** for command-line switches
- such as ``--without-extensions`` in a future release.
-
- For versions of SQLAlchemy prior to 0.9.4 or 0.8.6, the
- ``--without-cextensions`` option may be used to disable the attempt to build
- C extensions, provided setupools is in use, and provided the ``Feature``
- construct is supported by the installed version of setuptools::
-
- python setup.py --without-cextensions install
-
- Or with pip::
-
- pip install --global-option='--without-cextensions' SQLAlchemy
+.. versionchanged:: 1.1 The legacy ``--without-cextensions`` flag has been
+ removed from the installer as it relies on deprecated features of
+ setuptools.
Installing on Python 3
@@ -174,8 +153,6 @@ Installing on Python 3
SQLAlchemy runs directly on Python 2 or Python 3, and can be installed in
either environment without any adjustments or code conversion.
-.. versionchanged:: 0.9.0 Python 3 is now supported in place with no 2to3 step
- required.
Installing a Database API
@@ -189,7 +166,7 @@ the available DBAPIs for each database, including external links.
Checking the Installed SQLAlchemy Version
------------------------------------------
-This documentation covers SQLAlchemy version 1.0. If you're working on a
+This documentation covers SQLAlchemy version 1.1. If you're working on a
system that already has SQLAlchemy installed, check the version from your
Python prompt like this:
@@ -197,11 +174,11 @@ Python prompt like this:
>>> import sqlalchemy
>>> sqlalchemy.__version__ # doctest: +SKIP
- 1.0.0
+ 1.1.0
.. _migration:
-0.9 to 1.0 Migration
+1.0 to 1.1 Migration
=====================
-Notes on what's changed from 0.9 to 1.0 is available here at :doc:`changelog/migration_10`.
+Notes on what's changed from 1.0 to 1.1 are available here at :doc:`changelog/migration_11`.
diff --git a/doc/build/orm/events.rst b/doc/build/orm/events.rst
index e9673bed0..470a9386b 100644
--- a/doc/build/orm/events.rst
+++ b/doc/build/orm/events.rst
@@ -5,12 +5,10 @@ ORM Events
The ORM includes a wide variety of hooks available for subscription.
-.. versionadded:: 0.7
- The event supersedes the previous system of "extension" classes.
-
-For an introduction to the event API, see :ref:`event_toplevel`. Non-ORM events
-such as those regarding connections and low-level statement execution are described in
-:ref:`core_event_toplevel`.
+For an introduction to the most commonly used ORM events, see the section
+:ref:`session_events_toplevel`. The event system in general is discussed
+at :ref:`event_toplevel`. Non-ORM events such as those regarding connections
+and low-level statement execution are described in :ref:`core_event_toplevel`.
Attribute Events
----------------
diff --git a/doc/build/orm/examples.rst b/doc/build/orm/examples.rst
index 4db7c00dc..25d243022 100644
--- a/doc/build/orm/examples.rst
+++ b/doc/build/orm/examples.rst
@@ -93,6 +93,8 @@ Versioning with a History Table
.. automodule:: examples.versioned_history
+.. _examples_versioned_rows:
+
Versioning using Temporal Rows
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/doc/build/orm/inheritance.rst b/doc/build/orm/inheritance.rst
index 0713634bc..290d8099e 100644
--- a/doc/build/orm/inheritance.rst
+++ b/doc/build/orm/inheritance.rst
@@ -228,9 +228,9 @@ subclasses:
entity = with_polymorphic(Employee, [Engineer, Manager])
# join to all subclass tables
- entity = query.with_polymorphic(Employee, '*')
+ entity = with_polymorphic(Employee, '*')
- # use with Query
+ # use the 'entity' with a Query object
session.query(entity).all()
It also accepts a third argument ``selectable`` which replaces the automatic
@@ -249,7 +249,7 @@ should be used to load polymorphically::
employee.outerjoin(manager).outerjoin(engineer)
)
- # use with Query
+ # use the 'entity' with a Query object
session.query(entity).all()
Note that if you only need to load a single subtype, such as just the
diff --git a/doc/build/orm/persistence_techniques.rst b/doc/build/orm/persistence_techniques.rst
index aee48121d..a30d486b5 100644
--- a/doc/build/orm/persistence_techniques.rst
+++ b/doc/build/orm/persistence_techniques.rst
@@ -78,6 +78,112 @@ proper context for the desired engine::
connection = session.connection(MyMappedClass)
+.. _session_forcing_null:
+
+Forcing NULL on a column with a default
+=======================================
+
+The ORM considers any attribute that was never set on an object as a
+"default" case; the attribute will be omitted from the INSERT statement::
+
+ class MyObject(Base):
+ __tablename__ = 'my_table'
+ id = Column(Integer, primary_key=True)
+ data = Column(String(50), nullable=True)
+
+ obj = MyObject(id=1)
+ session.add(obj)
+ session.commit() # INSERT with the 'data' column omitted; the database
+ # itself will persist this as the NULL value
+
+Omitting a column from the INSERT means that the column will
+have the NULL value set, *unless* the column has a default set up,
+in which case the default value will be persisted. This holds true
+both from a pure SQL perspective with server-side defaults, as well as the
+behavior of SQLAlchemy's insert behavior with both client-side and server-side
+defaults::
+
+ class MyObject(Base):
+ __tablename__ = 'my_table'
+ id = Column(Integer, primary_key=True)
+ data = Column(String(50), nullable=True, server_default="default")
+
+ obj = MyObject(id=1)
+ session.add(obj)
+ session.commit() # INSERT with the 'data' column omitted; the database
+ # itself will persist this as the value 'default'
+
+However, in the ORM, even if one assigns the Python value ``None`` explicitly
+to the object, this is treated the **same** as though the value were never
+assigned::
+
+ class MyObject(Base):
+ __tablename__ = 'my_table'
+ id = Column(Integer, primary_key=True)
+ data = Column(String(50), nullable=True, server_default="default")
+
+ obj = MyObject(id=1, data=None)
+ session.add(obj)
+ session.commit() # INSERT with the 'data' column explicitly set to None;
+ # the ORM still omits it from the statement and the
+ # database will still persist this as the value 'default'
+
+The above operation will persist into the ``data`` column the
+server default value of ``"default"`` and not SQL NULL, even though ``None``
+was passed; this is a long-standing behavior of the ORM that many applications
+hold as an assumption.
+
+So what if we want to actually put NULL into this column, even though the
+column has a default value? There are two approaches. One is that
+on a per-instance level, we assign the attribute using the
+:obj:`~.expression.null` SQL construct::
+
+ from sqlalchemy import null
+
+ obj = MyObject(id=1, data=null())
+ session.add(obj)
+ session.commit() # INSERT with the 'data' column explicitly set as null();
+ # the ORM uses this directly, bypassing all client-
+ # and server-side defaults, and the database will
+ # persist this as the NULL value
+
+The :obj:`~.expression.null` SQL construct always translates into the SQL
+NULL value being directly present in the target INSERT statement.
+
+If we'd like to be able to use the Python value ``None`` and have this
+also be persisted as NULL despite the presence of column defaults,
+we can configure this for the ORM using a Core-level modifier
+:meth:`.TypeEngine.evaluates_none`, which indicates
+a type where the ORM should treat the value ``None`` the same as any other
+value and pass it through, rather than omitting it as a "missing" value::
+
+ class MyObject(Base):
+ __tablename__ = 'my_table'
+ id = Column(Integer, primary_key=True)
+ data = Column(
+ String(50).evaluates_none(), # indicate that None should always be passed
+ nullable=True, server_default="default")
+
+ obj = MyObject(id=1, data=None)
+ session.add(obj)
+ session.commit() # INSERT with the 'data' column explicitly set to None;
+ # the ORM uses this directly, bypassing all client-
+ # and server-side defaults, and the database will
+ # persist this as the NULL value
+
+.. topic:: Evaluating None
+
+ The :meth:`.TypeEngine.evaluates_none` modifier is primarily intended to
+ signal a type where the Python value "None" is significant, the primary
+ example being a JSON type which may want to persist the JSON ``null`` value
+ rather than SQL NULL. We are slightly repurposing it here in order to
+ signal to the ORM that we'd like ``None`` to be passed into the type whenever
+ present, even though no special type-level behaviors are assigned to it.
+
+.. versionadded:: 1.1 added the :meth:`.TypeEngine.evaluates_none` method
+ in order to indicate that a "None" value should be treated as significant.
+
+
.. _session_partitioning:
Partitioning Strategies
diff --git a/doc/build/orm/relationship_persistence.rst b/doc/build/orm/relationship_persistence.rst
index 8af96cbd6..d4fca2c93 100644
--- a/doc/build/orm/relationship_persistence.rst
+++ b/doc/build/orm/relationship_persistence.rst
@@ -172,56 +172,108 @@ Mutable Primary Keys / Update Cascades
When the primary key of an entity changes, related items
which reference the primary key must also be updated as
well. For databases which enforce referential integrity,
-it's required to use the database's ON UPDATE CASCADE
+the best strategy is to use the database's ON UPDATE CASCADE
functionality in order to propagate primary key changes
to referenced foreign keys - the values cannot be out
-of sync for any moment.
-
-For databases that don't support this, such as SQLite and
-MySQL without their referential integrity options turned
-on, the :paramref:`~.relationship.passive_updates` flag can
-be set to ``False``, most preferably on a one-to-many or
-many-to-many :func:`.relationship`, which instructs
-SQLAlchemy to issue UPDATE statements individually for
-objects referenced in the collection, loading them into
-memory if not already locally present. The
-:paramref:`~.relationship.passive_updates` flag can also be ``False`` in
-conjunction with ON UPDATE CASCADE functionality,
-although in that case the unit of work will be issuing
-extra SELECT and UPDATE statements unnecessarily.
-
-A typical mutable primary key setup might look like::
+of sync for any moment unless the constraints are marked as "deferrable",
+that is, not enforced until the transaction completes.
+
+It is **highly recommended** that an application which seeks to employ
+natural primary keys with mutable values use the ``ON UPDATE CASCADE``
+capabilities of the database. An example mapping which
+illustrates this is::
class User(Base):
__tablename__ = 'user'
+ __table_args__ = {'mysql_engine': 'InnoDB'}
username = Column(String(50), primary_key=True)
fullname = Column(String(100))
- # passive_updates=False *only* needed if the database
- # does not implement ON UPDATE CASCADE
- addresses = relationship("Address", passive_updates=False)
+ addresses = relationship("Address")
+
class Address(Base):
__tablename__ = 'address'
+ __table_args__ = {'mysql_engine': 'InnoDB'}
email = Column(String(50), primary_key=True)
username = Column(String(50),
ForeignKey('user.username', onupdate="cascade")
)
-:paramref:`~.relationship.passive_updates` is set to ``True`` by default,
-indicating that ON UPDATE CASCADE is expected to be in
-place in the usual case for foreign keys that expect
-to have a mutating parent key.
-
-A :paramref:`~.relationship.passive_updates` setting of False may be configured on any
-direction of relationship, i.e. one-to-many, many-to-one,
-and many-to-many, although it is much more effective when
-placed just on the one-to-many or many-to-many side.
-Configuring the :paramref:`~.relationship.passive_updates`
-to False only on the
-many-to-one side will have only a partial effect, as the
-unit of work searches only through the current identity
-map for objects that may be referencing the one with a
-mutating primary key, not throughout the database.
+Above, we illustrate ``onupdate="cascade"`` on the :class:`.ForeignKey`
+object, and we also illustrate the ``mysql_engine='InnoDB'`` setting
+which, on a MySQL backend, ensures that the ``InnoDB`` engine supporting
+referential integrity is used. When using SQLite, referential integrity
+should be enabled, using the configuration described at
+:ref:`sqlite_foreign_keys`.
+
+Simulating limited ON UPDATE CASCADE without foreign key support
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+In those cases when a database that does not support referential integrity
+is used, and natural primary keys with mutable values are in play,
+SQLAlchemy offers a feature in order to allow propagation of primary key
+values to already-referenced foreign keys to a **limited** extent,
+by emitting an UPDATE statement against foreign key columns that immediately
+reference a primary key column whose value has changed.
+The primary platforms without referential integrity features are
+MySQL when the ``MyISAM`` storage engine is used, and SQLite when the
+``PRAGMA foreign_keys=ON`` pragma is not used. The Oracle database also
+has no support for ``ON UPDATE CASCADE``, but because it still enforces
+referential integrity, it needs constraints to be marked as deferrable
+so that SQLAlchemy can emit UPDATE statements.
+
+The feature is enabled by setting the
+:paramref:`~.relationship.passive_updates` flag to ``False``,
+most preferably on a one-to-many or
+many-to-many :func:`.relationship`. When "updates" are no longer
+"passive" this indicates that SQLAlchemy will
+issue UPDATE statements individually for
+objects referenced in the collection referred to by the parent object
+with a changing primary key value. This also implies that collections
+will be fully loaded into memory if not already locally present.
+
+Our previous mapping using ``passive_updates=False`` looks like::
+
+ class User(Base):
+ __tablename__ = 'user'
+
+ username = Column(String(50), primary_key=True)
+ fullname = Column(String(100))
+
+ # passive_updates=False *only* needed if the database
+ # does not implement ON UPDATE CASCADE
+ addresses = relationship("Address", passive_updates=False)
+
+ class Address(Base):
+ __tablename__ = 'address'
+
+ email = Column(String(50), primary_key=True)
+ username = Column(String(50), ForeignKey('user.username'))
+
+Key limitations of ``passive_updates=False`` include:
+
+* it performs much more poorly than direct database ON UPDATE CASCADE,
+ because it needs to fully pre-load affected collections using SELECT
+ and also must emit UPDATE statements against those values, which it
+ will attempt to run in "batches" but still runs on a per-row basis
+ at the DBAPI level.
+
+* the feature cannot "cascade" more than one level. That is,
+ if mapping X has a foreign key which refers to the primary key
+ of mapping Y, but then mapping Y's primary key is itself a foreign key
+ to mapping Z, ``passive_updates=False`` cannot cascade a change in
+ primary key value from ``Z`` to ``X``.
+
+* Configuring ``passive_updates=False`` only on the many-to-one
+ side of a relationship will not have a full effect, as the
+ unit of work searches only through the current identity
+ map for objects that may be referencing the one with a
+ mutating primary key, not throughout the database.
+
+As virtually all databases other than Oracle now support ``ON UPDATE CASCADE``,
+it is highly recommended that traditional ``ON UPDATE CASCADE`` support be used
+in the case that natural and mutable primary key values are in use.
+
diff --git a/doc/build/orm/session.rst b/doc/build/orm/session.rst
index 624ee9f75..79ea70137 100644
--- a/doc/build/orm/session.rst
+++ b/doc/build/orm/session.rst
@@ -20,6 +20,7 @@ configured, the primary usage interface for persistence operations is the
session_transaction
persistence_techniques
contextual
+ session_events
session_api
diff --git a/doc/build/orm/session_events.rst b/doc/build/orm/session_events.rst
new file mode 100644
index 000000000..50c63e6ea
--- /dev/null
+++ b/doc/build/orm/session_events.rst
@@ -0,0 +1,433 @@
+.. _session_events_toplevel:
+
+Tracking Object and Session Changes with Events
+===============================================
+
+SQLAlchemy features an extensive :ref:`Event Listening <event_toplevel>`
+system used throughout the Core and ORM. Within the ORM, there are a
+wide variety of event listener hooks, which are documented at an API
+level at :ref:`orm_event_toplevel`. This collection of events has
+grown over the years to include lots of very useful new events as well
+as some older events that aren't as relevant as they once were. This
+section will attempt to introduce the major event hooks and when they
+might be used.
+
+.. _session_persistence_events:
+
+Persistence Events
+------------------
+
+Probably the most widely used series of events are the "persistence" events,
+which correspond to the :ref:`flush process<session_flushing>`.
+The flush is where all the decisions are made about pending changes to
+objects, which are then emitted out to the database in the form of INSERT,
+UPDATE, and DELETE statements.
+
+``before_flush()``
+^^^^^^^^^^^^^^^^^^
+
+The :meth:`.SessionEvents.before_flush` hook is by far the most generally
+useful event to use when an application wants to ensure that
+additional persistence changes to the database are made when a flush proceeds.
+Use :meth:`.SessionEvents.before_flush` in order to operate
+upon objects to validate their state as well as to compose additional objects
+and references before they are persisted. Within this event,
+it is **safe to manipulate the Session's state**, that is, new objects
+can be attached to it, objects can be deleted, and individual attributes
+on objects can be changed freely, and these changes will be pulled into
+the flush process when the event hook completes.
+
+The typical :meth:`.SessionEvents.before_flush` hook will be tasked with
+scanning the collections :attr:`.Session.new`, :attr:`.Session.dirty` and
+:attr:`.Session.deleted` in order to look for objects
+where something will be happening.
+
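+A minimal sketch of such a hook is below, where ``MyClass`` and
+``AuditRecord`` are hypothetical mapped classes::
+
+    from sqlalchemy import event
+    from sqlalchemy.orm import Session
+
+    @event.listens_for(Session, "before_flush")
+    def before_flush(session, flush_context, instances):
+        for obj in session.new:
+            if isinstance(obj, MyClass):
+                # it is safe to add more objects to the Session here;
+                # they will be included in the same flush
+                session.add(AuditRecord(note="created %r" % obj))
+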
+For illustrations of :meth:`.SessionEvents.before_flush`, see
+examples such as :ref:`examples_versioned_history` and
+:ref:`examples_versioned_rows`.
+
+``after_flush()``
+^^^^^^^^^^^^^^^^^
+
+The :meth:`.SessionEvents.after_flush` hook is called after the SQL has been
+emitted for a flush process, but **before** the state of the objects that
+were flushed has been altered. That is, you can still inspect
+the :attr:`.Session.new`, :attr:`.Session.dirty` and
+:attr:`.Session.deleted` collections to see what was just flushed, and
+you can also use history tracking features like the ones provided
+by :class:`.AttributeState` to see what changes were just persisted.
+In the :meth:`.SessionEvents.after_flush` event, additional SQL can be emitted
+to the database based on what's observed to have changed.
+
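+Below is one possible sketch; ``audit_table`` is a hypothetical Core
+:class:`.Table` and ``name`` a hypothetical mapped attribute::
+
+    from sqlalchemy import event, inspect
+    from sqlalchemy.orm import Session
+
+    @event.listens_for(Session, "after_flush")
+    def after_flush(session, flush_context):
+        for obj in session.dirty:
+            history = inspect(obj).attrs.name.history
+            if history.has_changes():
+                # emit additional SQL on the same connection / transaction
+                # that the flush is using
+                session.connection().execute(
+                    audit_table.insert().values(
+                        old=str(history.deleted),
+                        new=str(history.added)
+                    )
+                )
+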
+``after_flush_postexec()``
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+:meth:`.SessionEvents.after_flush_postexec` is called soon after
+:meth:`.SessionEvents.after_flush`, but is invoked **after** the state of
+the objects has been modified to account for the flush that just took place.
+The :attr:`.Session.new`, :attr:`.Session.dirty` and
+:attr:`.Session.deleted` collections are normally completely empty here.
+Use :meth:`.SessionEvents.after_flush_postexec` to inspect the identity map
+for finalized objects and possibly emit additional SQL. In this hook,
+there is the ability to make new changes on objects, which means the
+:class:`.Session` will again go into a "dirty" state; the mechanics of the
+:class:`.Session` here will cause it to flush **again** if new changes
+are detected in this hook. A counter ensures that an endless loop in this
+regard is stopped after 100 iterations, in the case that an
+:meth:`.SessionEvents.after_flush_postexec`
+hook continually adds new state to be flushed each time it is called.
+
+.. _session_persistence_mapper:
+
+Mapper-level Events
+^^^^^^^^^^^^^^^^^^^
+
+In addition to the flush-level hooks, there is also a suite of hooks
+that are more fine-grained, in that they are called on a per-object
+basis and are broken out based on INSERT, UPDATE or DELETE. These
+are the mapper persistence hooks, and they too are very popular;
+however, these events need to be approached more cautiously, as they
+proceed within the context of the flush process that is already
+ongoing; many operations are not safe to perform here.
+
+The events are:
+
+* :meth:`.MapperEvents.before_insert`
+* :meth:`.MapperEvents.after_insert`
+* :meth:`.MapperEvents.before_update`
+* :meth:`.MapperEvents.after_update`
+* :meth:`.MapperEvents.before_delete`
+* :meth:`.MapperEvents.after_delete`
+
+Each event is passed the :class:`.Mapper`,
+the mapped object itself, and the :class:`.Connection` which is being
+used to emit an INSERT, UPDATE or DELETE statement. The appeal of these
+events is clear, in that if an application wants to tie some activity to
+when a specific type of object is persisted with an INSERT, the hook is
+very specific; unlike the :meth:`.SessionEvents.before_flush` event,
+there's no need to search through collections like :attr:`.Session.new`
+in order to find targets. However, the flush plan which
+represents the full list of every single INSERT, UPDATE, DELETE statement
+to be emitted has *already been decided* when these events are called,
+and no changes may be made at this stage. Therefore the only changes that are
+even possible to the given objects are upon attributes **local** to the
+object's row. Any other change to the object or other objects will
+impact the state of the :class:`.Session`, which will fail to function
+properly.
+
+Operations that are not supported within these mapper-level persistence
+events include:
+
+* :meth:`.Session.add`
+* :meth:`.Session.delete`
+* Mapped collection append, add, remove, delete, discard, etc.
+* Mapped relationship attribute set/del events,
+ i.e. ``someobject.related = someotherobject``
+
+The reason the :class:`.Connection` is passed is that it is encouraged that
+**simple SQL operations take place here**, directly on the :class:`.Connection`,
+such as incrementing counters or inserting extra rows within log tables.
+When dealing with the :class:`.Connection`, it is expected that Core-level
+SQL operations will be used; e.g. those described in :ref:`sqlexpression_toplevel`.
+
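+For example, a hypothetical hook that writes a row to a log table using the
+Core connection, where ``MyClass`` and ``log_table`` are illustrative only::
+
+    from sqlalchemy import event
+
+    @event.listens_for(MyClass, "after_insert")
+    def after_insert(mapper, connection, target):
+        # a Core-level INSERT on the same connection the flush is using;
+        # ORM Session operations are not safe within this hook
+        connection.execute(
+            log_table.insert().values(note="inserted %r" % target)
+        )
+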
+There are also many per-object operations that don't need to be handled
+within a flush event at all. The most common alternative is to simply
+establish additional state along with an object inside its ``__init__()``
+method, such as creating additional objects that are to be associated with
+the new object. Using validators as described in :ref:`simple_validators` is
+another approach; these functions can intercept changes to attributes and
+establish additional state changes on the target object in response to the
+attribute change. With both of these approaches, the object is in
+the correct state before it ever gets to the flush step.
+
+.. _session_lifecycle_events:
+
+Object Lifecycle Events
+-----------------------
+
+Another use case for events is to track the lifecycle of objects. This
+refers to the states first introduced at :ref:`session_object_states`.
+
+.. versionadded:: 1.1 added a system of events that intercept all possible
+ state transitions of an object within the :class:`.Session`.
+
+All the states above can be tracked fully with events. Each event
+represents a distinct state transition, meaning, the starting state
+and the destination state are both part of what is tracked. With the
+exception of the initial transient event, all the events are in terms of
+the :class:`.Session` object or class, meaning they can be associated either
+with a specific :class:`.Session` object::
+
+ from sqlalchemy import event
+ from sqlalchemy.orm import Session
+
+ session = Session()
+
+ @event.listens_for(session, 'transient_to_pending')
+ def object_is_pending(session, obj):
+ print("new pending: %s" % obj)
+
+Or with the :class:`.Session` class itself, as well as with a specific
+:class:`.sessionmaker`, which is likely the most useful form::
+
+ from sqlalchemy import event
+ from sqlalchemy.orm import sessionmaker
+
+ maker = sessionmaker()
+
+ @event.listens_for(maker, 'transient_to_pending')
+ def object_is_pending(session, obj):
+ print("new pending: %s" % obj)
+
+The listeners can of course be stacked on top of one function, as is
+likely to be common. For example, to track all objects that are
+entering the persistent state::
+
+ @event.listens_for(maker, "pending_to_persistent")
+ @event.listens_for(maker, "deleted_to_persistent")
+ @event.listens_for(maker, "detached_to_persistent")
+ @event.listens_for(maker, "loaded_as_persistent")
+ def detect_all_persistent(session, instance):
+ print("object is now persistent: %s" % instance)
+
+Transient
+^^^^^^^^^
+
+All mapped objects when first constructed start out as :term:`transient`.
+In this state, the object exists alone and doesn't have an association with
+any :class:`.Session`. For this initial state, there's no specific
+"transition" event since there is no :class:`.Session`, however if one
+wanted to intercept when any transient object is created, the
+:meth:`.InstanceEvents.init` method is probably the best event. This
+event is applied to a specific class or superclass. For example, to
+intercept all new objects for a particular declarative base::
+
+ from sqlalchemy.ext.declarative import declarative_base
+ from sqlalchemy import event
+
+ Base = declarative_base()
+
+ @event.listens_for(Base, "init", propagate=True)
+ def intercept_init(instance, args, kwargs):
+ print("new transient: %s" % instance)
+
+
+Transient to Pending
+^^^^^^^^^^^^^^^^^^^^
+
+The transient object becomes :term:`pending` when it is first associated
+with a :class:`.Session` via the :meth:`.Session.add` or :meth:`.Session.add_all`
+method. An object may also become part of a :class:`.Session` as a result
+of a :ref:`"cascade" <unitofwork_cascades>` from a referencing object that was
+explicitly added. The transient to pending transition is detectable using
+the :meth:`.SessionEvents.transient_to_pending` event::
+
+ @event.listens_for(sessionmaker, "transient_to_pending")
+ def intercept_transient_to_pending(session, object_):
+ print("transient to pending: %s" % object_)
+
+
+Pending to Persistent
+^^^^^^^^^^^^^^^^^^^^^
+
+The :term:`pending` object becomes :term:`persistent` when a flush
+proceeds and an INSERT statement takes place for the instance. The object
+now has an identity key. Track pending to persistent with the
+:meth:`.SessionEvents.pending_to_persistent` event::
+
+ @event.listens_for(sessionmaker, "pending_to_persistent")
+ def intercept_pending_to_persistent(session, object_):
+ print("pending to persistent: %s" % object_)
+
+Pending to Transient
+^^^^^^^^^^^^^^^^^^^^^^^
+
+The :term:`pending` object can revert back to :term:`transient` if the
+:meth:`.Session.rollback` method is called before the pending object
+has been flushed, or if the :meth:`.Session.expunge` method is called
+for the object before it is flushed. Track pending to transient with the
+:meth:`.SessionEvents.pending_to_transient` event::
+
+ @event.listens_for(sessionmaker, "pending_to_transient")
+ def intercept_pending_to_transient(session, object_):
+ print("transient to pending: %s" % object_)
+
+Loaded as Persistent
+^^^^^^^^^^^^^^^^^^^^^^^
+
+Objects can appear in the :class:`.Session` directly in the :term:`persistent`
+state when they are loaded from the database. Tracking this state transition
+is synonymous with tracking objects as they are loaded, and is equivalent to
+using the :meth:`.InstanceEvents.load` instance-level event. However, the
+:meth:`.SessionEvents.loaded_as_persistent` event is provided as a
+session-centric hook for intercepting objects as they enter the persistent
+state via this particular avenue::
+
+ @event.listens_for(sessionmaker, "loaded_as_persistent")
+ def intercept_loaded_as_persistent(session, object_):
+ print("object loaded into persistent state: %s" % object_)
+
+
+Persistent to Transient
+^^^^^^^^^^^^^^^^^^^^^^^
+
+The persistent object can revert to the transient state if the
+:meth:`.Session.rollback` method is called for a transaction where the
+object was first added as pending. In the case of the ROLLBACK, the
+INSERT statement that made this object persistent is rolled back, and
+the object is evicted from the :class:`.Session` to again become transient.
+Track objects that were reverted to transient from
+persistent using the :meth:`.SessionEvents.persistent_to_transient`
+event hook::
+
+ @event.listens_for(sessionmaker, "persistent_to_transient")
+ def intercept_persistent_to_transient(session, object_):
+ print("persistent to transient: %s" % object_)
+
+Persistent to Deleted
+^^^^^^^^^^^^^^^^^^^^^
+
+The persistent object enters the :term:`deleted` state when an object
+marked for deletion is deleted from the database within the flush
+process. Note that this is **not the same** as when the :meth:`.Session.delete`
+method is called for a target object. The :meth:`.Session.delete`
+method only **marks** the object for deletion; the actual DELETE statement
+is not emitted until the flush proceeds. It is subsequent to the flush
+that the "deleted" state is present for the target object.
+
+Within the "deleted" state, the object is only marginally associated
+with the :class:`.Session`. It is not present in the identity map
+nor is it present in the :attr:`.Session.deleted` collection that
+referred to it while it was pending for deletion.
+
+From the "deleted" state, the object can go either to the detached state
+when the transaction is committed, or back to the persistent state
+if the transaction is instead rolled back.
+
+Track the persistent to deleted transition with
+:meth:`.SessionEvents.persistent_to_deleted`::
+
+ @event.listens_for(sessionmaker, "persistent_to_deleted")
+ def intercept_persistent_to_deleted(session, object_):
+ print("object was DELETEd, is now in deleted state: %s" % object_)
+
+
+Deleted to Detached
+^^^^^^^^^^^^^^^^^^^^
+
+The deleted object becomes :term:`detached` when the session's transaction
+is committed. After the :meth:`.Session.commit` method is called, the
+database transaction is final and the :class:`.Session` now fully discards
+the deleted object and removes all associations to it. Track
+the deleted to detached transition using :meth:`.SessionEvents.deleted_to_detached`::
+
+ @event.listens_for(sessionmaker, "deleted_to_detached")
+ def intercept_deleted_to_detached(session, object_):
+ print("deleted to detached: %s" % object_)
+
+
+.. note::
+
+ While the object is in the deleted state, the :attr:`.InstanceState.deleted`
+ attribute, accessible using ``inspect(object).deleted``, returns True. However
+ when the object is detached, :attr:`.InstanceState.deleted` will again
+ return False. To detect that an object was deleted, regardless of whether
+ or not it is detached, use the :attr:`.InstanceState.was_deleted`
+ accessor.
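+
+   A minimal sketch of checking these accessors; ``some_object`` stands in
+   for any mapped instance that was deleted and then detached::
+
+       from sqlalchemy import inspect
+
+       insp = inspect(some_object)
+
+       # False once the object has moved from "deleted" to "detached"
+       print(insp.deleted)
+
+       # remains True for an object that was DELETEd in a flush,
+       # whether or not it has since become detached
+       print(insp.was_deleted)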
+
+
+Persistent to Detached
+^^^^^^^^^^^^^^^^^^^^^^^
+
+The persistent object becomes :term:`detached` when the object is de-associated
+with the :class:`.Session`, via the :meth:`.Session.expunge`,
+:meth:`.Session.expunge_all`, or :meth:`.Session.close` methods.
+
+.. note::
+
+ An object may also become **implicitly detached** if its owning
+ :class:`.Session` is dereferenced by the application and discarded due to
+ garbage collection. In this case, **no event is emitted**.
+
+Track objects as they move from persistent to detached using the
+:meth:`.SessionEvents.persistent_to_detached` event::
+
+ @event.listens_for(sessionmaker, "persistent_to_detached")
+    def intercept_persistent_to_detached(session, object_):
+ print("object became detached: %s" % object_)
+
+Detached to Persistent
+^^^^^^^^^^^^^^^^^^^^^^^
+
+The detached object becomes persistent when it is re-associated with a
+session using the :meth:`.Session.add` or equivalent method. Track
+objects moving back to persistent from detached using the
+:meth:`.SessionEvents.detached_to_persistent` event::
+
+ @event.listens_for(sessionmaker, "detached_to_persistent")
+    def intercept_detached_to_persistent(session, object_):
+ print("object became persistent again: %s" % object_)
+
+
+Deleted to Persistent
+^^^^^^^^^^^^^^^^^^^^^^^
+
+The :term:`deleted` object can be reverted to the :term:`persistent`
+state when the transaction in which it was DELETEd is rolled back
+using the :meth:`.Session.rollback` method. Track deleted objects
+moving back to the persistent state using the
+:meth:`.SessionEvents.deleted_to_persistent` event::
+
+ @event.listens_for(sessionmaker, "transient_to_pending")
+ def intercept_transient_to_pending(session, object_):
+ print("transient to pending: %s" % object_)
+
+.. _session_transaction_events:
+
+Transaction Events
+------------------
+
+Transaction events allow an application to be notified when transaction
+boundaries occur at the :class:`.Session` level as well as when the
+:class:`.Session` changes the transactional state on :class:`.Connection`
+objects.
+
+* :meth:`.SessionEvents.after_transaction_create`,
+ :meth:`.SessionEvents.after_transaction_end` - these events track the
+ logical transaction scopes of the :class:`.Session` in a way that is
+ not specific to individual database connections. These events are
+ intended to help with integration of transaction-tracking systems such as
+ ``zope.sqlalchemy``. Use these
+ events when the application needs to align some external scope with the
+ transactional scope of the :class:`.Session`. These hooks mirror
+ the "nested" transactional behavior of the :class:`.Session`, in that they
+ track logical "subtransactions" as well as "nested" (e.g. SAVEPOINT)
+ transactions.
+
+* :meth:`.SessionEvents.before_commit`, :meth:`.SessionEvents.after_commit`,
+ :meth:`.SessionEvents.after_begin`,
+ :meth:`.SessionEvents.after_rollback`, :meth:`.SessionEvents.after_soft_rollback` -
+ These events allow tracking of transaction events from the perspective
+ of database connections. :meth:`.SessionEvents.after_begin` in particular
+ is a per-connection event; a :class:`.Session` that maintains more than
+ one connection will emit this event for each connection individually
+ as those connections become used within the current transaction.
+ The rollback and commit events then refer to when the DBAPI connections
+ themselves have received rollback or commit instructions directly.
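+
+For example, a brief sketch of listeners drawing on both categories above;
+the ``session`` variable is assumed to be an existing :class:`.Session`::
+
+    from sqlalchemy import event
+
+    @event.listens_for(session, "after_transaction_create")
+    def on_transaction_create(session, transaction):
+        # a logical transaction scope has begun; this includes
+        # subtransactions and SAVEPOINT blocks
+        print("transaction scope created: %s" % transaction)
+
+    @event.listens_for(session, "after_commit")
+    def on_commit(session):
+        # the transaction has been committed
+        print("session committed")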
+
+Attribute Change Events
+-----------------------
+
+The attribute change events allow interception of when specific attributes
+on an object are modified. These events include :meth:`.AttributeEvents.set`,
+:meth:`.AttributeEvents.append`, and :meth:`.AttributeEvents.remove`. These
+events are extremely useful, particularly for per-object validation operations;
+however, it is often much more convenient to use a "validator" hook, which
+uses these hooks behind the scenes; see :ref:`simple_validators` for
+background on this. The attribute events are also behind the mechanics
+of backreferences. An example illustrating use of attribute events
+is in :ref:`examples_instrumentation`.
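+
+For a rough sketch, the listener below intercepts assignments to a
+hypothetical ``SomeClass.data`` attribute; with ``retval=True``, the value
+returned by the listener is the one actually applied::
+
+    from sqlalchemy import event
+
+    @event.listens_for(SomeClass.data, "set", retval=True)
+    def receive_set(target, value, oldvalue, initiator):
+        # return the value to be assigned to the attribute
+        return value.strip() if value is not None else value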
+
+
+
+
diff --git a/doc/build/orm/session_state_management.rst b/doc/build/orm/session_state_management.rst
index 1ca7ca2e4..090bf7674 100644
--- a/doc/build/orm/session_state_management.rst
+++ b/doc/build/orm/session_state_management.rst
@@ -23,16 +23,28 @@ It's helpful to know the states which an instance can have within a session:
existing instances (or moving persistent instances from other sessions into
your local session).
-* **Detached** - an instance which has a record in the database, but is not in
- any session. There's nothing wrong with this, and you can use objects
- normally when they're detached, **except** they will not be able to issue
- any SQL in order to load collections or attributes which are not yet loaded,
- or were marked as "expired".
-
-Knowing these states is important, since the
-:class:`.Session` tries to be strict about ambiguous
-operations (such as trying to save the same object to two different sessions
-at the same time).
+* **Deleted** - An instance which has been deleted within a flush, but
+ the transaction has not yet completed. Objects in this state are essentially
+ in the opposite of "pending" state; when the session's transaction is committed,
+ the object will move to the detached state. Alternatively, when
+ the session's transaction is rolled back, a deleted object moves
+ *back* to the persistent state.
+
+ .. versionchanged:: 1.1 The 'deleted' state is a newly added session
+ object state distinct from the 'persistent' state.
+
+* **Detached** - an instance which corresponds, or previously corresponded,
+ to a record in the database, but is not currently in any session.
+ The detached object will contain a database identity marker, however
+ because it is not associated with a session, it is unknown whether or not
+ this database identity actually exists in a target database. Detached
+ objects are safe to use normally, except that they have no ability to
+ load unloaded attributes or attributes that were previously marked
+ as "expired".
+
+For a deeper dive into all possible state transitions, see the
+section :ref:`session_lifecycle_events` which describes each transition
+as well as how to programmatically track each one.
Getting the Current State of an Object
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -53,8 +65,11 @@ the :func:`.inspect` system::
:attr:`.InstanceState.persistent`
+ :attr:`.InstanceState.deleted`
+
:attr:`.InstanceState.detached`
+.. _session_attributes:
Session Attributes
------------------
@@ -92,17 +107,80 @@ all objects which have had changes since they were last loaded or saved (i.e.
(Documentation: :attr:`.Session.new`, :attr:`.Session.dirty`,
:attr:`.Session.deleted`, :attr:`.Session.identity_map`).
-Note that objects within the session are by default *weakly referenced*. This
+
+.. _session_referencing_behavior:
+
+Session Referencing Behavior
+----------------------------
+
+Objects within the session are *weakly referenced*. This
means that when they are dereferenced in the outside application, they fall
out of scope from within the :class:`~sqlalchemy.orm.session.Session` as well
and are subject to garbage collection by the Python interpreter. The
exceptions to this include objects which are pending, objects which are marked
as deleted, or persistent objects which have pending changes on them. After a
full flush, these collections are all empty, and all objects are again weakly
-referenced. To disable the weak referencing behavior and force all objects
-within the session to remain until explicitly expunged, configure
-:class:`.sessionmaker` with the ``weak_identity_map=False``
-setting.
+referenced.
+
+To cause objects in the :class:`.Session` to remain strongly
+referenced, usually a simple approach is all that's needed. Examples
+of externally managed strong-referencing behavior include loading
+objects into a local dictionary keyed to their primary key, or into
+lists or sets for the span of time that they need to remain
+referenced. These collections can be associated with a
+:class:`.Session`, if desired, by placing them into the
+:attr:`.Session.info` dictionary.
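+
+For instance, a minimal sketch of the dictionary approach, keyed by primary
+key and stored in :attr:`.Session.info`; ``MyClass`` is illustrative::
+
+    from sqlalchemy.orm import Session
+
+    session = Session()
+    session.info['strong_refs'] = strong_refs = {}
+
+    for obj in session.query(MyClass):
+        # objects remain referenced for as long as this dictionary does
+        strong_refs[obj.id] = obj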
+
+An event-based approach is also feasible. A simple recipe that provides
+"strong referencing" behavior for all objects as they remain within
+the :term:`persistent` state is as follows::
+
+ from sqlalchemy import event
+
+ def strong_reference_session(session):
+ @event.listens_for(session, "pending_to_persistent")
+ @event.listens_for(session, "deleted_to_persistent")
+ @event.listens_for(session, "detached_to_persistent")
+ @event.listens_for(session, "loaded_as_persistent")
+ def strong_ref_object(sess, instance):
+ if 'refs' not in sess.info:
+ sess.info['refs'] = refs = set()
+ else:
+ refs = sess.info['refs']
+
+ refs.add(instance)
+
+
+ @event.listens_for(session, "persistent_to_detached")
+ @event.listens_for(session, "persistent_to_deleted")
+ @event.listens_for(session, "persistent_to_transient")
+ def deref_object(sess, instance):
+ sess.info['refs'].discard(instance)
+
+Above, we use the :meth:`.SessionEvents.pending_to_persistent`,
+:meth:`.SessionEvents.detached_to_persistent`,
+:meth:`.SessionEvents.deleted_to_persistent` and
+:meth:`.SessionEvents.loaded_as_persistent` event hooks to intercept
+objects as they enter the :term:`persistent` state, and the
+:meth:`.SessionEvents.persistent_to_detached` and
+:meth:`.SessionEvents.persistent_to_deleted` hooks to intercept
+objects as they leave the persistent state.
+
+The above function may be called for any :class:`.Session` in order to
+provide strong-referencing behavior on a per-:class:`.Session` basis::
+
+ from sqlalchemy.orm import Session
+
+ my_session = Session()
+ strong_reference_session(my_session)
+
+It may also be called for any :class:`.sessionmaker`::
+
+ from sqlalchemy.orm import sessionmaker
+
+ maker = sessionmaker()
+ strong_reference_session(maker)
+
.. _unitofwork_merging:
diff --git a/doc/build/orm/session_transaction.rst b/doc/build/orm/session_transaction.rst
index bca3e944f..e27c15118 100644
--- a/doc/build/orm/session_transaction.rst
+++ b/doc/build/orm/session_transaction.rst
@@ -277,7 +277,7 @@ transactions set the flag ``twophase=True`` on the session::
Setting Transaction Isolation Levels
------------------------------------
-:term:`isolation` refers to the behavior of the transaction at the database
+:term:`Isolation` refers to the behavior of the transaction at the database
level in relation to other transactions occurring concurrently. There
are four well-known modes of isolation, and typically the Python DBAPI
allows these to be set on a per-connection basis, either through explicit
@@ -414,6 +414,12 @@ on the target connection, a warning is emitted::
:paramref:`.Session.connection.execution_options`
parameter to :meth:`.Session.connection`.
+Tracking Transaction State with Events
+--------------------------------------
+
+See the section :ref:`session_transaction_events` for an overview
+of the available event hooks for session transaction state changes.
+
.. _session_external_transaction:
Joining a Session into an External Transaction (such as for test suites)
@@ -513,3 +519,4 @@ everything is rolled back.
session.begin_nested()
# ... the tearDown() method stays the same
+
diff --git a/doc/build/orm/tutorial.rst b/doc/build/orm/tutorial.rst
index 8871ce765..42e94338b 100644
--- a/doc/build/orm/tutorial.rst
+++ b/doc/build/orm/tutorial.rst
@@ -40,11 +40,11 @@ following text represents the expected return value.
Version Check
=============
-A quick check to verify that we are on at least **version 1.0** of SQLAlchemy::
+A quick check to verify that we are on at least **version 1.1** of SQLAlchemy::
>>> import sqlalchemy
>>> sqlalchemy.__version__ # doctest:+SKIP
- 1.0.0
+ 1.1.0
Connecting
==========
@@ -795,11 +795,17 @@ Here's a rundown of some of the most common operators used in
# or chain multiple filter()/filter_by() calls
query.filter(User.name == 'ed').filter(User.fullname == 'Ed Jones')
+ .. note:: Make sure you use :func:`.and_` and **not** the
+ Python ``and`` operator!
+
* :func:`OR <.sql.expression.or_>`::
from sqlalchemy import or_
query.filter(or_(User.name == 'ed', User.name == 'wendy'))
+ .. note:: Make sure you use :func:`.or_` and **not** the
+ Python ``or`` operator!
+
* :meth:`MATCH <.ColumnOperators.match>`::
query.filter(User.name.match('wendy'))
@@ -849,7 +855,7 @@ database results. Here's a brief tour:
('%ed', 1, 0)
{stop}<User(name='ed', fullname='Ed Jones', password='f8s7ccs')>
-* :meth:`~.Query.one()`, fully fetches all rows, and if not
+* :meth:`~.Query.one()` fully fetches all rows, and if not
exactly one object identity or composite row is present in the result, raises
an error. With multiple rows found:
@@ -892,6 +898,11 @@ database results. Here's a brief tour:
web service, which may want to raise a "404 not found" when no results are found,
but raise an application error when multiple results are found.
+* :meth:`~.Query.one_or_none` is like :meth:`~.Query.one`, except that if no
+ results are found, it doesn't raise an error; it just returns ``None``. Like
+ :meth:`~.Query.one`, however, it does raise an error if multiple results are
+ found.
+
* :meth:`~.Query.scalar` invokes the :meth:`~.Query.one` method, and upon
success returns the first column of the row:
@@ -902,14 +913,13 @@ database results. Here's a brief tour:
{sql}>>> query.scalar() #doctest: +NORMALIZE_WHITESPACE
SELECT users.id AS users_id
FROM users
- WHERE users.name LIKE ? ORDER BY users.id
- LIMIT ? OFFSET ?
- ('%ed', 1, 0)
- {stop}7
+ WHERE users.name = ? ORDER BY users.id
+ ('ed',)
+ {stop}1
.. _orm_tutorial_literal_sql:
-Using Literal SQL
+Using Textual SQL
-----------------
Literal strings can be used flexibly with
@@ -969,31 +979,40 @@ mapper (below illustrated using an asterisk):
('ed',)
{stop}[<User(name='ed', fullname='Ed Jones', password='f8s7ccs')>]
-You can use :meth:`~sqlalchemy.orm.query.Query.from_statement()` to go
-completely "raw", using string names to identify desired columns:
+Or alternatively, specify how the columns map to the :func:`.text` construct
+explicitly using the :meth:`.TextClause.columns` method:
+
+.. sourcecode:: python+sql
+
+ >>> stmt = text("SELECT name, id FROM users where name=:name")
+ >>> stmt = stmt.columns(User.name, User.id)
+ {sql}>>> session.query(User).from_statement(stmt).params(name='ed').all()
+ SELECT name, id FROM users where name=?
+ ('ed',)
+ {stop}[<User(name='ed', fullname='Ed Jones', password='f8s7ccs')>]
+
+We can choose columns to return individually as well, as in any other case:
.. sourcecode:: python+sql
- {sql}>>> session.query("id", "name", "thenumber12").\
- ... from_statement(text("SELECT id, name, 12 as "
- ... "thenumber12 FROM users where name=:name")).\
- ... params(name='ed').all()
- SELECT id, name, 12 as thenumber12 FROM users where name=?
+ >>> stmt = text("SELECT name, id FROM users where name=:name")
+ >>> stmt = stmt.columns(User.name, User.id)
+ {sql}>>> session.query(User.id, User.name).\
+ ... from_statement(stmt).params(name='ed').all()
+ SELECT name, id FROM users where name=?
('ed',)
- {stop}[(1, u'ed', 12)]
+ {stop}[(1, u'ed')]
+
+.. seealso::
+
+ :ref:`sqlexpression_text` - The :func:`.text` construct explained
+ from the perspective of Core-only queries.
.. versionchanged:: 1.0.0
The :class:`.Query` construct emits warnings when string SQL
fragments are coerced to :func:`.text`, and :func:`.text` should
be used explicitly. See :ref:`migration_2992` for background.
-.. seealso::
-
- :ref:`sqlexpression_text` - Core description of textual segments. The
- behavior of the ORM :class:`.Query` object with regards to
- :func:`.text` and related constructs is very similar to that of the
- Core :func:`.select` object.
-
Counting
--------
@@ -1736,7 +1755,7 @@ attribute:
<User(name='jack', fullname='Jack Bean', password='gjffdd')>
For more information on eager loading, including how to configure various forms
-of loading by default, see the section :doc:`/orm/loading`.
+of loading by default, see the section :doc:`/orm/loading_relationships`.
Deleting
========
diff --git a/examples/versioned_history/history_meta.py b/examples/versioned_history/history_meta.py
index 6d7b137eb..866f2d473 100644
--- a/examples/versioned_history/history_meta.py
+++ b/examples/versioned_history/history_meta.py
@@ -210,13 +210,13 @@ def create_version(obj, session, deleted=False):
a, u, d = attributes.get_history(obj, prop.key)
if d:
- attr[hist_col.key] = d[0]
+ attr[prop.key] = d[0]
obj_changed = True
elif u:
- attr[hist_col.key] = u[0]
- else:
+ attr[prop.key] = u[0]
+ elif a:
# if the attribute had no value.
- attr[hist_col.key] = a[0]
+ attr[prop.key] = a[0]
obj_changed = True
if not obj_changed:
diff --git a/examples/versioned_history/test_versioning.py b/examples/versioned_history/test_versioning.py
index dde73a5ae..3ea240e11 100644
--- a/examples/versioned_history/test_versioning.py
+++ b/examples/versioned_history/test_versioning.py
@@ -614,3 +614,68 @@ class TestVersioning(TestCase, AssertsCompiledSQL):
sess.commit()
assert sc.version == 1
+
+ def test_create_double_flush(self):
+
+ class SomeClass(Versioned, self.Base, ComparableEntity):
+ __tablename__ = 'sometable'
+
+ id = Column(Integer, primary_key=True)
+ name = Column(String(30))
+ other = Column(String(30))
+
+ self.create_tables()
+
+ sc = SomeClass()
+ self.session.add(sc)
+ self.session.flush()
+ sc.name = 'Foo'
+ self.session.flush()
+
+ assert sc.version == 2
+
+ def test_mutate_plain_column(self):
+ class Document(self.Base, Versioned):
+ __tablename__ = 'document'
+ id = Column(Integer, primary_key=True, autoincrement=True)
+ name = Column(String, nullable=True)
+ description_ = Column('description', String, nullable=True)
+
+ self.create_tables()
+
+ document = Document()
+ self.session.add(document)
+ document.name = 'Foo'
+ self.session.commit()
+ document.name = 'Bar'
+ self.session.commit()
+
+ DocumentHistory = Document.__history_mapper__.class_
+ v2 = self.session.query(Document).one()
+ v1 = self.session.query(DocumentHistory).one()
+ self.assertEqual(v1.id, v2.id)
+ self.assertEqual(v2.name, 'Bar')
+ self.assertEqual(v1.name, 'Foo')
+
+ def test_mutate_named_column(self):
+ class Document(self.Base, Versioned):
+ __tablename__ = 'document'
+ id = Column(Integer, primary_key=True, autoincrement=True)
+ name = Column(String, nullable=True)
+ description_ = Column('description', String, nullable=True)
+
+ self.create_tables()
+
+ document = Document()
+ self.session.add(document)
+ document.description_ = 'Foo'
+ self.session.commit()
+ document.description_ = 'Bar'
+ self.session.commit()
+
+ DocumentHistory = Document.__history_mapper__.class_
+ v2 = self.session.query(Document).one()
+ v1 = self.session.query(DocumentHistory).one()
+ self.assertEqual(v1.id, v2.id)
+ self.assertEqual(v2.description_, 'Bar')
+ self.assertEqual(v1.description_, 'Foo')
diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py
index 093e90bbf..12d4e8d1c 100644
--- a/lib/sqlalchemy/__init__.py
+++ b/lib/sqlalchemy/__init__.py
@@ -8,7 +8,9 @@
from .sql import (
alias,
+ all_,
and_,
+ any_,
asc,
between,
bindparam,
@@ -52,6 +54,7 @@ from .sql import (
)
from .types import (
+ Array,
BIGINT,
BINARY,
BLOB,
@@ -120,7 +123,7 @@ from .schema import (
from .inspection import inspect
from .engine import create_engine, engine_from_config
-__version__ = '1.0.7'
+__version__ = '1.1.0b1'
def __go(lcls):
diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py
index e64078d79..4aa39d560 100644
--- a/lib/sqlalchemy/dialects/mssql/base.py
+++ b/lib/sqlalchemy/dialects/mssql/base.py
@@ -166,56 +166,6 @@ how SQLAlchemy handles this:
This
is an auxilliary use case suitable for testing and bulk insert scenarios.
-.. _legacy_schema_rendering:
-
-Rendering of SQL statements that include schema qualifiers
----------------------------------------------------------
-
-When using :class:`.Table` metadata that includes a "schema" qualifier,
-such as::
-
- account_table = Table(
- 'account', metadata,
- Column('id', Integer, primary_key=True),
- Column('info', String(100)),
- schema="customer_schema"
- )
-
-The SQL Server dialect has a long-standing behavior that it will attempt
-to turn a schema-qualified table name into an alias, such as::
-
- >>> eng = create_engine("mssql+pymssql://mydsn")
- >>> print(account_table.select().compile(eng))
- SELECT account_1.id, account_1.info
- FROM customer_schema.account AS account_1
-
-This behavior is legacy, does not function correctly for many forms
-of SQL statements, and will be disabled by default in the 1.1 series
-of SQLAlchemy. As of 1.0.5, the above statement will produce the following
-warning::
-
- SAWarning: legacy_schema_aliasing flag is defaulted to True;
- some schema-qualified queries may not function correctly.
- Consider setting this flag to False for modern SQL Server versions;
- this flag will default to False in version 1.1
-
-This warning encourages the :class:`.Engine` to be created as follows::
-
- >>> eng = create_engine("mssql+pymssql://mydsn", legacy_schema_aliasing=False)
-
-Where the above SELECT statement will produce::
-
- >>> print(account_table.select().compile(eng))
- SELECT customer_schema.account.id, customer_schema.account.info
- FROM customer_schema.account
-
-The warning will not emit if the ``legacy_schema_aliasing`` flag is set
-to either True or False.
-
-.. versionadded:: 1.0.5 - Added the ``legacy_schema_aliasing`` flag to disable
- the SQL Server dialect's legacy behavior with schema-qualified table
- names. This flag will default to False in version 1.1.
-
Collation Support
-----------------
@@ -322,6 +272,41 @@ behavior of this flag is as follows:
.. versionadded:: 1.0.0
+.. _legacy_schema_rendering:
+
+Legacy Schema Mode
+------------------
+
+Very old versions of the MSSQL dialect introduced the behavior such that a
+schema-qualified table would be auto-aliased when used in a
+SELECT statement; given a table::
+
+ account_table = Table(
+ 'account', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('info', String(100)),
+ schema="customer_schema"
+ )
+
+this legacy mode of rendering would assume that "customer_schema.account"
+would not be accepted by all parts of the SQL statement, as illustrated
+below::
+
+ >>> eng = create_engine("mssql+pymssql://mydsn", legacy_schema_aliasing=True)
+ >>> print(account_table.select().compile(eng))
+ SELECT account_1.id, account_1.info
+ FROM customer_schema.account AS account_1
+
+This mode of behavior is now off by default, as it appears to have served
+no purpose; however in the case that legacy applications rely upon it,
+it is available using the ``legacy_schema_aliasing`` argument to
+:func:`.create_engine` as illustrated above.
+
+.. versionchanged:: 1.1 the ``legacy_schema_aliasing`` flag introduced
+ in version 1.0.5 to allow disabling of legacy mode for schemas now
+ defaults to False.
+
+
.. _mssql_indexes:
Clustered Index Support
@@ -1156,15 +1141,6 @@ class MSSQLCompiler(compiler.SQLCompiler):
def _schema_aliased_table(self, table):
if getattr(table, 'schema', None) is not None:
- if self.dialect._warn_schema_aliasing and \
- table.schema.lower() != 'information_schema':
- util.warn(
- "legacy_schema_aliasing flag is defaulted to True; "
- "some schema-qualified queries may not function "
- "correctly. Consider setting this flag to False for "
- "modern SQL Server versions; this flag will default to "
- "False in version 1.1")
-
if table not in self.tablealiases:
self.tablealiases[table] = table.alias()
return self.tablealiases[table]
@@ -1530,7 +1506,7 @@ class MSDialect(default.DefaultDialect):
max_identifier_length=None,
schema_name="dbo",
deprecate_large_types=None,
- legacy_schema_aliasing=None, **opts):
+ legacy_schema_aliasing=False, **opts):
self.query_timeout = int(query_timeout or 0)
self.schema_name = schema_name
@@ -1538,13 +1514,7 @@ class MSDialect(default.DefaultDialect):
self.max_identifier_length = int(max_identifier_length or 0) or \
self.max_identifier_length
self.deprecate_large_types = deprecate_large_types
-
- if legacy_schema_aliasing is None:
- self.legacy_schema_aliasing = True
- self._warn_schema_aliasing = True
- else:
- self.legacy_schema_aliasing = legacy_schema_aliasing
- self._warn_schema_aliasing = False
+ self.legacy_schema_aliasing = legacy_schema_aliasing
super(MSDialect, self).__init__(**opts)
@@ -1772,7 +1742,7 @@ class MSDialect(default.DefaultDialect):
MSNText, MSBinary, MSVarBinary,
sqltypes.LargeBinary):
if charlen == -1:
- charlen = 'max'
+ charlen = None
kwargs['length'] = charlen
if collation:
kwargs['collation'] = collation
diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py
index fee05fd2d..4b3e5bcd1 100644
--- a/lib/sqlalchemy/dialects/mysql/base.py
+++ b/lib/sqlalchemy/dialects/mysql/base.py
@@ -1584,7 +1584,10 @@ class SET(_EnumeratedValues):
def column_expression(self, colexpr):
if self.retrieve_as_bitwise:
- return colexpr + 0
+ return sql.type_coerce(
+ sql.type_coerce(colexpr, sqltypes.Integer) + 0,
+ self
+ )
else:
return colexpr
diff --git a/lib/sqlalchemy/dialects/oracle/cx_oracle.py b/lib/sqlalchemy/dialects/oracle/cx_oracle.py
index 4aed45c14..dede3b21a 100644
--- a/lib/sqlalchemy/dialects/oracle/cx_oracle.py
+++ b/lib/sqlalchemy/dialects/oracle/cx_oracle.py
@@ -293,6 +293,7 @@ from .base import OracleCompiler, OracleDialect, OracleExecutionContext
from . import base as oracle
from ...engine import result as _result
from sqlalchemy import types as sqltypes, util, exc, processors
+from sqlalchemy import util
import random
import collections
import decimal
@@ -719,8 +720,10 @@ class OracleDialect_cx_oracle(OracleDialect):
# this occurs in tests with mock DBAPIs
self._cx_oracle_string_types = set()
self._cx_oracle_with_unicode = False
- elif self.cx_oracle_ver >= (5,) and not \
- hasattr(self.dbapi, 'UNICODE'):
+ elif util.py3k or (
+ self.cx_oracle_ver >= (5,) and not \
+ hasattr(self.dbapi, 'UNICODE')
+ ):
# cx_Oracle WITH_UNICODE mode. *only* python
# unicode objects accepted for anything
self.supports_unicode_statements = True
diff --git a/lib/sqlalchemy/dialects/postgresql/__init__.py b/lib/sqlalchemy/dialects/postgresql/__init__.py
index 98fe6f085..d67f2a07e 100644
--- a/lib/sqlalchemy/dialects/postgresql/__init__.py
+++ b/lib/sqlalchemy/dialects/postgresql/__init__.py
@@ -12,11 +12,13 @@ base.dialect = psycopg2.dialect
from .base import \
INTEGER, BIGINT, SMALLINT, VARCHAR, CHAR, TEXT, NUMERIC, FLOAT, REAL, \
INET, CIDR, UUID, BIT, MACADDR, OID, DOUBLE_PRECISION, TIMESTAMP, TIME, \
- DATE, BYTEA, BOOLEAN, INTERVAL, ARRAY, ENUM, dialect, array, Any, All, \
- TSVECTOR, DropEnumType
-from .constraints import ExcludeConstraint
+ DATE, BYTEA, BOOLEAN, INTERVAL, ENUM, dialect, TSVECTOR, DropEnumType, \
+ CreateEnumType
from .hstore import HSTORE, hstore
-from .json import JSON, JSONElement, JSONB
+from .json import JSON, JSONB
+from .array import array, ARRAY, Any, All
+from .ext import aggregate_order_by, ExcludeConstraint, array_agg
+
from .ranges import INT4RANGE, INT8RANGE, NUMRANGE, DATERANGE, TSRANGE, \
TSTZRANGE
@@ -24,8 +26,9 @@ __all__ = (
'INTEGER', 'BIGINT', 'SMALLINT', 'VARCHAR', 'CHAR', 'TEXT', 'NUMERIC',
'FLOAT', 'REAL', 'INET', 'CIDR', 'UUID', 'BIT', 'MACADDR', 'OID',
'DOUBLE_PRECISION', 'TIMESTAMP', 'TIME', 'DATE', 'BYTEA', 'BOOLEAN',
- 'INTERVAL', 'ARRAY', 'ENUM', 'dialect', 'Any', 'All', 'array', 'HSTORE',
+ 'INTERVAL', 'ARRAY', 'ENUM', 'dialect', 'array', 'HSTORE',
'hstore', 'INT4RANGE', 'INT8RANGE', 'NUMRANGE', 'DATERANGE',
- 'TSRANGE', 'TSTZRANGE', 'json', 'JSON', 'JSONB', 'JSONElement',
- 'DropEnumType'
+ 'TSRANGE', 'TSTZRANGE', 'json', 'JSON', 'JSONB', 'Any', 'All',
+ 'DropEnumType', 'CreateEnumType', 'ExcludeConstraint',
+ 'aggregate_order_by', 'array_agg'
)
diff --git a/lib/sqlalchemy/dialects/postgresql/array.py b/lib/sqlalchemy/dialects/postgresql/array.py
new file mode 100644
index 000000000..b88f139de
--- /dev/null
+++ b/lib/sqlalchemy/dialects/postgresql/array.py
@@ -0,0 +1,306 @@
+# postgresql/array.py
+# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+from .base import ischema_names
+from ...sql import expression, operators
+from ...sql.base import SchemaEventTarget
+from ... import types as sqltypes
+
+try:
+ from uuid import UUID as _python_UUID
+except ImportError:
+ _python_UUID = None
+
+
+def Any(other, arrexpr, operator=operators.eq):
+ """A synonym for the :meth:`.ARRAY.Comparator.any` method.
+
+    This function is legacy and is here for backwards-compatibility.
+
+ .. seealso::
+
+ :func:`.expression.any_`
+
+ """
+
+ return arrexpr.any(other, operator)
+
+
+def All(other, arrexpr, operator=operators.eq):
+ """A synonym for the :meth:`.ARRAY.Comparator.all` method.
+
+    This function is legacy and is here for backwards-compatibility.
+
+ .. seealso::
+
+ :func:`.expression.all_`
+
+ """
+
+ return arrexpr.all(other, operator)
+
+
+class array(expression.Tuple):
+
+ """A Postgresql ARRAY literal.
+
+ This is used to produce ARRAY literals in SQL expressions, e.g.::
+
+ from sqlalchemy.dialects.postgresql import array
+ from sqlalchemy.dialects import postgresql
+ from sqlalchemy import select, func
+
+ stmt = select([
+ array([1,2]) + array([3,4,5])
+ ])
+
+ print stmt.compile(dialect=postgresql.dialect())
+
+ Produces the SQL::
+
+ SELECT ARRAY[%(param_1)s, %(param_2)s] ||
+ ARRAY[%(param_3)s, %(param_4)s, %(param_5)s]) AS anon_1
+
+ An instance of :class:`.array` will always have the datatype
+ :class:`.ARRAY`. The "inner" type of the array is inferred from
+ the values present, unless the ``type_`` keyword argument is passed::
+
+ array(['foo', 'bar'], type_=CHAR)
+
+ .. versionadded:: 0.8 Added the :class:`~.postgresql.array` literal type.
+
+ See also:
+
+ :class:`.postgresql.ARRAY`
+
+ """
+ __visit_name__ = 'array'
+
+ def __init__(self, clauses, **kw):
+ super(array, self).__init__(*clauses, **kw)
+ self.type = ARRAY(self.type)
+
+ def _bind_param(self, operator, obj):
+ return array([
+ expression.BindParameter(None, o, _compared_to_operator=operator,
+ _compared_to_type=self.type, unique=True)
+ for o in obj
+ ])
+
+ def self_group(self, against=None):
+ if (against in (
+ operators.any_op, operators.all_op, operators.getitem)):
+ return expression.Grouping(self)
+ else:
+ return self
+
+
+CONTAINS = operators.custom_op("@>", precedence=5)
+
+CONTAINED_BY = operators.custom_op("<@", precedence=5)
+
+OVERLAP = operators.custom_op("&&", precedence=5)
+
+
+class ARRAY(SchemaEventTarget, sqltypes.Array):
+
+ """Postgresql ARRAY type.
+
+ .. versionchanged:: 1.1 The :class:`.postgresql.ARRAY` type is now
+ a subclass of the core :class:`.Array` type.
+
+ The :class:`.postgresql.ARRAY` type is constructed in the same way
+ as the core :class:`.Array` type; a member type is required, and a
+ number of dimensions is recommended if the type is to be used for more
+ than one dimension::
+
+ from sqlalchemy.dialects import postgresql
+
+ mytable = Table("mytable", metadata,
+ Column("data", postgresql.ARRAY(Integer, dimensions=2))
+ )
+
+ The :class:`.postgresql.ARRAY` type provides all operations defined on the
+ core :class:`.Array` type, including support for "dimensions", indexed
+ access, and simple matching such as :meth:`.Array.Comparator.any`
+    and :meth:`.Array.Comparator.all`. The :class:`.postgresql.ARRAY` class
+    also provides PostgreSQL-specific methods for containment operations,
+    including :meth:`.postgresql.ARRAY.Comparator.contains`,
+ :meth:`.postgresql.ARRAY.Comparator.contained_by`,
+ and :meth:`.postgresql.ARRAY.Comparator.overlap`, e.g.::
+
+ mytable.c.data.contains([1, 2])
+
+ The :class:`.postgresql.ARRAY` type may not be supported on all
+ PostgreSQL DBAPIs; it is currently known to work on psycopg2 only.
+
+ Additionally, the :class:`.postgresql.ARRAY` type does not work directly in
+ conjunction with the :class:`.ENUM` type. For a workaround, see the
+ special type at :ref:`postgresql_array_of_enum`.
+
+ .. seealso::
+
+ :class:`.types.Array` - base array type
+
+ :class:`.postgresql.array` - produces a literal array value.
+
+ """
+
+ class Comparator(sqltypes.Array.Comparator):
+
+ """Define comparison operations for :class:`.ARRAY`.
+
+ Note that these operations are in addition to those provided
+ by the base :class:`.types.Array.Comparator` class, including
+ :meth:`.types.Array.Comparator.any` and
+ :meth:`.types.Array.Comparator.all`.
+
+ """
+
+ def contains(self, other, **kwargs):
+ """Boolean expression. Test if elements are a superset of the
+ elements of the argument array expression.
+ """
+ return self.operate(CONTAINS, other, result_type=sqltypes.Boolean)
+
+ def contained_by(self, other):
+ """Boolean expression. Test if elements are a proper subset of the
+ elements of the argument array expression.
+ """
+ return self.operate(
+ CONTAINED_BY, other, result_type=sqltypes.Boolean)
+
+ def overlap(self, other):
+ """Boolean expression. Test if array has elements in common with
+ an argument array expression.
+ """
+ return self.operate(OVERLAP, other, result_type=sqltypes.Boolean)
+
+ comparator_factory = Comparator
+
+ def __init__(self, item_type, as_tuple=False, dimensions=None,
+ zero_indexes=False):
+ """Construct an ARRAY.
+
+ E.g.::
+
+ Column('myarray', ARRAY(Integer))
+
+ Arguments are:
+
+ :param item_type: The data type of items of this array. Note that
+ dimensionality is irrelevant here, so multi-dimensional arrays like
+          ``INTEGER[][]`` are constructed as ``ARRAY(Integer)``, not as
+ ``ARRAY(ARRAY(Integer))`` or such.
+
+ :param as_tuple=False: Specify whether return results
+ should be converted to tuples from lists. DBAPIs such
+ as psycopg2 return lists by default. When tuples are
+ returned, the results are hashable.
+
+ :param dimensions: if non-None, the ARRAY will assume a fixed
+ number of dimensions. This will cause the DDL emitted for this
+ ARRAY to include the exact number of bracket clauses ``[]``,
+ and will also optimize the performance of the type overall.
+ Note that PG arrays are always implicitly "non-dimensioned",
+ meaning they can store any number of dimensions no matter how
+ they were declared.
+
+ :param zero_indexes=False: when True, index values will be converted
+ between Python zero-based and Postgresql one-based indexes, e.g.
+ a value of one will be added to all index values before passing
+ to the database.
+
+ .. versionadded:: 0.9.5
+
+
+ """
+ if isinstance(item_type, ARRAY):
+ raise ValueError("Do not nest ARRAY types; ARRAY(basetype) "
+ "handles multi-dimensional arrays of basetype")
+ if isinstance(item_type, type):
+ item_type = item_type()
+ self.item_type = item_type
+ self.as_tuple = as_tuple
+ self.dimensions = dimensions
+ self.zero_indexes = zero_indexes
+
+ @property
+ def hashable(self):
+ return self.as_tuple
+
+ @property
+ def python_type(self):
+ return list
+
+ def compare_values(self, x, y):
+ return x == y
+
+ def _set_parent(self, column):
+ """Support SchemaEentTarget"""
+
+ if isinstance(self.item_type, SchemaEventTarget):
+ self.item_type._set_parent(column)
+
+ def _set_parent_with_dispatch(self, parent):
+ """Support SchemaEentTarget"""
+
+ if isinstance(self.item_type, SchemaEventTarget):
+ self.item_type._set_parent_with_dispatch(parent)
+
+ def _proc_array(self, arr, itemproc, dim, collection):
+ if dim is None:
+ arr = list(arr)
+ if dim == 1 or dim is None and (
+ # this has to be (list, tuple), or at least
+ # not hasattr('__iter__'), since Py3K strings
+ # etc. have __iter__
+ not arr or not isinstance(arr[0], (list, tuple))):
+ if itemproc:
+ return collection(itemproc(x) for x in arr)
+ else:
+ return collection(arr)
+ else:
+ return collection(
+ self._proc_array(
+ x, itemproc,
+ dim - 1 if dim is not None else None,
+ collection)
+ for x in arr
+ )
+
+ def bind_processor(self, dialect):
+ item_proc = self.item_type.dialect_impl(dialect).\
+ bind_processor(dialect)
+
+ def process(value):
+ if value is None:
+ return value
+ else:
+ return self._proc_array(
+ value,
+ item_proc,
+ self.dimensions,
+ list)
+ return process
+
+ def result_processor(self, dialect, coltype):
+ item_proc = self.item_type.dialect_impl(dialect).\
+ result_processor(dialect, coltype)
+
+ def process(value):
+ if value is None:
+ return value
+ else:
+ return self._proc_array(
+ value,
+ item_proc,
+ self.dimensions,
+ tuple if self.as_tuple else list)
+ return process
+
+ischema_names['_array'] = ARRAY
diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py
index dc7987d74..ec12e1145 100644
--- a/lib/sqlalchemy/dialects/postgresql/base.py
+++ b/lib/sqlalchemy/dialects/postgresql/base.py
@@ -508,6 +508,41 @@ dialect in conjunction with the :class:`.Table` construct:
`Postgresql CREATE TABLE options
<http://www.postgresql.org/docs/9.3/static/sql-createtable.html>`_
+ARRAY Types
+-----------
+
+The Postgresql dialect supports arrays, both as multidimensional column types
+and as array literals:
+
+* :class:`.postgresql.ARRAY` - ARRAY datatype
+
+* :class:`.postgresql.array` - array literal
+
+* :func:`.postgresql.array_agg` - ARRAY_AGG SQL function
+
+* :class:`.postgresql.aggregate_order_by` - helper for PG's ORDER BY aggregate
+ function syntax.
+
+JSON Types
+----------
+
+The Postgresql dialect supports both JSON and JSONB datatypes, including
+psycopg2's native support and support for all of Postgresql's special
+operators:
+
+* :class:`.postgresql.JSON`
+
+* :class:`.postgresql.JSONB`
+
+HSTORE Type
+-----------
+
+The Postgresql HSTORE type as well as hstore literals are supported:
+
+* :class:`.postgresql.HSTORE` - HSTORE datatype
+
+* :class:`.postgresql.hstore` - hstore literal
+
ENUM Types
----------
@@ -524,13 +559,54 @@ entity. The following sections should be consulted:
* :meth:`.postgresql.ENUM.create` , :meth:`.postgresql.ENUM.drop` - individual
CREATE and DROP commands for ENUM.
+.. _postgresql_array_of_enum:
+
+Using ENUM with ARRAY
+^^^^^^^^^^^^^^^^^^^^^
+
+The combination of ENUM and ARRAY is not directly supported by backend
+DBAPIs at this time. In order to send and receive an ARRAY of ENUM,
+use the following workaround type::
+
+ class ArrayOfEnum(ARRAY):
+
+ def bind_expression(self, bindvalue):
+ return sa.cast(bindvalue, self)
+
+ def result_processor(self, dialect, coltype):
+ super_rp = super(ArrayOfEnum, self).result_processor(
+ dialect, coltype)
+
+ def handle_raw_string(value):
+ inner = re.match(r"^{(.*)}$", value).group(1)
+ return inner.split(",")
+
+ def process(value):
+ if value is None:
+ return None
+ return super_rp(handle_raw_string(value))
+ return process
+
+E.g.::
+
+ Table(
+ 'mydata', metadata,
+ Column('id', Integer, primary_key=True),
+        Column('data', ArrayOfEnum(ENUM('a', 'b', 'c', name='myenum')))
+
+ )
+
+This type is not included as a built-in type as it would be incompatible
+with a DBAPI that suddenly decides to support ARRAY of ENUM directly in
+a new version.
+
"""
from collections import defaultdict
import re
from ... import sql, schema, exc, util
from ...engine import default, reflection
-from ...sql import compiler, expression, operators, default_comparator
+from ...sql import compiler, expression
from ... import types as sqltypes
try:
@@ -722,417 +798,6 @@ class TSVECTOR(sqltypes.TypeEngine):
__visit_name__ = 'TSVECTOR'
-class _Slice(expression.ColumnElement):
- __visit_name__ = 'slice'
- type = sqltypes.NULLTYPE
-
- def __init__(self, slice_, source_comparator):
- self.start = default_comparator._check_literal(
- source_comparator.expr,
- operators.getitem, slice_.start)
- self.stop = default_comparator._check_literal(
- source_comparator.expr,
- operators.getitem, slice_.stop)
-
-
-class Any(expression.ColumnElement):
-
- """Represent the clause ``left operator ANY (right)``. ``right`` must be
- an array expression.
-
- .. seealso::
-
- :class:`.postgresql.ARRAY`
-
- :meth:`.postgresql.ARRAY.Comparator.any` - ARRAY-bound method
-
- """
- __visit_name__ = 'any'
-
- def __init__(self, left, right, operator=operators.eq):
- self.type = sqltypes.Boolean()
- self.left = expression._literal_as_binds(left)
- self.right = right
- self.operator = operator
-
-
-class All(expression.ColumnElement):
-
- """Represent the clause ``left operator ALL (right)``. ``right`` must be
- an array expression.
-
- .. seealso::
-
- :class:`.postgresql.ARRAY`
-
- :meth:`.postgresql.ARRAY.Comparator.all` - ARRAY-bound method
-
- """
- __visit_name__ = 'all'
-
- def __init__(self, left, right, operator=operators.eq):
- self.type = sqltypes.Boolean()
- self.left = expression._literal_as_binds(left)
- self.right = right
- self.operator = operator
-
-
-class array(expression.Tuple):
-
- """A Postgresql ARRAY literal.
-
- This is used to produce ARRAY literals in SQL expressions, e.g.::
-
- from sqlalchemy.dialects.postgresql import array
- from sqlalchemy.dialects import postgresql
- from sqlalchemy import select, func
-
- stmt = select([
- array([1,2]) + array([3,4,5])
- ])
-
- print stmt.compile(dialect=postgresql.dialect())
-
- Produces the SQL::
-
- SELECT ARRAY[%(param_1)s, %(param_2)s] ||
- ARRAY[%(param_3)s, %(param_4)s, %(param_5)s]) AS anon_1
-
- An instance of :class:`.array` will always have the datatype
- :class:`.ARRAY`. The "inner" type of the array is inferred from
- the values present, unless the ``type_`` keyword argument is passed::
-
- array(['foo', 'bar'], type_=CHAR)
-
- .. versionadded:: 0.8 Added the :class:`~.postgresql.array` literal type.
-
- See also:
-
- :class:`.postgresql.ARRAY`
-
- """
- __visit_name__ = 'array'
-
- def __init__(self, clauses, **kw):
- super(array, self).__init__(*clauses, **kw)
- self.type = ARRAY(self.type)
-
- def _bind_param(self, operator, obj):
- return array([
- expression.BindParameter(None, o, _compared_to_operator=operator,
- _compared_to_type=self.type, unique=True)
- for o in obj
- ])
-
- def self_group(self, against=None):
- return self
-
-
-class ARRAY(sqltypes.Concatenable, sqltypes.TypeEngine):
-
- """Postgresql ARRAY type.
-
- Represents values as Python lists.
-
- An :class:`.ARRAY` type is constructed given the "type"
- of element::
-
- mytable = Table("mytable", metadata,
- Column("data", ARRAY(Integer))
- )
-
- The above type represents an N-dimensional array,
- meaning Postgresql will interpret values with any number
- of dimensions automatically. To produce an INSERT
- construct that passes in a 1-dimensional array of integers::
-
- connection.execute(
- mytable.insert(),
- data=[1,2,3]
- )
-
- The :class:`.ARRAY` type can be constructed given a fixed number
- of dimensions::
-
- mytable = Table("mytable", metadata,
- Column("data", ARRAY(Integer, dimensions=2))
- )
-
- This has the effect of the :class:`.ARRAY` type
- specifying that number of bracketed blocks when a :class:`.Table`
- is used in a CREATE TABLE statement, or when the type is used
- within a :func:`.expression.cast` construct; it also causes
- the bind parameter and result set processing of the type
- to optimize itself to expect exactly that number of dimensions.
- Note that Postgresql itself still allows N dimensions with such a type.
-
- SQL expressions of type :class:`.ARRAY` have support for "index" and
- "slice" behavior. The Python ``[]`` operator works normally here, given
- integer indexes or slices. Note that Postgresql arrays default
- to 1-based indexing. The operator produces binary expression
- constructs which will produce the appropriate SQL, both for
- SELECT statements::
-
- select([mytable.c.data[5], mytable.c.data[2:7]])
-
- as well as UPDATE statements when the :meth:`.Update.values` method
- is used::
-
- mytable.update().values({
- mytable.c.data[5]: 7,
- mytable.c.data[2:7]: [1, 2, 3]
- })
-
- .. note::
-
- Multi-dimensional support for the ``[]`` operator is not supported
- in SQLAlchemy 1.0. Please use the :func:`.type_coerce` function
- to cast an intermediary expression to ARRAY again as a workaround::
-
- expr = type_coerce(my_array_column[5], ARRAY(Integer))[6]
-
- Multi-dimensional support will be provided in a future release.
-
- :class:`.ARRAY` provides special methods for containment operations,
- e.g.::
-
- mytable.c.data.contains([1, 2])
-
- For a full list of special methods see :class:`.ARRAY.Comparator`.
-
- .. versionadded:: 0.8 Added support for index and slice operations
- to the :class:`.ARRAY` type, including support for UPDATE
- statements, and special array containment operations.
-
- The :class:`.ARRAY` type may not be supported on all DBAPIs.
- It is known to work on psycopg2 and not pg8000.
-
- See also:
-
- :class:`.postgresql.array` - produce a literal array value.
-
- """
- __visit_name__ = 'ARRAY'
-
- class Comparator(sqltypes.Concatenable.Comparator):
-
- """Define comparison operations for :class:`.ARRAY`."""
-
- def __getitem__(self, index):
- shift_indexes = 1 if self.expr.type.zero_indexes else 0
- if isinstance(index, slice):
- if shift_indexes:
- index = slice(
- index.start + shift_indexes,
- index.stop + shift_indexes,
- index.step
- )
- index = _Slice(index, self)
- return_type = self.type
- else:
- index += shift_indexes
- return_type = self.type.item_type
-
- return default_comparator._binary_operate(
- self.expr, operators.getitem, index,
- result_type=return_type)
-
- def any(self, other, operator=operators.eq):
- """Return ``other operator ANY (array)`` clause.
-
- Argument places are switched, because ANY requires array
- expression to be on the right hand-side.
-
- E.g.::
-
- from sqlalchemy.sql import operators
-
- conn.execute(
- select([table.c.data]).where(
- table.c.data.any(7, operator=operators.lt)
- )
- )
-
- :param other: expression to be compared
- :param operator: an operator object from the
- :mod:`sqlalchemy.sql.operators`
- package, defaults to :func:`.operators.eq`.
-
- .. seealso::
-
- :class:`.postgresql.Any`
-
- :meth:`.postgresql.ARRAY.Comparator.all`
-
- """
- return Any(other, self.expr, operator=operator)
-
- def all(self, other, operator=operators.eq):
- """Return ``other operator ALL (array)`` clause.
-
- Argument places are switched, because ALL requires array
- expression to be on the right hand-side.
-
- E.g.::
-
- from sqlalchemy.sql import operators
-
- conn.execute(
- select([table.c.data]).where(
- table.c.data.all(7, operator=operators.lt)
- )
- )
-
- :param other: expression to be compared
- :param operator: an operator object from the
- :mod:`sqlalchemy.sql.operators`
- package, defaults to :func:`.operators.eq`.
-
- .. seealso::
-
- :class:`.postgresql.All`
-
- :meth:`.postgresql.ARRAY.Comparator.any`
-
- """
- return All(other, self.expr, operator=operator)
-
- def contains(self, other, **kwargs):
- """Boolean expression. Test if elements are a superset of the
- elements of the argument array expression.
- """
- return self.expr.op('@>')(other)
-
- def contained_by(self, other):
- """Boolean expression. Test if elements are a proper subset of the
- elements of the argument array expression.
- """
- return self.expr.op('<@')(other)
-
- def overlap(self, other):
- """Boolean expression. Test if array has elements in common with
- an argument array expression.
- """
- return self.expr.op('&&')(other)
-
- def _adapt_expression(self, op, other_comparator):
- if isinstance(op, operators.custom_op):
- if op.opstring in ['@>', '<@', '&&']:
- return op, sqltypes.Boolean
- return sqltypes.Concatenable.Comparator.\
- _adapt_expression(self, op, other_comparator)
-
- comparator_factory = Comparator
-
- def __init__(self, item_type, as_tuple=False, dimensions=None,
- zero_indexes=False):
- """Construct an ARRAY.
-
- E.g.::
-
- Column('myarray', ARRAY(Integer))
-
- Arguments are:
-
- :param item_type: The data type of items of this array. Note that
- dimensionality is irrelevant here, so multi-dimensional arrays like
- ``INTEGER[][]``, are constructed as ``ARRAY(Integer)``, not as
- ``ARRAY(ARRAY(Integer))`` or such.
-
- :param as_tuple=False: Specify whether return results
- should be converted to tuples from lists. DBAPIs such
- as psycopg2 return lists by default. When tuples are
- returned, the results are hashable.
-
- :param dimensions: if non-None, the ARRAY will assume a fixed
- number of dimensions. This will cause the DDL emitted for this
- ARRAY to include the exact number of bracket clauses ``[]``,
- and will also optimize the performance of the type overall.
- Note that PG arrays are always implicitly "non-dimensioned",
- meaning they can store any number of dimensions no matter how
- they were declared.
-
- :param zero_indexes=False: when True, index values will be converted
- between Python zero-based and Postgresql one-based indexes, e.g.
- a value of one will be added to all index values before passing
- to the database.
-
- .. versionadded:: 0.9.5
-
- """
- if isinstance(item_type, ARRAY):
- raise ValueError("Do not nest ARRAY types; ARRAY(basetype) "
- "handles multi-dimensional arrays of basetype")
- if isinstance(item_type, type):
- item_type = item_type()
- self.item_type = item_type
- self.as_tuple = as_tuple
- self.dimensions = dimensions
- self.zero_indexes = zero_indexes
-
- @property
- def python_type(self):
- return list
-
- def compare_values(self, x, y):
- return x == y
-
- def _proc_array(self, arr, itemproc, dim, collection):
- if dim is None:
- arr = list(arr)
- if dim == 1 or dim is None and (
- # this has to be (list, tuple), or at least
- # not hasattr('__iter__'), since Py3K strings
- # etc. have __iter__
- not arr or not isinstance(arr[0], (list, tuple))):
- if itemproc:
- return collection(itemproc(x) for x in arr)
- else:
- return collection(arr)
- else:
- return collection(
- self._proc_array(
- x, itemproc,
- dim - 1 if dim is not None else None,
- collection)
- for x in arr
- )
-
- def bind_processor(self, dialect):
- item_proc = self.item_type.\
- dialect_impl(dialect).\
- bind_processor(dialect)
-
- def process(value):
- if value is None:
- return value
- else:
- return self._proc_array(
- value,
- item_proc,
- self.dimensions,
- list)
- return process
-
- def result_processor(self, dialect, coltype):
- item_proc = self.item_type.\
- dialect_impl(dialect).\
- result_processor(dialect, coltype)
-
- def process(value):
- if value is None:
- return value
- else:
- return self._proc_array(
- value,
- item_proc,
- self.dimensions,
- tuple if self.as_tuple else list)
- return process
-
-PGArray = ARRAY
-
-
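The comparator methods removed above (``any()``, ``all()``, ``contains()``, ``contained_by()``, ``overlap()``) move into the new ``postgresql/array.py`` module per the file list in this merge. A brief usage sketch of the operators they render, against a hypothetical table::

    from sqlalchemy import Table, Column, Integer, MetaData, select
    from sqlalchemy.dialects.postgresql import ARRAY

    metadata = MetaData()
    t = Table('t', metadata, Column('data', ARRAY(Integer)))

    # data @> ARRAY[1, 2] -- elements are a superset of the given array
    stmt = select([t.c.data]).where(t.c.data.contains([1, 2]))

    # data && ARRAY[3, 4] -- arrays share at least one element
    stmt = select([t.c.data]).where(t.c.data.overlap([3, 4]))
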
class ENUM(sqltypes.Enum):
"""Postgresql ENUM type.
@@ -1385,26 +1050,18 @@ class PGCompiler(compiler.SQLCompiler):
self.process(element.stop, **kw),
)
- def visit_any(self, element, **kw):
- return "%s%sANY (%s)" % (
- self.process(element.left, **kw),
- compiler.OPERATORS[element.operator],
- self.process(element.right, **kw)
- )
-
- def visit_all(self, element, **kw):
- return "%s%sALL (%s)" % (
- self.process(element.left, **kw),
- compiler.OPERATORS[element.operator],
- self.process(element.right, **kw)
- )
-
def visit_getitem_binary(self, binary, operator, **kw):
return "%s[%s]" % (
self.process(binary.left, **kw),
self.process(binary.right, **kw)
)
+ def visit_aggregate_order_by(self, element, **kw):
+ return "%s ORDER BY %s" % (
+ self.process(element.target, **kw),
+ self.process(element.order_by, **kw)
+ )
+
def visit_match_op_binary(self, binary, operator, **kw):
if "postgresql_regconfig" in binary.modifiers:
regconfig = self.render_literal_value(
@@ -1547,8 +1204,8 @@ class PGDDLCompiler(compiler.DDLCompiler):
else:
colspec += " SERIAL"
else:
- colspec += " " + self.dialect.type_compiler.process(column.type,
- type_expression=column)
+ colspec += " " + self.dialect.type_compiler.process(
+ column.type, type_expression=column)
default = self.get_column_default_string(column)
if default is not None:
colspec += " DEFAULT " + default
@@ -2448,7 +2105,7 @@ class PGDialect(default.DefaultDialect):
if coltype:
coltype = coltype(*args, **kwargs)
if is_array:
- coltype = ARRAY(coltype)
+ coltype = self.ischema_names['_array'](coltype)
else:
util.warn("Did not recognize type '%s' of column '%s'" %
(attype, name))
@@ -2641,7 +2298,7 @@ class PGDialect(default.DefaultDialect):
i.relname as relname,
ix.indisunique, ix.indexprs, ix.indpred,
a.attname, a.attnum, NULL, ix.indkey%s,
- i.reloptions, am.amname
+ %s, am.amname
FROM
pg_class t
join pg_index ix on t.oid = ix.indrelid
@@ -2664,6 +2321,8 @@ class PGDialect(default.DefaultDialect):
# cast does not work in PG 8.2.4, does work in 8.3.0.
# nothing in PG changelogs regarding this.
"::varchar" if self.server_version_info >= (8, 3) else "",
+ "i.reloptions" if self.server_version_info >= (8, 2)
+ else "NULL",
self._pg_index_any("a.attnum", "ix.indkey")
)
else:
diff --git a/lib/sqlalchemy/dialects/postgresql/constraints.py b/lib/sqlalchemy/dialects/postgresql/ext.py
index 4cfc050de..9b2e3fd73 100644
--- a/lib/sqlalchemy/dialects/postgresql/constraints.py
+++ b/lib/sqlalchemy/dialects/postgresql/ext.py
@@ -1,11 +1,69 @@
-# Copyright (C) 2013-2015 the SQLAlchemy authors and contributors
+# postgresql/ext.py
+# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
-from ...sql.schema import ColumnCollectionConstraint
+
from ...sql import expression
-from ... import util
+from ...sql import elements
+from ...sql import functions
+from ...sql.schema import ColumnCollectionConstraint
+from .array import ARRAY
+
+
+class aggregate_order_by(expression.ColumnElement):
+ """Represent a Postgresql aggregate order by expression.
+
+ E.g.::
+
+ from sqlalchemy.dialects.postgresql import aggregate_order_by
+ expr = func.array_agg(aggregate_order_by(table.c.a, table.c.b.desc()))
+ stmt = select([expr])
+
+ would represent the expression::
+
+ SELECT array_agg(a ORDER BY b DESC) FROM table;
+
+ Similarly::
+
+ expr = func.string_agg(
+ table.c.a,
+ aggregate_order_by(literal_column("','"), table.c.a)
+ )
+ stmt = select([expr])
+
+ Would represent::
+
+ SELECT string_agg(a, ',' ORDER BY a) FROM table;
+
+ .. versionadded:: 1.1
+
+ .. seealso::
+
+ :class:`.array_agg`
+
+ """
+
+ __visit_name__ = 'aggregate_order_by'
+
+ def __init__(self, target, order_by):
+ self.target = elements._literal_as_binds(target)
+ self.order_by = elements._literal_as_binds(order_by)
+
+ def self_group(self, against=None):
+ return self
+
+ def get_children(self, **kwargs):
+ return self.target, self.order_by
+
+ def _copy_internals(self, clone=elements._clone, **kw):
+ self.target = clone(self.target, **kw)
+ self.order_by = clone(self.order_by, **kw)
+
+ @property
+ def _from_objects(self):
+ return self.target._from_objects + self.order_by._from_objects
class ExcludeConstraint(ColumnCollectionConstraint):
@@ -96,3 +154,15 @@ static/sql-createtable.html#SQL-CREATETABLE-EXCLUDE
initially=self.initially)
c.dispatch._update(self.dispatch)
return c
+
+
+def array_agg(*arg, **kw):
+ """Postgresql-specific form of :class:`.array_agg`, ensures
+ return type is :class:`.postgresql.ARRAY` and not
+ the plain :class:`.types.Array`.
+
+ .. versionadded:: 1.1
+
+ """
+ kw['type_'] = ARRAY(functions._type_from_args(arg))
+ return functions.func.array_agg(*arg, **kw)
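A usage sketch for the two additions above, assuming both names are exported from the dialect package as the ``aggregate_order_by`` docstring suggests; the table here is hypothetical::

    from sqlalchemy import Table, Column, Integer, MetaData, select
    from sqlalchemy.dialects.postgresql import aggregate_order_by, array_agg

    metadata = MetaData()
    t = Table('t', metadata, Column('a', Integer), Column('b', Integer))

    # renders: SELECT array_agg(a ORDER BY b DESC) FROM t
    # with the result column typed as postgresql.ARRAY(Integer)
    stmt = select([array_agg(aggregate_order_by(t.c.a, t.c.b.desc()))])
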
diff --git a/lib/sqlalchemy/dialects/postgresql/hstore.py b/lib/sqlalchemy/dialects/postgresql/hstore.py
index 9f369cb5b..b7b0fc007 100644
--- a/lib/sqlalchemy/dialects/postgresql/hstore.py
+++ b/lib/sqlalchemy/dialects/postgresql/hstore.py
@@ -7,110 +7,43 @@
import re
-from .base import ARRAY, ischema_names
+from .base import ischema_names
+from .array import ARRAY
from ... import types as sqltypes
from ...sql import functions as sqlfunc
+from ...sql import operators
from ...sql.operators import custom_op
from ... import util
__all__ = ('HSTORE', 'hstore')
-# My best guess at the parsing rules of hstore literals, since no formal
-# grammar is given. This is mostly reverse engineered from PG's input parser
-# behavior.
-HSTORE_PAIR_RE = re.compile(r"""
-(
- "(?P<key> (\\ . | [^"])* )" # Quoted key
-)
-[ ]* => [ ]* # Pair operator, optional adjoining whitespace
-(
- (?P<value_null> NULL ) # NULL value
- | "(?P<value> (\\ . | [^"])* )" # Quoted value
-)
-""", re.VERBOSE)
-
-HSTORE_DELIMITER_RE = re.compile(r"""
-[ ]* , [ ]*
-""", re.VERBOSE)
-
-
-def _parse_error(hstore_str, pos):
- """format an unmarshalling error."""
-
- ctx = 20
- hslen = len(hstore_str)
-
- parsed_tail = hstore_str[max(pos - ctx - 1, 0):min(pos, hslen)]
- residual = hstore_str[min(pos, hslen):min(pos + ctx + 1, hslen)]
- if len(parsed_tail) > ctx:
- parsed_tail = '[...]' + parsed_tail[1:]
- if len(residual) > ctx:
- residual = residual[:-1] + '[...]'
-
- return "After %r, could not parse residual at position %d: %r" % (
- parsed_tail, pos, residual)
-
-
-def _parse_hstore(hstore_str):
- """Parse an hstore from its literal string representation.
-
- Attempts to approximate PG's hstore input parsing rules as closely as
- possible. Although currently this is not strictly necessary, since the
- current implementation of hstore's output syntax is stricter than what it
- accepts as input, the documentation makes no guarantees that will always
- be the case.
-
-
-
- """
- result = {}
- pos = 0
- pair_match = HSTORE_PAIR_RE.match(hstore_str)
-
- while pair_match is not None:
- key = pair_match.group('key').replace(r'\"', '"').replace(
- "\\\\", "\\")
- if pair_match.group('value_null'):
- value = None
- else:
- value = pair_match.group('value').replace(
- r'\"', '"').replace("\\\\", "\\")
- result[key] = value
-
- pos += pair_match.end()
-
- delim_match = HSTORE_DELIMITER_RE.match(hstore_str[pos:])
- if delim_match is not None:
- pos += delim_match.end()
-
- pair_match = HSTORE_PAIR_RE.match(hstore_str[pos:])
-
- if pos != len(hstore_str):
- raise ValueError(_parse_error(hstore_str, pos))
+INDEX = custom_op(
+ "->", precedence=5, natural_self_precedent=True
+)
- return result
+HAS_KEY = operators.custom_op(
+ "?", precedence=5, natural_self_precedent=True
+)
+HAS_ALL = operators.custom_op(
+ "?&", precedence=5, natural_self_precedent=True
+)
-def _serialize_hstore(val):
- """Serialize a dictionary into an hstore literal. Keys and values must
- both be strings (except None for values).
+HAS_ANY = operators.custom_op(
+ "?|", precedence=5, natural_self_precedent=True
+)
- """
- def esc(s, position):
- if position == 'value' and s is None:
- return 'NULL'
- elif isinstance(s, util.string_types):
- return '"%s"' % s.replace("\\", "\\\\").replace('"', r'\"')
- else:
- raise ValueError("%r in %s position is not a string." %
- (s, position))
+CONTAINS = operators.custom_op(
+ "@>", precedence=5, natural_self_precedent=True
+)
- return ', '.join('%s=>%s' % (esc(k, 'key'), esc(v, 'value'))
- for k, v in val.items())
+CONTAINED_BY = operators.custom_op(
+ "<@", precedence=5, natural_self_precedent=True
+)
-class HSTORE(sqltypes.Concatenable, sqltypes.TypeEngine):
+class HSTORE(sqltypes.Indexable, sqltypes.Concatenable, sqltypes.TypeEngine):
"""Represent the Postgresql HSTORE type.
The :class:`.HSTORE` type stores dictionaries containing strings, e.g.::
@@ -185,51 +118,61 @@ class HSTORE(sqltypes.Concatenable, sqltypes.TypeEngine):
__visit_name__ = 'HSTORE'
hashable = False
+ text_type = sqltypes.Text()
+
+ def __init__(self, text_type=None):
+ """Construct a new :class:`.HSTORE`.
+
+ :param text_type: the type that should be used for indexed values.
+ Defaults to :class:`.types.Text`.
+
+ .. versionadded:: 1.1.0
- class comparator_factory(sqltypes.Concatenable.Comparator):
+ """
+ if text_type is not None:
+ self.text_type = text_type
+
+ class Comparator(
+ sqltypes.Indexable.Comparator, sqltypes.Concatenable.Comparator):
"""Define comparison operations for :class:`.HSTORE`."""
def has_key(self, other):
"""Boolean expression. Test for presence of a key. Note that the
key may be a SQLA expression.
"""
- return self.expr.op('?')(other)
+ return self.operate(HAS_KEY, other, result_type=sqltypes.Boolean)
def has_all(self, other):
- """Boolean expression. Test for presence of all keys in the PG
- array.
+ """Boolean expression. Test for presence of all keys in jsonb
"""
- return self.expr.op('?&')(other)
+ return self.operate(HAS_ALL, other, result_type=sqltypes.Boolean)
def has_any(self, other):
- """Boolean expression. Test for presence of any key in the PG
- array.
+ """Boolean expression. Test for presence of any key in jsonb
"""
- return self.expr.op('?|')(other)
-
- def defined(self, key):
- """Boolean expression. Test for presence of a non-NULL value for
- the key. Note that the key may be a SQLA expression.
- """
- return _HStoreDefinedFunction(self.expr, key)
+ return self.operate(HAS_ANY, other, result_type=sqltypes.Boolean)
def contains(self, other, **kwargs):
- """Boolean expression. Test if keys are a superset of the keys of
- the argument hstore expression.
+ """Boolean expression. Test if keys (or array) are a superset
+ of/contained the keys of the argument jsonb expression.
"""
- return self.expr.op('@>')(other)
+ return self.operate(CONTAINS, other, result_type=sqltypes.Boolean)
def contained_by(self, other):
"""Boolean expression. Test if keys are a proper subset of the
- keys of the argument hstore expression.
+            keys of the argument hstore expression.
"""
- return self.expr.op('<@')(other)
+ return self.operate(
+ CONTAINED_BY, other, result_type=sqltypes.Boolean)
- def __getitem__(self, other):
- """Text expression. Get the value at a given key. Note that the
- key may be a SQLA expression.
+ def _setup_getitem(self, index):
+ return INDEX, index, self.type.text_type
+
+ def defined(self, key):
+ """Boolean expression. Test for presence of a non-NULL value for
+ the key. Note that the key may be a SQLA expression.
"""
- return self.expr.op('->', precedence=5)(other)
+ return _HStoreDefinedFunction(self.expr, key)
def delete(self, key):
"""HStore expression. Returns the contents of this hstore with the
@@ -263,14 +206,7 @@ class HSTORE(sqltypes.Concatenable, sqltypes.TypeEngine):
"""Text array expression. Returns array of [key, value] pairs."""
return _HStoreMatrixFunction(self.expr)
- def _adapt_expression(self, op, other_comparator):
- if isinstance(op, custom_op):
- if op.opstring in ['?', '?&', '?|', '@>', '<@']:
- return op, sqltypes.Boolean
- elif op.opstring == '->':
- return op, sqltypes.Text
- return sqltypes.Concatenable.Comparator.\
- _adapt_expression(self, op, other_comparator)
+ comparator_factory = Comparator
def bind_processor(self, dialect):
if util.py2k:
@@ -374,3 +310,105 @@ class _HStoreArrayFunction(sqlfunc.GenericFunction):
class _HStoreMatrixFunction(sqlfunc.GenericFunction):
type = ARRAY(sqltypes.Text)
name = 'hstore_to_matrix'
+
+
+#
+# parsing. note that none of this is used with the psycopg2 backend,
+# which provides its own native extensions.
+#
+
+# My best guess at the parsing rules of hstore literals, since no formal
+# grammar is given. This is mostly reverse engineered from PG's input parser
+# behavior.
+HSTORE_PAIR_RE = re.compile(r"""
+(
+ "(?P<key> (\\ . | [^"])* )" # Quoted key
+)
+[ ]* => [ ]* # Pair operator, optional adjoining whitespace
+(
+ (?P<value_null> NULL ) # NULL value
+ | "(?P<value> (\\ . | [^"])* )" # Quoted value
+)
+""", re.VERBOSE)
+
+HSTORE_DELIMITER_RE = re.compile(r"""
+[ ]* , [ ]*
+""", re.VERBOSE)
+
+
+def _parse_error(hstore_str, pos):
+ """format an unmarshalling error."""
+
+ ctx = 20
+ hslen = len(hstore_str)
+
+ parsed_tail = hstore_str[max(pos - ctx - 1, 0):min(pos, hslen)]
+ residual = hstore_str[min(pos, hslen):min(pos + ctx + 1, hslen)]
+
+ if len(parsed_tail) > ctx:
+ parsed_tail = '[...]' + parsed_tail[1:]
+ if len(residual) > ctx:
+ residual = residual[:-1] + '[...]'
+
+ return "After %r, could not parse residual at position %d: %r" % (
+ parsed_tail, pos, residual)
+
+
+def _parse_hstore(hstore_str):
+ """Parse an hstore from its literal string representation.
+
+ Attempts to approximate PG's hstore input parsing rules as closely as
+ possible. Although currently this is not strictly necessary, since the
+ current implementation of hstore's output syntax is stricter than what it
+ accepts as input, the documentation makes no guarantees that will always
+ be the case.
+
+
+
+ """
+ result = {}
+ pos = 0
+ pair_match = HSTORE_PAIR_RE.match(hstore_str)
+
+ while pair_match is not None:
+ key = pair_match.group('key').replace(r'\"', '"').replace(
+ "\\\\", "\\")
+ if pair_match.group('value_null'):
+ value = None
+ else:
+ value = pair_match.group('value').replace(
+ r'\"', '"').replace("\\\\", "\\")
+ result[key] = value
+
+ pos += pair_match.end()
+
+ delim_match = HSTORE_DELIMITER_RE.match(hstore_str[pos:])
+ if delim_match is not None:
+ pos += delim_match.end()
+
+ pair_match = HSTORE_PAIR_RE.match(hstore_str[pos:])
+
+ if pos != len(hstore_str):
+ raise ValueError(_parse_error(hstore_str, pos))
+
+ return result
+
+
+def _serialize_hstore(val):
+ """Serialize a dictionary into an hstore literal. Keys and values must
+ both be strings (except None for values).
+
+ """
+ def esc(s, position):
+ if position == 'value' and s is None:
+ return 'NULL'
+ elif isinstance(s, util.string_types):
+ return '"%s"' % s.replace("\\", "\\\\").replace('"', r'\"')
+ else:
+ raise ValueError("%r in %s position is not a string." %
+ (s, position))
+
+ return ', '.join('%s=>%s' % (esc(k, 'key'), esc(v, 'value'))
+ for k, v in val.items())
+
+
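The HSTORE comparator now routes through the ``Indexable``/``custom_op`` machinery rather than ad-hoc ``self.expr.op()`` calls; the SQL it produces is unchanged. A short sketch against a hypothetical table::

    from sqlalchemy import Table, Column, MetaData, select
    from sqlalchemy.dialects.postgresql import HSTORE

    metadata = MetaData()
    t = Table('t', metadata, Column('data', HSTORE))

    # data -> 'k', typed per the new text_type parameter (Text by default)
    stmt = select([t.c.data['k']])

    # data ? 'k'  /  data @> <bound hstore value>
    stmt = select([t.c.data]).where(t.c.data.has_key('k'))
    stmt = select([t.c.data]).where(t.c.data.contains({'k': 'v'}))
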
diff --git a/lib/sqlalchemy/dialects/postgresql/json.py b/lib/sqlalchemy/dialects/postgresql/json.py
index 13ebc4afe..8a50270f5 100644
--- a/lib/sqlalchemy/dialects/postgresql/json.py
+++ b/lib/sqlalchemy/dialects/postgresql/json.py
@@ -6,96 +6,60 @@
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from __future__ import absolute_import
+import collections
import json
from .base import ischema_names
from ... import types as sqltypes
-from ...sql.operators import custom_op
-from ... import sql
-from ...sql import elements, default_comparator
+from ...sql import operators
+from ...sql import elements
from ... import util
-__all__ = ('JSON', 'JSONElement', 'JSONB')
+__all__ = ('JSON', 'JSONB')
-class JSONElement(elements.BinaryExpression):
- """Represents accessing an element of a :class:`.JSON` value.
+# json : returns json
+INDEX = operators.custom_op(
+ "->", precedence=5, natural_self_precedent=True
+)
- The :class:`.JSONElement` is produced whenever using the Python index
- operator on an expression that has the type :class:`.JSON`::
+# path operator: returns json
+PATHIDX = operators.custom_op(
+ "#>", precedence=5, natural_self_precedent=True
+)
- expr = mytable.c.json_data['some_key']
+# json + astext: returns text
+ASTEXT = operators.custom_op(
+ "->>", precedence=5, natural_self_precedent=True
+)
- The expression typically compiles to a JSON access such as ``col -> key``.
- Modifiers are then available for typing behavior, including
- :meth:`.JSONElement.cast` and :attr:`.JSONElement.astext`.
+# path operator + astext: returns text
+ASTEXT_PATHIDX = operators.custom_op(
+ "#>>", precedence=5, natural_self_precedent=True
+)
- """
-
- def __init__(self, left, right, astext=False,
- opstring=None, result_type=None):
- self._astext = astext
- if opstring is None:
- if hasattr(right, '__iter__') and \
- not isinstance(right, util.string_types):
- opstring = "#>"
- right = "{%s}" % (
- ", ".join(util.text_type(elem) for elem in right))
- else:
- opstring = "->"
-
- self._json_opstring = opstring
- operator = custom_op(opstring, precedence=5)
- right = default_comparator._check_literal(
- left, operator, right)
- super(JSONElement, self).__init__(
- left, right, operator, type_=result_type)
-
- @property
- def astext(self):
- """Convert this :class:`.JSONElement` to use the 'astext' operator
- when evaluated.
-
- E.g.::
-
- select([data_table.c.data['some key'].astext])
-
- .. seealso::
-
- :meth:`.JSONElement.cast`
-
- """
- if self._astext:
- return self
- else:
- return JSONElement(
- self.left,
- self.right,
- astext=True,
- opstring=self._json_opstring + ">",
- result_type=sqltypes.String(convert_unicode=True)
- )
-
- def cast(self, type_):
- """Convert this :class:`.JSONElement` to apply both the 'astext' operator
- as well as an explicit type cast when evaluated.
-
- E.g.::
+HAS_KEY = operators.custom_op(
+ "?", precedence=5, natural_self_precedent=True
+)
- select([data_table.c.data['some key'].cast(Integer)])
+HAS_ALL = operators.custom_op(
+ "?&", precedence=5, natural_self_precedent=True
+)
- .. seealso::
+HAS_ANY = operators.custom_op(
+ "?|", precedence=5, natural_self_precedent=True
+)
- :attr:`.JSONElement.astext`
+CONTAINS = operators.custom_op(
+ "@>", precedence=5, natural_self_precedent=True
+)
- """
- if not self._astext:
- return self.astext.cast(type_)
- else:
- return sql.cast(self, type_)
+CONTAINED_BY = operators.custom_op(
+ "<@", precedence=5, natural_self_precedent=True
+)
-class JSON(sqltypes.TypeEngine):
+class JSON(sqltypes.Indexable, sqltypes.TypeEngine):
"""Represent the Postgresql JSON type.
The :class:`.JSON` type stores arbitrary JSON format data, e.g.::
@@ -113,31 +77,36 @@ class JSON(sqltypes.TypeEngine):
:class:`.JSON` provides several operations:
- * Index operations::
+ * Index operations (the ``->`` operator)::
data_table.c.data['some key']
- * Index operations returning text (required for text comparison)::
+ * Index operations returning text (the ``->>`` operator)::
data_table.c.data['some key'].astext == 'some value'
- * Index operations with a built-in CAST call::
+ * Index operations with CAST
+      (equivalent to ``CAST(col ->> 'some key' AS <type>)``)::
- data_table.c.data['some key'].cast(Integer) == 5
+ data_table.c.data['some key'].astext.cast(Integer) == 5
- * Path index operations::
+ * Path index operations (the ``#>`` operator)::
data_table.c.data[('key_1', 'key_2', ..., 'key_n')]
- * Path index operations returning text (required for text comparison)::
+ * Path index operations returning text (the ``#>>`` operator)::
+
+ data_table.c.data[('key_1', 'key_2', ..., 'key_n')].astext == \
+'some value'
- data_table.c.data[('key_1', 'key_2', ..., 'key_n')].astext == \\
- 'some value'
+ .. versionchanged:: 1.1 The :meth:`.ColumnElement.cast` operator on
+ JSON objects now requires that the :attr:`.JSON.Comparator.astext`
+ modifier be called explicitly, if the cast works only from a textual
+ string.
- Index operations return an instance of :class:`.JSONElement`, which
- represents an expression such as ``column -> index``. This element then
- defines methods such as :attr:`.JSONElement.astext` and
- :meth:`.JSONElement.cast` for setting up type behavior.
+    Index operations return an expression object whose type defaults to
+    :class:`.JSON`, so that further JSON-oriented instructions
+ may be called upon the result type.
The :class:`.JSON` type, when used with the SQLAlchemy ORM, does not
detect in-place mutations to the structure. In order to detect these, the
@@ -146,6 +115,29 @@ class JSON(sqltypes.TypeEngine):
will be detected by the unit of work. See the example at :class:`.HSTORE`
for a simple example involving a dictionary.
+ When working with NULL values, the :class:`.JSON` type recommends the
+ use of two specific constants in order to differentiate between a column
+ that evaluates to SQL NULL, e.g. no value, vs. the JSON-encoded string
+ of ``"null"``. To insert or select against a value that is SQL NULL,
+ use the constant :func:`.null`::
+
+ conn.execute(table.insert(), json_value=null())
+
+ To insert or select against a value that is JSON ``"null"``, use the
+ constant :attr:`.JSON.NULL`::
+
+ conn.execute(table.insert(), json_value=JSON.NULL)
+
+ The :class:`.JSON` type supports a flag
+ :paramref:`.JSON.none_as_null` which when set to True will result
+ in the Python constant ``None`` evaluating to the value of SQL
+ NULL, and when set to False results in the Python constant
+ ``None`` evaluating to the value of JSON ``"null"``. The Python
+ value ``None`` may be used in conjunction with either
+    :attr:`.JSON.NULL` or :func:`.null` in order to indicate NULL
+ values, but care must be taken as to the value of the
+ :paramref:`.JSON.none_as_null` in these cases.
+
Custom serializers and deserializers are specified at the dialect level,
that is using :func:`.create_engine`. The reason for this is that when
using psycopg2, the DBAPI only allows serializers at the per-cursor
@@ -161,11 +153,42 @@ class JSON(sqltypes.TypeEngine):
.. versionadded:: 0.9
+ .. seealso::
+
+ :class:`.JSONB`
+
"""
__visit_name__ = 'JSON'
- def __init__(self, none_as_null=False):
+ hashable = False
+ astext_type = sqltypes.Text()
+
+ NULL = util.symbol('JSON_NULL')
+ """Describe the json value of NULL.
+
+ This value is used to force the JSON value of ``"null"`` to be
+ used as the value. A value of Python ``None`` will be recognized
+ either as SQL NULL or JSON ``"null"``, based on the setting
+ of the :paramref:`.JSON.none_as_null` flag; the :attr:`.JSON.NULL`
+ constant can be used to always resolve to JSON ``"null"`` regardless
+ of this setting. This is in contrast to the :func:`.sql.null` construct,
+ which always resolves to SQL NULL. E.g.::
+
+ from sqlalchemy import null
+ from sqlalchemy.dialects.postgresql import JSON
+
+ obj1 = MyObject(json_value=null()) # will *always* insert SQL NULL
+ obj2 = MyObject(json_value=JSON.NULL) # will *always* insert JSON string "null"
+
+ session.add_all([obj1, obj2])
+ session.commit()
+
+ .. versionadded:: 1.1
+
+ """
+
+ def __init__(self, none_as_null=False, astext_type=None):
"""Construct a :class:`.JSON` type.
:param none_as_null: if True, persist the value ``None`` as a
@@ -179,58 +202,99 @@ class JSON(sqltypes.TypeEngine):
.. versionchanged:: 0.9.8 - Added ``none_as_null``, and :func:`.null`
is now supported in order to persist a NULL value.
+ .. seealso::
+
+ :attr:`.JSON.NULL`
+
+ :param astext_type: the type to use for the
+ :attr:`.JSON.Comparator.astext`
+ accessor on indexed attributes. Defaults to :class:`.types.Text`.
+
+ .. versionadded:: 1.1.0
+
"""
self.none_as_null = none_as_null
+ if astext_type is not None:
+ self.astext_type = astext_type
- class comparator_factory(sqltypes.Concatenable.Comparator):
+ class Comparator(
+ sqltypes.Indexable.Comparator, sqltypes.Concatenable.Comparator):
"""Define comparison operations for :class:`.JSON`."""
- def __getitem__(self, other):
- """Get the value at a given key."""
+ @property
+ def astext(self):
+ """On an indexed expression, use the "astext" (e.g. "->>")
+ conversion when rendered in SQL.
+
+ E.g.::
+
+ select([data_table.c.data['some key'].astext])
+
+ .. seealso::
+
+ :meth:`.ColumnElement.cast`
+
+ """
+ against = self.expr.operator
+ if against is PATHIDX:
+ against = ASTEXT_PATHIDX
+ else:
+ against = ASTEXT
+
+ return self.expr.left.operate(
+ against, self.expr.right, result_type=self.type.astext_type)
+
+ def _setup_getitem(self, index):
+ if not isinstance(index, util.string_types):
+ assert isinstance(index, collections.Sequence)
+ tokens = [util.text_type(elem) for elem in index]
+ index = "{%s}" % (", ".join(tokens))
+ operator = PATHIDX
+ else:
+ operator = INDEX
- return JSONElement(self.expr, other)
+ return operator, index, self.type
- def _adapt_expression(self, op, other_comparator):
- if isinstance(op, custom_op):
- if op.opstring == '->':
- return op, sqltypes.Text
- return sqltypes.Concatenable.Comparator.\
- _adapt_expression(self, op, other_comparator)
+ comparator_factory = Comparator
+
+ @property
+ def should_evaluate_none(self):
+ return not self.none_as_null
def bind_processor(self, dialect):
json_serializer = dialect._json_serializer or json.dumps
if util.py2k:
encoding = dialect.encoding
-
- def process(value):
- if isinstance(value, elements.Null) or (
- value is None and self.none_as_null
- ):
- return None
- return json_serializer(value).encode(encoding)
else:
- def process(value):
- if isinstance(value, elements.Null) or (
- value is None and self.none_as_null
- ):
- return None
+ encoding = None
+
+ def process(value):
+ if value is self.NULL:
+ value = None
+ elif isinstance(value, elements.Null) or (
+ value is None and self.none_as_null
+ ):
+ return None
+ if encoding:
+ return json_serializer(value).encode(encoding)
+ else:
return json_serializer(value)
+
return process
def result_processor(self, dialect, coltype):
json_deserializer = dialect._json_deserializer or json.loads
if util.py2k:
encoding = dialect.encoding
-
- def process(value):
- if value is None:
- return None
- return json_deserializer(value.decode(encoding))
else:
- def process(value):
- if value is None:
- return None
- return json_deserializer(value)
+ encoding = None
+
+ def process(value):
+ if value is None:
+ return None
+ if encoding:
+ value = value.decode(encoding)
+ return json_deserializer(value)
return process
@@ -253,106 +317,68 @@ class JSONB(JSON):
data = {"key1": "value1", "key2": "value2"}
)
- :class:`.JSONB` provides several operations:
-
- * Index operations::
-
- data_table.c.data['some key']
-
- * Index operations returning text (required for text comparison)::
+ The :class:`.JSONB` type includes all operations provided by
+ :class:`.JSON`, including the same behaviors for indexing operations.
+    It also adds operators specific to JSONB, including
+ :meth:`.JSONB.Comparator.has_key`, :meth:`.JSONB.Comparator.has_all`,
+ :meth:`.JSONB.Comparator.has_any`, :meth:`.JSONB.Comparator.contains`,
+ and :meth:`.JSONB.Comparator.contained_by`.
+
+ Like the :class:`.JSON` type, the :class:`.JSONB` type does not detect
+ in-place changes when used with the ORM, unless the
+ :mod:`sqlalchemy.ext.mutable` extension is used.
+
+ Custom serializers and deserializers
+ are shared with the :class:`.JSON` class, using the ``json_serializer``
+ and ``json_deserializer`` keyword arguments. These must be specified
+ at the dialect level using :func:`.create_engine`. When using
+ psycopg2, the serializers are associated with the jsonb type using
+ ``psycopg2.extras.register_default_jsonb`` on a per-connection basis,
+ in the same way that ``psycopg2.extras.register_default_json`` is used
+ to register these handlers with the json type.
- data_table.c.data['some key'].astext == 'some value'
-
- * Index operations with a built-in CAST call::
-
- data_table.c.data['some key'].cast(Integer) == 5
-
- * Path index operations::
-
- data_table.c.data[('key_1', 'key_2', ..., 'key_n')]
-
- * Path index operations returning text (required for text comparison)::
-
- data_table.c.data[('key_1', 'key_2', ..., 'key_n')].astext == \\
- 'some value'
-
- Index operations return an instance of :class:`.JSONElement`, which
- represents an expression such as ``column -> index``. This element then
- defines methods such as :attr:`.JSONElement.astext` and
- :meth:`.JSONElement.cast` for setting up type behavior.
-
- The :class:`.JSON` type, when used with the SQLAlchemy ORM, does not
- detect in-place mutations to the structure. In order to detect these, the
- :mod:`sqlalchemy.ext.mutable` extension must be used. This extension will
- allow "in-place" changes to the datastructure to produce events which
- will be detected by the unit of work. See the example at :class:`.HSTORE`
- for a simple example involving a dictionary.
-
- Custom serializers and deserializers are specified at the dialect level,
- that is using :func:`.create_engine`. The reason for this is that when
- using psycopg2, the DBAPI only allows serializers at the per-cursor
- or per-connection level. E.g.::
-
- engine = create_engine("postgresql://scott:tiger@localhost/test",
- json_serializer=my_serialize_fn,
- json_deserializer=my_deserialize_fn
- )
+ .. versionadded:: 0.9.7
- When using the psycopg2 dialect, the json_deserializer is registered
- against the database using ``psycopg2.extras.register_default_json``.
+ .. seealso::
- .. versionadded:: 0.9.7
+ :class:`.JSON`
"""
__visit_name__ = 'JSONB'
- hashable = False
- class comparator_factory(sqltypes.Concatenable.Comparator):
+ class Comparator(JSON.Comparator):
"""Define comparison operations for :class:`.JSON`."""
- def __getitem__(self, other):
- """Get the value at a given key."""
-
- return JSONElement(self.expr, other)
-
- def _adapt_expression(self, op, other_comparator):
- # How does one do equality?? jsonb also has "=" eg.
- # '[1,2,3]'::jsonb = '[1,2,3]'::jsonb
- if isinstance(op, custom_op):
- if op.opstring in ['?', '?&', '?|', '@>', '<@']:
- return op, sqltypes.Boolean
- if op.opstring == '->':
- return op, sqltypes.Text
- return sqltypes.Concatenable.Comparator.\
- _adapt_expression(self, op, other_comparator)
-
def has_key(self, other):
"""Boolean expression. Test for presence of a key. Note that the
key may be a SQLA expression.
"""
- return self.expr.op('?')(other)
+ return self.operate(HAS_KEY, other, result_type=sqltypes.Boolean)
def has_all(self, other):
"""Boolean expression. Test for presence of all keys in jsonb
"""
- return self.expr.op('?&')(other)
+ return self.operate(HAS_ALL, other, result_type=sqltypes.Boolean)
def has_any(self, other):
"""Boolean expression. Test for presence of any key in jsonb
"""
- return self.expr.op('?|')(other)
+ return self.operate(HAS_ANY, other, result_type=sqltypes.Boolean)
def contains(self, other, **kwargs):
- """Boolean expression. Test if keys (or array) are a superset of/contained
- the keys of the argument jsonb expression.
+ """Boolean expression. Test if keys (or array) are a superset
+ of/contained the keys of the argument jsonb expression.
"""
- return self.expr.op('@>')(other)
+ return self.operate(CONTAINS, other, result_type=sqltypes.Boolean)
def contained_by(self, other):
"""Boolean expression. Test if keys are a proper subset of the
keys of the argument jsonb expression.
"""
- return self.expr.op('<@')(other)
+ return self.operate(
+ CONTAINED_BY, other, result_type=sqltypes.Boolean)
+
+ comparator_factory = Comparator
ischema_names['jsonb'] = JSONB
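Illustrative expressions against the reworked JSON / JSONB behavior described above; the table and key names are hypothetical::

    from sqlalchemy import Table, Column, Integer, MetaData, select, null
    from sqlalchemy.dialects.postgresql import JSON, JSONB

    metadata = MetaData()
    t = Table('t', metadata, Column('data', JSONB))

    # col ->> 'some key', then CAST(... AS INTEGER)
    stmt = select([t.c.data]).where(
        t.c.data['some key'].astext.cast(Integer) == 5)

    # path index: col #> '{key_1, key_2}'
    stmt = select([t.c.data[('key_1', 'key_2')]])

    # SQL NULL vs. the JSON-encoded string "null" on INSERT
    ins_sql_null = t.insert().values(data=null())
    ins_json_null = t.insert().values(data=JSON.NULL)
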
diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
index 36a9d7bf7..d33554922 100644
--- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py
+++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
@@ -320,7 +320,7 @@ from ...sql import expression
from ... import types as sqltypes
from .base import PGDialect, PGCompiler, \
PGIdentifierPreparer, PGExecutionContext, \
- ENUM, ARRAY, _DECIMAL_TYPES, _FLOAT_TYPES,\
+ ENUM, _DECIMAL_TYPES, _FLOAT_TYPES,\
_INT_TYPES, UUID
from .hstore import HSTORE
from .json import JSON, JSONB
diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py
index d9da46f4c..44a8cf278 100644
--- a/lib/sqlalchemy/dialects/sqlite/base.py
+++ b/lib/sqlalchemy/dialects/sqlite/base.py
@@ -894,11 +894,25 @@ class SQLiteDDLCompiler(compiler.DDLCompiler):
return preparer.format_table(table, use_schema=False)
- def visit_create_index(self, create):
+ def visit_create_index(self, create, include_schema=False,
+ include_table_schema=True):
index = create.element
-
- text = super(SQLiteDDLCompiler, self).visit_create_index(
- create, include_table_schema=False)
+ self._verify_index_table(index)
+ preparer = self.preparer
+ text = "CREATE "
+ if index.unique:
+ text += "UNIQUE "
+ text += "INDEX %s ON %s (%s)" \
+ % (
+ self._prepared_index_name(index,
+ include_schema=True),
+ preparer.format_table(index.table,
+ use_schema=False),
+ ', '.join(
+ self.sql_compiler.process(
+ expr, include_table=False, literal_binds=True) for
+ expr in index.expressions)
+ )
whereclause = index.dialect_options["sqlite"]["where"]
if whereclause is not None:
@@ -1095,6 +1109,13 @@ class SQLiteDialect(default.DefaultDialect):
return None
@reflection.cache
+ def get_schema_names(self, connection, **kw):
+ s = "PRAGMA database_list"
+ dl = connection.execute(s)
+
+ return [db[1] for db in dl if db[1] != "temp"]
+
+ @reflection.cache
def get_table_names(self, connection, schema=None, **kw):
if schema is not None:
qschema = self.identifier_preparer.quote_identifier(schema)
@@ -1283,7 +1304,7 @@ class SQLiteDialect(default.DefaultDialect):
fk = fks[numerical_id] = {
'name': None,
'constrained_columns': [],
- 'referred_schema': None,
+ 'referred_schema': schema,
'referred_table': rtbl,
'referred_columns': [],
}
@@ -1387,7 +1408,7 @@ class SQLiteDialect(default.DefaultDialect):
unique_constraints = []
def parse_uqs():
- UNIQUE_PATTERN = '(?:CONSTRAINT (\w+) +)?UNIQUE *\((.+?)\)'
+ UNIQUE_PATTERN = '(?:CONSTRAINT "?(.+?)"? +)?UNIQUE *\((.+?)\)'
INLINE_UNIQUE_PATTERN = (
'(?:(".+?")|([a-z0-9]+)) '
'+[a-z0-9_ ]+? +UNIQUE')
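The new ``get_schema_names()`` reflection hook can be exercised through the inspection API; a minimal sketch, with an arbitrary attached database name::

    from sqlalchemy import create_engine, inspect

    engine = create_engine("sqlite://")
    with engine.connect() as conn:
        conn.execute("ATTACH DATABASE ':memory:' AS other")
        print(inspect(conn).get_schema_names())  # e.g. ['main', 'other']
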
diff --git a/lib/sqlalchemy/dialects/sybase/base.py b/lib/sqlalchemy/dialects/sybase/base.py
index ae0473a3e..b3f8e307a 100644
--- a/lib/sqlalchemy/dialects/sybase/base.py
+++ b/lib/sqlalchemy/dialects/sybase/base.py
@@ -608,8 +608,8 @@ class SybaseDialect(default.DefaultDialect):
FROM sysreferences r JOIN sysobjects o on r.tableid = o.id
WHERE r.tableid = :table_id
""")
- referential_constraints = connection.execute(REFCONSTRAINT_SQL,
- table_id=table_id)
+ referential_constraints = connection.execute(
+ REFCONSTRAINT_SQL, table_id=table_id).fetchall()
REFTABLE_SQL = text("""
SELECT o.name AS name, u.name AS 'schema'
@@ -740,10 +740,13 @@ class SybaseDialect(default.DefaultDialect):
results.close()
constrained_columns = []
- for i in range(1, pks["count"] + 1):
- constrained_columns.append(pks["pk_%i" % (i,)])
- return {"constrained_columns": constrained_columns,
- "name": pks["name"]}
+ if pks:
+ for i in range(1, pks["count"] + 1):
+ constrained_columns.append(pks["pk_%i" % (i,)])
+ return {"constrained_columns": constrained_columns,
+ "name": pks["name"]}
+ else:
+ return {"constrained_columns": [], "name": None}
@reflection.cache
def get_schema_names(self, connection, **kw):
diff --git a/lib/sqlalchemy/engine/__init__.py b/lib/sqlalchemy/engine/__init__.py
index f1eacf6a6..0b0d50329 100644
--- a/lib/sqlalchemy/engine/__init__.py
+++ b/lib/sqlalchemy/engine/__init__.py
@@ -389,14 +389,33 @@ def create_engine(*args, **kwargs):
def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
"""Create a new Engine instance using a configuration dictionary.
- The dictionary is typically produced from a config file where keys
- are prefixed, such as sqlalchemy.url, sqlalchemy.echo, etc. The
- 'prefix' argument indicates the prefix to be searched for.
+ The dictionary is typically produced from a config file.
+
+ The keys of interest to ``engine_from_config()`` should be prefixed, e.g.
+ ``sqlalchemy.url``, ``sqlalchemy.echo``, etc. The 'prefix' argument
+ indicates the prefix to be searched for. Each matching key (after the
+ prefix is stripped) is treated as though it were the corresponding keyword
+ argument to a :func:`.create_engine` call.
+
+ The only required key is (assuming the default prefix) ``sqlalchemy.url``,
+ which provides the :ref:`database URL <database_urls>`.
A select set of keyword arguments will be "coerced" to their
expected type based on string values. The set of arguments
is extensible per-dialect using the ``engine_config_types`` accessor.
+ :param configuration: A dictionary (typically produced from a config file,
+ but this is not a requirement). Items whose keys start with the value
+ of 'prefix' will have that prefix stripped, and will then be passed to
+      :func:`.create_engine`.
+
+ :param prefix: Prefix to match and then strip from keys
+ in 'configuration'.
+
+ :param kwargs: Each keyword argument to ``engine_from_config()`` itself
+ overrides the corresponding item taken from the 'configuration'
+ dictionary. Keyword arguments should *not* be prefixed.
+
"""
options = dict((key[len(prefix):], configuration[key])
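A minimal sketch of the documented usage; the URL and options below are only examples::

    from sqlalchemy import engine_from_config

    config = {
        "sqlalchemy.url": "sqlite:///example.db",
        "sqlalchemy.echo": "true",          # coerced to a boolean
        "sqlalchemy.pool_recycle": "3600",  # coerced to an integer
    }
    engine = engine_from_config(config, prefix="sqlalchemy.")
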
diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py
index c5eabac0d..eaa435d45 100644
--- a/lib/sqlalchemy/engine/base.py
+++ b/lib/sqlalchemy/engine/base.py
@@ -1531,9 +1531,13 @@ class Transaction(object):
def __init__(self, connection, parent):
self.connection = connection
- self._parent = parent or self
+ self._actual_parent = parent
self.is_active = True
+ @property
+ def _parent(self):
+ return self._actual_parent or self
+
def close(self):
"""Close this :class:`.Transaction`.
diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py
index b2b78dee8..74a0fce77 100644
--- a/lib/sqlalchemy/engine/result.py
+++ b/lib/sqlalchemy/engine/result.py
@@ -221,7 +221,7 @@ class ResultMetaData(object):
in enumerate(result_columns)
]
self.keys = [
- elem[1] for elem in result_columns
+ elem[0] for elem in result_columns
]
else:
# case 2 - raw string, or number of columns in result does
@@ -236,7 +236,8 @@ class ResultMetaData(object):
# that SQLAlchemy has used up through 0.9.
if num_ctx_cols:
- result_map = self._create_result_map(result_columns)
+ result_map = self._create_result_map(
+ result_columns, case_sensitive)
raw = []
self.keys = []
@@ -329,10 +330,12 @@ class ResultMetaData(object):
])
@classmethod
- def _create_result_map(cls, result_columns):
+ def _create_result_map(cls, result_columns, case_sensitive=True):
d = {}
for elem in result_columns:
key, rec = elem[0], elem[1:]
+ if not case_sensitive:
+ key = key.lower()
if key in d:
# conflicting keyname, just double up the list
# of objects. this will cause an "ambiguous name"
@@ -492,10 +495,20 @@ class ResultProxy(object):
self._init_metadata()
def _getter(self, key):
- return self._metadata._getter(key)
+ try:
+ getter = self._metadata._getter
+ except AttributeError:
+ return self._non_result(None)
+ else:
+ return getter(key)
def _has_key(self, key):
- return self._metadata._has_key(key)
+ try:
+ has_key = self._metadata._has_key
+ except AttributeError:
+ return self._non_result(None)
+ else:
+ return has_key(key)
def _init_metadata(self):
metadata = self._cursor_description()
diff --git a/lib/sqlalchemy/event/attr.py b/lib/sqlalchemy/event/attr.py
index a64c7d08d..8a88e40ef 100644
--- a/lib/sqlalchemy/event/attr.py
+++ b/lib/sqlalchemy/event/attr.py
@@ -51,7 +51,7 @@ class _ClsLevelDispatch(RefCollection):
"""Class-level events on :class:`._Dispatch` classes."""
__slots__ = ('name', 'arg_names', 'has_kw',
- 'legacy_signatures', '_clslevel')
+ 'legacy_signatures', '_clslevel', '__weakref__')
def __init__(self, parent_dispatch_cls, fn):
self.name = fn.__name__
@@ -230,9 +230,7 @@ class _EmptyListener(_InstanceLevelDispatch):
class _CompoundListener(_InstanceLevelDispatch):
- _exec_once = False
-
- __slots__ = '_exec_once_mutex',
+ __slots__ = '_exec_once_mutex', '_exec_once'
def _memoized_attr__exec_once_mutex(self):
return threading.Lock()
@@ -279,11 +277,14 @@ class _ListenerCollection(_CompoundListener):
"""
- __slots__ = 'parent_listeners', 'parent', 'name', 'listeners', 'propagate'
+ __slots__ = (
+ 'parent_listeners', 'parent', 'name', 'listeners',
+ 'propagate', '__weakref__')
def __init__(self, parent, target_cls):
if target_cls not in parent._clslevel:
parent.update_subclass(target_cls)
+ self._exec_once = False
self.parent_listeners = parent._clslevel[target_cls]
self.parent = parent
self.name = parent.name
@@ -339,11 +340,10 @@ class _ListenerCollection(_CompoundListener):
class _JoinedListener(_CompoundListener):
- _exec_once = False
-
__slots__ = 'parent', 'name', 'local', 'parent_listeners'
def __init__(self, parent, name, local):
+ self._exec_once = False
self.parent = parent
self.name = name
self.local = local
diff --git a/lib/sqlalchemy/ext/baked.py b/lib/sqlalchemy/ext/baked.py
index f01e0b348..d8c8843f6 100644
--- a/lib/sqlalchemy/ext/baked.py
+++ b/lib/sqlalchemy/ext/baked.py
@@ -283,6 +283,26 @@ class Result(object):
raise orm_exc.MultipleResultsFound(
"Multiple rows were found for one()")
+ def one_or_none(self):
+ """Return one or zero results, or raise an exception for multiple
+ rows.
+
+ Equivalent to :meth:`.Query.one_or_none`.
+
+ .. versionadded:: 1.0.9
+
+ """
+ ret = list(self)
+
+ l = len(ret)
+ if l == 1:
+ return ret[0]
+ elif l == 0:
+ return None
+ else:
+ raise orm_exc.MultipleResultsFound(
+ "Multiple rows were found for one_or_none()")
+
def all(self):
"""Return all rows.
diff --git a/lib/sqlalchemy/ext/hybrid.py b/lib/sqlalchemy/ext/hybrid.py
index 9c6178264..0073494b8 100644
--- a/lib/sqlalchemy/ext/hybrid.py
+++ b/lib/sqlalchemy/ext/hybrid.py
@@ -46,7 +46,7 @@ as the class itself::
@hybrid_method
def contains(self, point):
- return (self.start <= point) & (point < self.end)
+ return (self.start <= point) & (point <= self.end)
@hybrid_method
def intersects(self, other):
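The documentation fix above makes the example interval inclusive of its end point. A plain-Python sketch of the corrected method (in the hybrid documentation this class is ordinarily a mapped class)::

    from sqlalchemy.ext.hybrid import hybrid_method

    class Interval(object):
        def __init__(self, start, end):
            self.start = start
            self.end = end

        @hybrid_method
        def contains(self, point):
            return (self.start <= point) & (point <= self.end)

    assert Interval(5, 10).contains(10)   # the end point now counts
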
diff --git a/lib/sqlalchemy/orm/attributes.py b/lib/sqlalchemy/orm/attributes.py
index a45c22394..5440d6b5d 100644
--- a/lib/sqlalchemy/orm/attributes.py
+++ b/lib/sqlalchemy/orm/attributes.py
@@ -551,6 +551,11 @@ class AttributeImpl(object):
def initialize(self, state, dict_):
"""Initialize the given state's attribute with an empty value."""
+ # As of 1.0, we don't actually set a value in
+ # dict_. This is so that the state of the object does not get
+ # modified without emitting the appropriate events.
+
+
return None
def get(self, state, dict_, passive=PASSIVE_OFF):
diff --git a/lib/sqlalchemy/orm/events.py b/lib/sqlalchemy/orm/events.py
index 801701be9..993385e15 100644
--- a/lib/sqlalchemy/orm/events.py
+++ b/lib/sqlalchemy/orm/events.py
@@ -216,14 +216,41 @@ class InstanceEvents(event.Events):
def first_init(self, manager, cls):
"""Called when the first instance of a particular mapping is called.
+ This event is called when the ``__init__`` method of a class
+        is called the first time for that particular class.  The event
+        is invoked before ``__init__`` actually proceeds, as well as
+        before the :meth:`.InstanceEvents.init` event is invoked.
+
"""
def init(self, target, args, kwargs):
"""Receive an instance when its constructor is called.
This method is only called during a userland construction of
- an object. It is not called when an object is loaded from the
- database.
+ an object, in conjunction with the object's constructor, e.g.
+ its ``__init__`` method. It is not called when an object is
+ loaded from the database; see the :meth:`.InstanceEvents.load`
+ event in order to intercept a database load.
+
+ The event is called before the actual ``__init__`` constructor
+ of the object is called. The ``kwargs`` dictionary may be
+ modified in-place in order to affect what is passed to
+ ``__init__``.
+
+ :param target: the mapped instance. If
+ the event is configured with ``raw=True``, this will
+ instead be the :class:`.InstanceState` state-management
+ object associated with the instance.
+ :param args: positional arguments passed to the ``__init__`` method.
+ This is passed as a tuple and is currently immutable.
+ :param kwargs: keyword arguments passed to the ``__init__`` method.
+ This structure *can* be altered in place.
+
+ .. seealso::
+
+ :meth:`.InstanceEvents.init_failure`
+
+ :meth:`.InstanceEvents.load`
"""
@@ -232,8 +259,31 @@ class InstanceEvents(event.Events):
and raised an exception.
This method is only called during a userland construction of
- an object. It is not called when an object is loaded from the
- database.
+ an object, in conjunction with the object's constructor, e.g.
+ its ``__init__`` method. It is not called when an object is loaded
+ from the database.
+
+ The event is invoked after an exception raised by the ``__init__``
+ method is caught. After the event
+ is invoked, the original exception is re-raised outwards, so that
+ the construction of the object still raises an exception. The
+ actual exception and stack trace raised should be present in
+ ``sys.exc_info()``.
+
+ :param target: the mapped instance. If
+ the event is configured with ``raw=True``, this will
+ instead be the :class:`.InstanceState` state-management
+ object associated with the instance.
+ :param args: positional arguments that were passed to the ``__init__``
+ method.
+ :param kwargs: keyword arguments that were passed to the ``__init__``
+ method.
+
+ .. seealso::
+
+ :meth:`.InstanceEvents.init`
+
+ :meth:`.InstanceEvents.load`
"""
@@ -260,12 +310,23 @@ class InstanceEvents(event.Events):
``None`` if the load does not correspond to a :class:`.Query`,
such as during :meth:`.Session.merge`.
+ .. seealso::
+
+ :meth:`.InstanceEvents.init`
+
+ :meth:`.InstanceEvents.refresh`
+
+ :meth:`.SessionEvents.loaded_as_persistent`
+
"""
def refresh(self, target, context, attrs):
"""Receive an object instance after one or more attributes have
been refreshed from a query.
+ Contrast this to the :meth:`.InstanceEvents.load` method, which
+ is invoked when the object is first loaded from a query.
+
:param target: the mapped instance. If
the event is configured with ``raw=True``, this will
instead be the :class:`.InstanceState` state-management
@@ -276,6 +337,10 @@ class InstanceEvents(event.Events):
were populated, or None if all column-mapped, non-deferred
attributes were populated.
+ .. seealso::
+
+ :meth:`.InstanceEvents.load`
+
"""
def refresh_flush(self, target, flush_context, attrs):
@@ -589,32 +654,67 @@ class MapperEvents(event.Events):
"""
def mapper_configured(self, mapper, class_):
- """Called when the mapper for the class is fully configured.
-
- This event is the latest phase of mapper construction, and
- is invoked when the mapped classes are first used, so that
- relationships between mappers can be resolved. When the event is
- called, the mapper should be in its final state.
-
- While the configuration event normally occurs automatically,
- it can be forced to occur ahead of time, in the case where the event
- is needed before any actual mapper usage, by using the
- :func:`.configure_mappers` function.
+ """Called when a specific mapper has completed its own configuration
+ within the scope of the :func:`.configure_mappers` call.
+
+ The :meth:`.MapperEvents.mapper_configured` event is invoked
+ for each mapper that is encountered when the
+ :func:`.orm.configure_mappers` function proceeds through the current
+ list of not-yet-configured mappers.
+ :func:`.orm.configure_mappers` is typically invoked
+ automatically as mappings are first used, as well as each time
+ new mappers have been made available and new mapper use is
+ detected.
+
+ When the event is called, the mapper should be in its final
+ state, but **not including backrefs** that may be invoked from
+ other mappers; they might still be pending within the
+ configuration operation. Bidirectional relationships that
+ are instead configured via the
+ :paramref:`.orm.relationship.back_populates` argument
+ *will* be fully available, since this style of relationship does not
+ rely upon other possibly-not-configured mappers to know that they
+ exist.
+ For an event that is guaranteed to have **all** mappers ready
+ to go including backrefs that are defined only on other
+ mappings, use the :meth:`.MapperEvents.after_configured`
+ event; this event invokes only after all known mappings have been
+ fully configured.
+
+ The :meth:`.MapperEvents.mapper_configured` event, unlike
+ :meth:`.MapperEvents.before_configured` or
+ :meth:`.MapperEvents.after_configured`,
+ is called for each mapper/class individually, and the mapper is
+        passed to the event itself.  It is also called exactly once for
+        a particular mapper.  The event is therefore useful for
+        configuration steps that benefit from being invoked just once
+        on a per-mapper basis and which don't require that "backref"
+        configurations are necessarily ready yet.
:param mapper: the :class:`.Mapper` which is the target
of this event.
:param class\_: the mapped class.
+ .. seealso::
+
+ :meth:`.MapperEvents.before_configured`
+
+ :meth:`.MapperEvents.after_configured`
+
"""
# TODO: need coverage for this event
def before_configured(self):
"""Called before a series of mappers have been configured.
- This corresponds to the :func:`.orm.configure_mappers` call, which
- note is usually called automatically as mappings are first
- used.
+ The :meth:`.MapperEvents.before_configured` event is invoked
+ each time the :func:`.orm.configure_mappers` function is
+ invoked, before the function has done any of its work.
+ :func:`.orm.configure_mappers` is typically invoked
+ automatically as mappings are first used, as well as each time
+ new mappers have been made available and new mapper use is
+ detected.
This event can **only** be applied to the :class:`.Mapper` class
or :func:`.mapper` function, and not to individual mappings or
@@ -626,11 +726,16 @@ class MapperEvents(event.Events):
def go():
# ...
+        Contrast this event to :meth:`.MapperEvents.after_configured`,
+ which is invoked after the series of mappers has been configured,
+ as well as :meth:`.MapperEvents.mapper_configured`, which is invoked
+ on a per-mapper basis as each one is configured to the extent possible.
+
Theoretically this event is called once per
application, but is actually called any time new mappers
are to be affected by a :func:`.orm.configure_mappers`
call. If new mappings are constructed after existing ones have
- already been used, this event can be called again. To ensure
+ already been used, this event will likely be called again. To ensure
that a particular event is only called once and no further, the
``once=True`` argument (new in 0.9.4) can be applied::
@@ -643,14 +748,33 @@ class MapperEvents(event.Events):
.. versionadded:: 0.9.3
+
+ .. seealso::
+
+ :meth:`.MapperEvents.mapper_configured`
+
+ :meth:`.MapperEvents.after_configured`
+
"""
def after_configured(self):
"""Called after a series of mappers have been configured.
- This corresponds to the :func:`.orm.configure_mappers` call, which
- note is usually called automatically as mappings are first
- used.
+ The :meth:`.MapperEvents.after_configured` event is invoked
+ each time the :func:`.orm.configure_mappers` function is
+ invoked, after the function has completed its work.
+ :func:`.orm.configure_mappers` is typically invoked
+ automatically as mappings are first used, as well as each time
+ new mappers have been made available and new mapper use is
+ detected.
+
+ Contrast this event to the :meth:`.MapperEvents.mapper_configured`
+ event, which is called on a per-mapper basis while the configuration
+ operation proceeds; unlike that event, when this event is invoked,
+ all cross-configurations (e.g. backrefs) will also have been made
+ available for any mappers that were pending.
+        Also contrast to :meth:`.MapperEvents.before_configured`,
+ which is invoked before the series of mappers has been configured.
This event can **only** be applied to the :class:`.Mapper` class
or :func:`.mapper` function, and not to individual mappings or
@@ -666,7 +790,7 @@ class MapperEvents(event.Events):
application, but is actually called any time new mappers
have been affected by a :func:`.orm.configure_mappers`
call. If new mappings are constructed after existing ones have
- already been used, this event can be called again. To ensure
+ already been used, this event will likely be called again. To ensure
that a particular event is only called once and no further, the
``once=True`` argument (new in 0.9.4) can be applied::
@@ -676,6 +800,12 @@ class MapperEvents(event.Events):
def go():
# ...
+ .. seealso::
+
+ :meth:`.MapperEvents.mapper_configured`
+
+ :meth:`.MapperEvents.before_configured`
+
"""
def before_insert(self, mapper, connection, target):
@@ -697,30 +827,14 @@ class MapperEvents(event.Events):
steps.
.. warning::
- Mapper-level flush events are designed to operate **on attributes
- local to the immediate object being handled
- and via SQL operations with the given**
- :class:`.Connection` **only.** Handlers here should **not** make
- alterations to the state of the :class:`.Session` overall, and
- in general should not affect any :func:`.relationship` -mapped
- attributes, as session cascade rules will not function properly,
- nor is it always known if the related class has already been
- handled. Operations that **are not supported in mapper
- events** include:
-
- * :meth:`.Session.add`
- * :meth:`.Session.delete`
- * Mapped collection append, add, remove, delete, discard, etc.
- * Mapped relationship attribute set/del events,
- i.e. ``someobject.related = someotherobject``
-
- Operations which manipulate the state of the object
- relative to other objects are better handled:
-
- * In the ``__init__()`` method of the mapped object itself, or
- another method designed to establish some particular state.
- * In a ``@validates`` handler, see :ref:`simple_validators`
- * Within the :meth:`.SessionEvents.before_flush` event.
+
+            Mapper-level flush events allow only **very limited operations**
+            on attributes local to the row being operated upon,
+            as well as the emission of additional SQL on the given
+ :class:`.Connection`. **Please read fully** the notes
+ at :ref:`session_persistence_mapper` for guidelines on using
+ these methods; generally, the :meth:`.SessionEvents.before_flush`
+ method should be preferred for general on-flush changes.
:param mapper: the :class:`.Mapper` which is the target
of this event.
@@ -734,6 +848,10 @@ class MapperEvents(event.Events):
object associated with the instance.
:return: No return value is supported by this event.
+ .. seealso::
+
+ :ref:`session_persistence_events`
+
"""
def after_insert(self, mapper, connection, target):
@@ -755,30 +873,14 @@ class MapperEvents(event.Events):
event->persist->event steps.
.. warning::
- Mapper-level flush events are designed to operate **on attributes
- local to the immediate object being handled
- and via SQL operations with the given**
- :class:`.Connection` **only.** Handlers here should **not** make
- alterations to the state of the :class:`.Session` overall, and in
- general should not affect any :func:`.relationship` -mapped
- attributes, as session cascade rules will not function properly,
- nor is it always known if the related class has already been
- handled. Operations that **are not supported in mapper
- events** include:
-
- * :meth:`.Session.add`
- * :meth:`.Session.delete`
- * Mapped collection append, add, remove, delete, discard, etc.
- * Mapped relationship attribute set/del events,
- i.e. ``someobject.related = someotherobject``
-
- Operations which manipulate the state of the object
- relative to other objects are better handled:
-
- * In the ``__init__()`` method of the mapped object itself,
- or another method designed to establish some particular state.
- * In a ``@validates`` handler, see :ref:`simple_validators`
- * Within the :meth:`.SessionEvents.before_flush` event.
+
+ Mapper-level flush events allow only **very limited operations**,
+ on attributes local to the row being operated upon,
+ as well as allowing any SQL to be emitted on the given
+ :class:`.Connection`. **Please read fully** the notes
+ at :ref:`session_persistence_mapper` for guidelines on using
+ these methods; generally, the :meth:`.SessionEvents.before_flush`
+ method should be preferred for general on-flush changes.
:param mapper: the :class:`.Mapper` which is the target
of this event.
@@ -792,6 +894,10 @@ class MapperEvents(event.Events):
object associated with the instance.
:return: No return value is supported by this event.
+ .. seealso::
+
+ :ref:`session_persistence_events`
+
"""
def before_update(self, mapper, connection, target):
@@ -832,29 +938,14 @@ class MapperEvents(event.Events):
steps.
.. warning::
- Mapper-level flush events are designed to operate **on attributes
- local to the immediate object being handled
- and via SQL operations with the given** :class:`.Connection`
- **only.** Handlers here should **not** make alterations to the
- state of the :class:`.Session` overall, and in general should not
- affect any :func:`.relationship` -mapped attributes, as
- session cascade rules will not function properly, nor is it
- always known if the related class has already been handled.
- Operations that **are not supported in mapper events** include:
-
- * :meth:`.Session.add`
- * :meth:`.Session.delete`
- * Mapped collection append, add, remove, delete, discard, etc.
- * Mapped relationship attribute set/del events,
- i.e. ``someobject.related = someotherobject``
-
- Operations which manipulate the state of the object
- relative to other objects are better handled:
-
- * In the ``__init__()`` method of the mapped object itself,
- or another method designed to establish some particular state.
- * In a ``@validates`` handler, see :ref:`simple_validators`
- * Within the :meth:`.SessionEvents.before_flush` event.
+
+ Mapper-level flush events allow only **very limited operations**,
+ on attributes local to the row being operated upon,
+ as well as allowing any SQL to be emitted on the given
+ :class:`.Connection`. **Please read fully** the notes
+ at :ref:`session_persistence_mapper` for guidelines on using
+ these methods; generally, the :meth:`.SessionEvents.before_flush`
+ method should be preferred for general on-flush changes.
:param mapper: the :class:`.Mapper` which is the target
of this event.
@@ -867,6 +958,11 @@ class MapperEvents(event.Events):
instead be the :class:`.InstanceState` state-management
object associated with the instance.
:return: No return value is supported by this event.
+
+ .. seealso::
+
+ :ref:`session_persistence_events`
+
"""
def after_update(self, mapper, connection, target):
@@ -906,29 +1002,14 @@ class MapperEvents(event.Events):
steps.
.. warning::
- Mapper-level flush events are designed to operate **on attributes
- local to the immediate object being handled
- and via SQL operations with the given** :class:`.Connection`
- **only.** Handlers here should **not** make alterations to the
- state of the :class:`.Session` overall, and in general should not
- affect any :func:`.relationship` -mapped attributes, as
- session cascade rules will not function properly, nor is it
- always known if the related class has already been handled.
- Operations that **are not supported in mapper events** include:
-
- * :meth:`.Session.add`
- * :meth:`.Session.delete`
- * Mapped collection append, add, remove, delete, discard, etc.
- * Mapped relationship attribute set/del events,
- i.e. ``someobject.related = someotherobject``
-
- Operations which manipulate the state of the object
- relative to other objects are better handled:
-
- * In the ``__init__()`` method of the mapped object itself,
- or another method designed to establish some particular state.
- * In a ``@validates`` handler, see :ref:`simple_validators`
- * Within the :meth:`.SessionEvents.before_flush` event.
+
+ Mapper-level flush events allow only **very limited operations**,
+ on attributes local to the row being operated upon,
+ as well as allowing any SQL to be emitted on the given
+ :class:`.Connection`. **Please read fully** the notes
+ at :ref:`session_persistence_mapper` for guidelines on using
+ these methods; generally, the :meth:`.SessionEvents.before_flush`
+ method should be preferred for general on-flush changes.
:param mapper: the :class:`.Mapper` which is the target
of this event.
@@ -942,6 +1023,10 @@ class MapperEvents(event.Events):
object associated with the instance.
:return: No return value is supported by this event.
+ .. seealso::
+
+ :ref:`session_persistence_events`
+
"""
def before_delete(self, mapper, connection, target):
@@ -957,29 +1042,14 @@ class MapperEvents(event.Events):
once in a later step.
.. warning::
- Mapper-level flush events are designed to operate **on attributes
- local to the immediate object being handled
- and via SQL operations with the given** :class:`.Connection`
- **only.** Handlers here should **not** make alterations to the
- state of the :class:`.Session` overall, and in general should not
- affect any :func:`.relationship` -mapped attributes, as
- session cascade rules will not function properly, nor is it
- always known if the related class has already been handled.
- Operations that **are not supported in mapper events** include:
-
- * :meth:`.Session.add`
- * :meth:`.Session.delete`
- * Mapped collection append, add, remove, delete, discard, etc.
- * Mapped relationship attribute set/del events,
- i.e. ``someobject.related = someotherobject``
-
- Operations which manipulate the state of the object
- relative to other objects are better handled:
-
- * In the ``__init__()`` method of the mapped object itself,
- or another method designed to establish some particular state.
- * In a ``@validates`` handler, see :ref:`simple_validators`
- * Within the :meth:`.SessionEvents.before_flush` event.
+
+ Mapper-level flush events allow only **very limited operations**,
+ on attributes local to the row being operated upon,
+ as well as allowing any SQL to be emitted on the given
+ :class:`.Connection`. **Please read fully** the notes
+ at :ref:`session_persistence_mapper` for guidelines on using
+ these methods; generally, the :meth:`.SessionEvents.before_flush`
+ method should be preferred for general on-flush changes.
:param mapper: the :class:`.Mapper` which is the target
of this event.
@@ -993,6 +1063,10 @@ class MapperEvents(event.Events):
object associated with the instance.
:return: No return value is supported by this event.
+ .. seealso::
+
+ :ref:`session_persistence_events`
+
"""
def after_delete(self, mapper, connection, target):
@@ -1008,29 +1082,14 @@ class MapperEvents(event.Events):
once in a previous step.
.. warning::
- Mapper-level flush events are designed to operate **on attributes
- local to the immediate object being handled
- and via SQL operations with the given** :class:`.Connection`
- **only.** Handlers here should **not** make alterations to the
- state of the :class:`.Session` overall, and in general should not
- affect any :func:`.relationship` -mapped attributes, as
- session cascade rules will not function properly, nor is it
- always known if the related class has already been handled.
- Operations that **are not supported in mapper events** include:
-
- * :meth:`.Session.add`
- * :meth:`.Session.delete`
- * Mapped collection append, add, remove, delete, discard, etc.
- * Mapped relationship attribute set/del events,
- i.e. ``someobject.related = someotherobject``
-
- Operations which manipulate the state of the object
- relative to other objects are better handled:
-
- * In the ``__init__()`` method of the mapped object itself,
- or another method designed to establish some particular state.
- * In a ``@validates`` handler, see :ref:`simple_validators`
- * Within the :meth:`.SessionEvents.before_flush` event.
+
+ Mapper-level flush events allow only **very limited operations**,
+ on attributes local to the row being operated upon,
+ as well as allowing any SQL to be emitted on the given
+ :class:`.Connection`. **Please read fully** the notes
+ at :ref:`session_persistence_mapper` for guidelines on using
+ these methods; generally, the :meth:`.SessionEvents.before_flush`
+ method should be preferred for general on-flush changes.
:param mapper: the :class:`.Mapper` which is the target
of this event.
@@ -1044,6 +1103,10 @@ class MapperEvents(event.Events):
object associated with the instance.
:return: No return value is supported by this event.
+ .. seealso::
+
+ :ref:`session_persistence_events`
+
"""
@@ -1284,6 +1347,8 @@ class SessionEvents(event.Events):
:meth:`~.SessionEvents.after_flush_postexec`
+ :ref:`session_persistence_events`
+
"""
def after_flush(self, session, flush_context):
@@ -1304,6 +1369,8 @@ class SessionEvents(event.Events):
:meth:`~.SessionEvents.after_flush_postexec`
+ :ref:`session_persistence_events`
+
"""
def after_flush_postexec(self, session, flush_context):
@@ -1326,6 +1393,8 @@ class SessionEvents(event.Events):
:meth:`~.SessionEvents.after_flush`
+ :ref:`session_persistence_events`
+
"""
def after_begin(self, session, transaction, connection):
@@ -1363,6 +1432,8 @@ class SessionEvents(event.Events):
:meth:`~.SessionEvents.after_attach`
+ :ref:`session_lifecycle_events`
+
"""
def after_attach(self, session, instance):
@@ -1385,6 +1456,8 @@ class SessionEvents(event.Events):
:meth:`~.SessionEvents.before_attach`
+ :ref:`session_lifecycle_events`
+
"""
@event._legacy_signature("0.9",
@@ -1439,6 +1512,244 @@ class SessionEvents(event.Events):
"""
+ def transient_to_pending(self, session, instance):
+ """Intercept the "transient to pending" transition for a specific object.
+
+ This event is a specialization of the
+ :meth:`.SessionEvents.after_attach` event which is only invoked
+ for this specific transition. It is invoked typically during the
+ :meth:`.Session.add` call.
+
+ :param session: target :class:`.Session`
+
+ :param instance: the ORM-mapped instance being operated upon.
+
+ .. versionadded:: 1.1
+
+ .. seealso::
+
+ :ref:`session_lifecycle_events`
+
+ """
+
+ def pending_to_transient(self, session, instance):
+ """Intercept the "pending to transient" transition for a specific object.
+
+ This less common transition occurs when a pending object that has
+ not been flushed is evicted from the session; this can occur
+ when the :meth:`.Session.rollback` method rolls back the transaction,
+ or when the :meth:`.Session.expunge` method is used.
+
+ :param session: target :class:`.Session`
+
+ :param instance: the ORM-mapped instance being operated upon.
+
+ .. versionadded:: 1.1
+
+ .. seealso::
+
+ :ref:`session_lifecycle_events`
+
+ """
+
+ def persistent_to_transient(self, session, instance):
+ """Intercept the "persistent to transient" transition for a specific object.
+
+ This less common transition occurs when a pending object that
+ has been flushed is evicted from the session; this can occur
+ when the :meth:`.Session.rollback` method rolls back the transaction.
+
+ :param session: target :class:`.Session`
+
+ :param instance: the ORM-mapped instance being operated upon.
+
+ .. versionadded:: 1.1
+
+ .. seealso::
+
+ :ref:`session_lifecycle_events`
+
+ """
+
+ def pending_to_persistent(self, session, instance):
+ """Intercept the "pending to persistent"" transition for a specific object.
+
+ This event is invoked within the flush process, and is
+ similar to scanning the :attr:`.Session.new` collection within
+ the :meth:`.SessionEvents.after_flush` event. However, in this
+ case the object has already been moved to the persistent state
+ when the event is called.
+
+ :param session: target :class:`.Session`
+
+ :param instance: the ORM-mapped instance being operated upon.
+
+ .. versionadded:: 1.1
+
+ .. seealso::
+
+ :ref:`session_lifecycle_events`
+
+ """
+
+ def detached_to_persistent(self, session, instance):
+ """Intercept the "detached to persistent" transition for a specific object.
+
+ This event is a specialization of the
+ :meth:`.SessionEvents.after_attach` event which is only invoked
+ for this specific transition. It is invoked typically during the
+ :meth:`.Session.add` call, as well as during the
+ :meth:`.Session.delete` call if the object was not previously
+ associated with the
+ :class:`.Session` (note that an object marked as "deleted" remains
+ in the "persistent" state until the flush proceeds).
+
+ .. note::
+
+ If the object becomes persistent as part of a call to
+ :meth:`.Session.delete`, the object is **not** yet marked as
+ deleted when this event is called. To detect deleted objects,
+ check the ``deleted`` flag sent to the
+ :meth:`.SessionEvents.persistent_to_detached` event after the
+ flush proceeds, or check the :attr:`.Session.deleted` collection
+ within the :meth:`.SessionEvents.before_flush` event if deleted
+ objects need to be intercepted before the flush.
+
+ :param session: target :class:`.Session`
+
+ :param instance: the ORM-mapped instance being operated upon.
+
+ .. versionadded:: 1.1
+
+ .. seealso::
+
+ :ref:`session_lifecycle_events`
+
+ """
+
+ def loaded_as_persistent(self, session, instance):
+ """Intercept the "loaded as peristent" transition for a specific object.
+
+ This event is invoked within the ORM loading process, and is invoked
+ very similarly to the :meth:`.InstanceEvents.load` event. However,
+ the event here is linkable to a :class:`.Session` class or instance,
+ rather than to a mapper or class hierarchy, and integrates
+ with the other session lifecycle events smoothly. The object
+ is guaranteed to be present in the session's identity map when
+ this event is called.
+
+
+ :param session: target :class:`.Session`
+
+ :param instance: the ORM-mapped instance being operated upon.
+
+ .. versionadded:: 1.1
+
+ .. seealso::
+
+ :ref:`session_lifecycle_events`
+
+ """
+
+ def persistent_to_deleted(self, session, instance):
+ """Intercept the "persistent to deleted" transition for a specific object.
+
+ This event is invoked when a persistent object's identity
+ is deleted from the database within a flush; however, the object
+ still remains associated with the :class:`.Session` until the
+ transaction completes.
+
+ If the transaction is rolled back, the object moves again
+ to the persistent state, and the
+ :meth:`.SessionEvents.deleted_to_persistent` event is called.
+ If the transaction is committed, the object becomes detached,
+ which will emit the :meth:`.SessionEvents.deleted_to_detached`
+ event.
+
+ Note that while the :meth:`.Session.delete` method is the primary
+ public interface to mark an object as deleted, many objects
+ get deleted due to cascade rules, which are not always determined
+ until flush time. Therefore, there's no way to catch
+ every object that will be deleted until the flush has proceeded.
+ The :meth:`.SessionEvents.persistent_to_deleted` event is therefore
+ invoked at the end of a flush.
+
+ .. versionadded:: 1.1
+
+ .. seealso::
+
+ :ref:`session_lifecycle_events`
+
+ """
+
+ def deleted_to_persistent(self, session, instance):
+ """Intercept the "deleted to persistent" transition for a specific object.
+
+ This transition occurs only when an object that's been deleted
+ successfully in a flush is restored due to a call to
+ :meth:`.Session.rollback`. The event is not called under
+ any other circumstances.
+
+ .. versionadded:: 1.1
+
+ .. seealso::
+
+ :ref:`session_lifecycle_events`
+
+ """
+
+ def deleted_to_detached(self, session, instance):
+ """Intercept the "deleted to detached" transition for a specific object.
+
+ This event is invoked when a deleted object is evicted
+ from the session. The typical case when this occurs is when
+ the transaction for a :class:`.Session` in which the object
+ was deleted is committed; the object moves from the deleted
+ state to the detached state.
+
+ It is also invoked for objects that were deleted in a flush
+ when the :meth:`.Session.expunge_all` or :meth:`.Session.close`
+ methods are called, as well as if the object is individually
+ expunged from its deleted state via :meth:`.Session.expunge`.
+
+ .. versionadded:: 1.1
+
+ .. seealso::
+
+ :ref:`session_lifecycle_events`
+
+ """
+
+ def persistent_to_detached(self, session, instance):
+ """Intercept the "persistent to detached" transition for a specific object.
+
+ This event is invoked when a persistent object is evicted
+ from the session. There are many conditions that cause this
+ to happen, including:
+
+ * Using a method such as :meth:`.Session.expunge`
+ or :meth:`.Session.close`
+
+ * Calling the :meth:`.Session.rollback` method, when the object
+ was part of an INSERT statement for that session's transaction
+
+
+ :param session: target :class:`.Session`
+
+ :param instance: the ORM-mapped instance being operated upon.
+
+ :param deleted: boolean. If True, indicates this object moved
+ to the detached state because it was marked as deleted and flushed.
+
+
+ .. versionadded:: 1.1
+
+ .. seealso::
+
+ :ref:`session_lifecycle_events`
+
+ """
+
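A minimal sketch of listening for the object lifecycle transitions added above, assuming SQLAlchemy 1.1 with the plain Session class as the event target:

    from sqlalchemy import event
    from sqlalchemy.orm import Session

    @event.listens_for(Session, "transient_to_pending")
    def on_transient_to_pending(session, instance):
        # fires when an object is first added to a Session
        print("now pending: %r" % instance)

    @event.listens_for(Session, "persistent_to_deleted")
    def on_persistent_to_deleted(session, instance):
        # fires at the end of the flush in which the row was deleted
        print("deleted in flush: %r" % instance)
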
class AttributeEvents(event.Events):
"""Define events for object attributes.
diff --git a/lib/sqlalchemy/orm/identity.py b/lib/sqlalchemy/orm/identity.py
index 46be2b719..2dfe3fd5c 100644
--- a/lib/sqlalchemy/orm/identity.py
+++ b/lib/sqlalchemy/orm/identity.py
@@ -8,7 +8,8 @@
import weakref
from . import attributes
from .. import util
-
+from .. import exc as sa_exc
+from . import util as orm_util
class IdentityMap(object):
def __init__(self):
@@ -126,16 +127,18 @@ class WeakInstanceDict(IdentityMap):
if existing_state is not state:
o = existing_state.obj()
if o is not None:
- raise AssertionError(
- "A conflicting state is already "
- "present in the identity map for key %r"
- % (key, ))
+ raise sa_exc.InvalidRequestError(
+ "Can't attach instance "
+ "%s; another instance with key %s is already "
+ "present in this session." % (
+ orm_util.state_str(state), state.key))
else:
- return
+ return False
except KeyError:
pass
self._dict[key] = state
self._manage_incoming_state(state)
+ return True
def _add_unpresent(self, state, key):
# inlined form of add() called by loading.py
@@ -208,6 +211,18 @@ class WeakInstanceDict(IdentityMap):
class StrongInstanceDict(IdentityMap):
+ """A 'strong-referencing' version of the identity map.
+
+ .. deprecated:: 1.1
+ The strong
+ reference identity map is legacy. See the
+ recipe at :ref:`session_referencing_behavior` for
+ an event-based approach to maintaining strong identity
+ references.
+
+
+ """
+
if util.py2k:
def itervalues(self):
return self._dict.itervalues()
@@ -256,12 +271,16 @@ class StrongInstanceDict(IdentityMap):
def add(self, state):
if state.key in self:
if attributes.instance_state(self._dict[state.key]) is not state:
- raise AssertionError('A conflicting state is already '
- 'present in the identity map for key %r'
- % (state.key, ))
+ raise sa_exc.InvalidRequestError(
+ "Can't attach instance "
+ "%s; another instance with key %s is already "
+ "present in this session." % (
+ orm_util.state_str(state), state.key))
+ return False
else:
self._dict[state.key] = state.obj()
self._manage_incoming_state(state)
+ return True
def _add_unpresent(self, state, key):
# inlined form of add() called by loading.py
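A sketch of the condition that now raises ``InvalidRequestError`` rather than ``AssertionError``; the ``User`` class, the existing ``session``, and a row with primary key 1 are hypothetical:

    from sqlalchemy.exc import InvalidRequestError
    from sqlalchemy.orm import make_transient_to_detached

    original = session.query(User).get(1)    # persistent, identity key (User, (1,))

    duplicate = User(id=1)
    make_transient_to_detached(duplicate)    # assigns the same identity key

    try:
        session.add(duplicate)               # conflicts with 'original'
    except InvalidRequestError as err:
        print(err)   # Can't attach instance ...; another instance with key ...
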
diff --git a/lib/sqlalchemy/orm/loading.py b/lib/sqlalchemy/orm/loading.py
index b81e98a58..c90308a69 100644
--- a/lib/sqlalchemy/orm/loading.py
+++ b/lib/sqlalchemy/orm/loading.py
@@ -32,8 +32,7 @@ def instances(query, cursor, context):
context.runid = _new_runid()
- filter_fns = [ent.filter_fn for ent in query._entities]
- filtered = id in filter_fns
+ filtered = query._has_mapper_entities
single_entity = len(query._entities) == 1 and \
query._entities[0].supports_single_entity
@@ -43,7 +42,12 @@ def instances(query, cursor, context):
filter_fn = id
else:
def filter_fn(row):
- return tuple(fn(x) for x, fn in zip(row, filter_fns))
+ return tuple(
+ id(item)
+ if ent.use_id_for_hash
+ else item
+ for ent, item in zip(query._entities, row)
+ )
try:
(process, labels) = \
@@ -335,6 +339,9 @@ def _instance_processor(
populate_existing = context.populate_existing or mapper.always_refresh
load_evt = bool(mapper.class_manager.dispatch.load)
refresh_evt = bool(mapper.class_manager.dispatch.refresh)
+ persistent_evt = bool(context.session.dispatch.loaded_as_persistent)
+ if persistent_evt:
+ loaded_as_persistent = context.session.dispatch.loaded_as_persistent
instance_state = attributes.instance_state
instance_dict = attributes.instance_dict
session_id = context.session.hash_key
@@ -428,8 +435,11 @@ def _instance_processor(
loaded_instance, populate_existing, populators)
if isnew:
- if loaded_instance and load_evt:
- state.manager.dispatch.load(state, context)
+ if loaded_instance:
+ if load_evt:
+ state.manager.dispatch.load(state, context)
+ if persistent_evt:
+ loaded_as_persistent(context.session, state.obj())
elif refresh_evt:
state.manager.dispatch.refresh(
state, context, only_load_props)
diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py
index 48fbaae32..5ade4b966 100644
--- a/lib/sqlalchemy/orm/mapper.py
+++ b/lib/sqlalchemy/orm/mapper.py
@@ -1915,6 +1915,19 @@ class Mapper(InspectionAttr):
"""
@_memoized_configured_property
+ def _insert_cols_evaluating_none(self):
+ return dict(
+ (
+ table,
+ frozenset(
+ col.key for col in columns
+ if col.type.should_evaluate_none
+ )
+ )
+ for table, columns in self._cols_by_table.items()
+ )
+
+ @_memoized_configured_property
def _insert_cols_as_none(self):
return dict(
(
@@ -1922,7 +1935,8 @@ class Mapper(InspectionAttr):
frozenset(
col.key for col in columns
if not col.primary_key and
- not col.server_default and not col.default)
+ not col.server_default and not col.default
+ and not col.type.should_evaluate_none)
)
for table, columns in self._cols_by_table.items()
)
@@ -2557,15 +2571,24 @@ class Mapper(InspectionAttr):
for all relationships that meet the given cascade rule.
:param type_:
- The name of the cascade rule (i.e. save-update, delete,
- etc.)
+ The name of the cascade rule (i.e. ``"save-update"``, ``"delete"``,
+ etc.).
+
+ .. note:: the ``"all"`` cascade is not accepted here. For a generic
+ object traversal function, see :ref:`faq_walk_objects`.
:param state:
The lead InstanceState. Child items will be processed per
the relationships defined for this object's mapper.
- the return value are object instances; this provides a strong
- reference so that they don't fall out of scope immediately.
+ :return: the method yields individual object instances.
+
+ .. seealso::
+
+ :ref:`unitofwork_cascades`
+
+ :ref:`faq_walk_objects` - illustrates a generic function to
+ traverse all objects without relying on cascades.
"""
visited_states = set()
@@ -2682,7 +2705,33 @@ def configure_mappers():
have been constructed thus far.
This function can be called any number of times, but in
- most cases is handled internally.
+ most cases is invoked automatically, the first time mappings are used,
+ as well as whenever mappings are used and additional not-yet-configured
+ mappers have been constructed.
+
+ Points at which this occurs include when a mapped class is instantiated
+ into an instance, as well as when the :meth:`.Session.query` method
+ is used.
+
+ The :func:`.configure_mappers` function provides several event hooks
+ that can be used to augment its functionality. These methods include:
+
+ * :meth:`.MapperEvents.before_configured` - called once before
+ :func:`.configure_mappers` does any work; this can be used to establish
+ additional options, properties, or related mappings before the operation
+ proceeds.
+
+ * :meth:`.MapperEvents.mapper_configured` - called as each individual
+ :class:`.Mapper` is configured within the process; will include all
+ mapper state except for backrefs set up by other mappers that are still
+ to be configured.
+
+ * :meth:`.MapperEvents.after_configured` - called once after
+ :func:`.configure_mappers` is complete; at this stage, all
+ :class:`.Mapper` objects that are known to SQLAlchemy will be fully
+ configured. Note that the calling application may still have other
+ mappings that haven't been produced yet, such as if they are in modules
+ as yet unimported.
"""
diff --git a/lib/sqlalchemy/orm/persistence.py b/lib/sqlalchemy/orm/persistence.py
index 0bfee2ece..d89a93dd3 100644
--- a/lib/sqlalchemy/orm/persistence.py
+++ b/lib/sqlalchemy/orm/persistence.py
@@ -375,10 +375,12 @@ def _collect_insert_commands(
propkey_to_col = mapper._propkey_to_col[table]
+ eval_none = mapper._insert_cols_evaluating_none[table]
+
for propkey in set(propkey_to_col).intersection(state_dict):
value = state_dict[propkey]
col = propkey_to_col[propkey]
- if value is None:
+ if value is None and propkey not in eval_none:
continue
elif not bulk and isinstance(value, sql.ClauseElement):
value_params[col.key] = value
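The ``should_evaluate_none`` flag consulted above is typically enabled through the 1.1 ``TypeEngine.evaluates_none()`` method; a minimal sketch, assuming that API (the table itself is hypothetical):

    from sqlalchemy import Column, Integer, MetaData, String, Table

    metadata = MetaData()
    t = Table(
        "t", metadata,
        Column("id", Integer, primary_key=True),
        # a Python None for this column is now included in the INSERT
        # as an explicit NULL, rather than being omitted
        Column("data", String(50).evaluates_none(), nullable=True),
    )
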
@@ -670,15 +672,18 @@ def _emit_update_statements(base_mapper, uowtransaction,
connection, value_params in records:
c = cached_connections[connection].\
execute(statement, params)
- _postfetch(
- mapper,
- uowtransaction,
- table,
- state,
- state_dict,
- c,
- c.context.compiled_parameters[0],
- value_params)
+
+ # TODO: why with bookkeeping=False?
+ if bookkeeping:
+ _postfetch(
+ mapper,
+ uowtransaction,
+ table,
+ state,
+ state_dict,
+ c,
+ c.context.compiled_parameters[0],
+ value_params)
rows += c.rowcount
else:
multiparams = [rec[2] for rec in records]
@@ -692,17 +697,19 @@ def _emit_update_statements(base_mapper, uowtransaction,
execute(statement, multiparams)
rows += c.rowcount
+
for state, state_dict, params, mapper, \
connection, value_params in records:
- _postfetch(
- mapper,
- uowtransaction,
- table,
- state,
- state_dict,
- c,
- c.context.compiled_parameters[0],
- value_params)
+ if bookkeeping:
+ _postfetch(
+ mapper,
+ uowtransaction,
+ table,
+ state,
+ state_dict,
+ c,
+ c.context.compiled_parameters[0],
+ value_params)
if check_rowcount:
if rows != len(records):
@@ -752,15 +759,18 @@ def _emit_insert_statements(base_mapper, uowtransaction,
conn, value_params, has_all_pks, has_all_defaults), \
last_inserted_params in \
zip(records, c.context.compiled_parameters):
- _postfetch(
- mapper_rec,
- uowtransaction,
- table,
- state,
- state_dict,
- c,
- last_inserted_params,
- value_params)
+ if state:
+ _postfetch(
+ mapper_rec,
+ uowtransaction,
+ table,
+ state,
+ state_dict,
+ c,
+ last_inserted_params,
+ value_params)
+ else:
+ _postfetch_bulk_save(mapper_rec, state_dict, table)
else:
if not has_all_defaults and base_mapper.eager_defaults:
@@ -789,15 +799,19 @@ def _emit_insert_statements(base_mapper, uowtransaction,
prop = mapper_rec._columntoproperty[col]
if state_dict.get(prop.key) is None:
state_dict[prop.key] = pk
- _postfetch(
- mapper_rec,
- uowtransaction,
- table,
- state,
- state_dict,
- result,
- result.context.compiled_parameters[0],
- value_params)
+ if bookkeeping:
+ if state:
+ _postfetch(
+ mapper_rec,
+ uowtransaction,
+ table,
+ state,
+ state_dict,
+ result,
+ result.context.compiled_parameters[0],
+ value_params)
+ else:
+ _postfetch_bulk_save(mapper_rec, state_dict, table)
def _emit_post_update_statements(base_mapper, uowtransaction,
@@ -957,7 +971,7 @@ def _finalize_insert_update_commands(base_mapper, uowtransaction, states):
def _postfetch(mapper, uowtransaction, table,
- state, dict_, result, params, value_params, bulk=False):
+ state, dict_, result, params, value_params):
"""Expire attributes in need of newly persisted database state,
after an INSERT or UPDATE statement has proceeded for that
state."""
@@ -1005,13 +1019,15 @@ def _postfetch(mapper, uowtransaction, table,
# TODO: this still goes a little too often. would be nice to
# have definitive list of "columns that changed" here
for m, equated_pairs in mapper._table_to_equated[table]:
- if state is None:
- sync.bulk_populate_inherit_keys(dict_, m, equated_pairs)
- else:
- sync.populate(state, m, state, m,
- equated_pairs,
- uowtransaction,
- mapper.passive_updates)
+ sync.populate(state, m, state, m,
+ equated_pairs,
+ uowtransaction,
+ mapper.passive_updates)
+
+
+def _postfetch_bulk_save(mapper, dict_, table):
+ for m, equated_pairs in mapper._table_to_equated[table]:
+ sync.bulk_populate_inherit_keys(dict_, m, equated_pairs)
def _connections_for_states(base_mapper, uowtransaction, states):
diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py
index 55e02984b..b1f1c61c4 100644
--- a/lib/sqlalchemy/orm/properties.py
+++ b/lib/sqlalchemy/orm/properties.py
@@ -39,7 +39,7 @@ class ColumnProperty(StrategizedProperty):
'instrument', 'comparator_factory', 'descriptor', 'extension',
'active_history', 'expire_on_flush', 'info', 'doc',
'strategy_class', '_creation_order', '_is_polymorphic_discriminator',
- '_mapped_by_synonym', '_deferred_loader')
+ '_mapped_by_synonym', '_deferred_column_loader')
def __init__(self, *columns, **kwargs):
"""Provide a column-level property for use with a Mapper.
diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py
index 8b3df08e7..0af22b229 100644
--- a/lib/sqlalchemy/orm/query.py
+++ b/lib/sqlalchemy/orm/query.py
@@ -103,6 +103,7 @@ class Query(object):
_orm_only_adapt = True
_orm_only_from_obj_alias = True
_current_path = _path_registry
+ _has_mapper_entities = False
def __init__(self, entities, session=None):
self.session = session
@@ -114,6 +115,7 @@ class Query(object):
entity_wrapper = _QueryEntity
self._entities = []
self._primary_entity = None
+ self._has_mapper_entities = False
for ent in util.to_list(entities):
entity_wrapper(self, ent)
@@ -608,6 +610,16 @@ class Query(object):
When the `Query` actually issues SQL to load rows, it always
uses column labeling.
+ .. note:: The :meth:`.Query.with_labels` method *only* applies
+ to the output of :attr:`.Query.statement`, and *not* to any of
+ the result-row invoking systems of :class:`.Query` itself, e.g.
+ :meth:`.Query.first`, :meth:`.Query.all`, etc. To execute
+ a query using :meth:`.Query.with_labels`, invoke the
+ :attr:`.Query.statement` using :meth:`.Session.execute`::
+
+ result = session.execute(query.with_labels().statement)
+
+
"""
self._with_labels = True
@@ -1280,7 +1292,9 @@ class Query(object):
session.query(MyClass).filter(MyClass.name == 'some name')
- Multiple criteria are joined together by AND::
+ Multiple criteria may be specified as comma separated; the effect
+ is that they will be joined together using the :func:`.and_`
+ function::
session.query(MyClass).\\
filter(MyClass.name == 'some name', MyClass.id > 5)
@@ -1289,9 +1303,6 @@ class Query(object):
WHERE clause of a select. String expressions are coerced
into SQL expression constructs via the :func:`.text` construct.
- .. versionchanged:: 0.7.5
- Multiple criteria joined by AND.
-
.. seealso::
:meth:`.Query.filter_by` - filter on keyword expressions.
@@ -1315,7 +1326,9 @@ class Query(object):
session.query(MyClass).filter_by(name = 'some name')
- Multiple criteria are joined together by AND::
+ Multiple criteria may be specified as comma separated; the effect
+ is that they will be joined together using the :func:`.and_`
+ function::
session.query(MyClass).\\
filter_by(name = 'some name', id = 5)
@@ -2323,6 +2336,19 @@ class Query(object):
"""Apply a ``DISTINCT`` to the query and return the newly resulting
``Query``.
+
+ .. note::
+
+ The :meth:`.distinct` call includes logic that will automatically
+ add columns from the ORDER BY of the query to the columns
+ clause of the SELECT statement, to satisfy the common need
+ of the database backend that ORDER BY columns be part of the
+ SELECT list when DISTINCT is used. These columns *are not*
+ added to the list of columns actually fetched by the
+ :class:`.Query`, however, so they do not affect results.
+ The columns are passed through when using the
+ :attr:`.Query.statement` accessor, however.
+
:param \*expr: optional column expressions. When present,
the Postgresql dialect will render a ``DISTINCT ON (<expressions>)``
construct.
@@ -2448,6 +2474,40 @@ class Query(object):
else:
return None
+ def one_or_none(self):
+ """Return at most one result or raise an exception.
+
+ Returns ``None`` if the query selects
+ no rows. Raises ``sqlalchemy.orm.exc.MultipleResultsFound``
+ if multiple object identities are returned, or if multiple
+ rows are returned for a query that does not return object
+ identities.
+
+ Note that an entity query, that is, one which selects one or
+ more mapped classes as opposed to individual column attributes,
+ may ultimately represent many rows but only one row of
+ unique entity or entities - this is a successful result for
+ ``one_or_none()``.
+
+ Calling ``one_or_none()`` results in an execution of the underlying
+ query.
+
+ .. versionadded:: 1.0.9
+
+ Added :meth:`.Query.one_or_none`
+
+ """
+ ret = list(self)
+
+ l = len(ret)
+ if l == 1:
+ return ret[0]
+ elif l == 0:
+ return None
+ else:
+ raise orm_exc.MultipleResultsFound(
+ "Multiple rows were found for one_or_none()")
+
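A usage sketch; the ``User`` class and ``session`` are hypothetical:

    user = session.query(User).filter(User.name == 'ed').one_or_none()
    if user is None:
        print("no such user")
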
def one(self):
"""Return exactly one result or raise an exception.
@@ -2469,6 +2529,12 @@ class Query(object):
any kind of limit, so that the "unique"-ing of entities does not
conceal multiple object identities.
+ .. seealso::
+
+ :meth:`.Query.first`
+
+ :meth:`.Query.one_or_none`
+
"""
ret = list(self)
@@ -3181,12 +3247,14 @@ class _MapperEntity(_QueryEntity):
if not query._primary_entity:
query._primary_entity = self
query._entities.append(self)
-
+ query._has_mapper_entities = True
self.entities = [entity]
self.expr = entity
supports_single_entity = True
+ use_id_for_hash = True
+
def setup_entity(self, ext_info, aliased_adapter):
self.mapper = ext_info.mapper
self.aliased_adapter = aliased_adapter
@@ -3232,8 +3300,6 @@ class _MapperEntity(_QueryEntity):
self.mapper, sql_util.ColumnAdapter(
from_obj, self.mapper._equivalent_columns))
- filter_fn = id
-
@property
def type(self):
return self.mapper.class_
@@ -3462,6 +3528,8 @@ class Bundle(InspectionAttr):
class _BundleEntity(_QueryEntity):
+ use_id_for_hash = False
+
def __init__(self, query, bundle, setup_entities=True):
query._entities.append(self)
self.bundle = self.expr = bundle
@@ -3478,8 +3546,6 @@ class _BundleEntity(_QueryEntity):
self.entities = ()
- self.filter_fn = lambda item: item
-
self.supports_single_entity = self.bundle.single_entity
@property
@@ -3582,11 +3648,7 @@ class _ColumnEntity(_QueryEntity):
search_entities = True
self.type = type_ = column.type
- if type_.hashable:
- self.filter_fn = lambda item: item
- else:
- counter = util.counter()
- self.filter_fn = lambda item: counter()
+ self.use_id_for_hash = not type_.hashable
# If the Column is unnamed, give it a
# label() so that mutable column expressions
diff --git a/lib/sqlalchemy/orm/relationships.py b/lib/sqlalchemy/orm/relationships.py
index da0730f46..552ce8b69 100644
--- a/lib/sqlalchemy/orm/relationships.py
+++ b/lib/sqlalchemy/orm/relationships.py
@@ -604,30 +604,26 @@ class RelationshipProperty(StrategizedProperty):
and examples.
:param passive_updates=True:
- Indicates loading and INSERT/UPDATE/DELETE behavior when the
- source of a foreign key value changes (i.e. an "on update"
- cascade), which are typically the primary key columns of the
- source row.
+ Indicates the persistence behavior to take when a referenced
+ primary key value changes in place, meaning that the referencing
+ foreign key columns will also need their value changed.
- When True, it is assumed that ON UPDATE CASCADE is configured on
+ When True, it is assumed that ``ON UPDATE CASCADE`` is configured on
the foreign key in the database, and that the database will
handle propagation of an UPDATE from a source column to
- dependent rows. Note that with databases which enforce
- referential integrity (i.e. PostgreSQL, MySQL with InnoDB tables),
- ON UPDATE CASCADE is required for this operation. The
- relationship() will update the value of the attribute on related
- items which are locally present in the session during a flush.
-
- When False, it is assumed that the database does not enforce
- referential integrity and will not be issuing its own CASCADE
- operation for an update. The relationship() will issue the
- appropriate UPDATE statements to the database in response to the
- change of a referenced key, and items locally present in the
- session during a flush will also be refreshed.
-
- This flag should probably be set to False if primary key changes
- are expected and the database in use doesn't support CASCADE
- (i.e. SQLite, MySQL MyISAM tables).
+ dependent rows. When False, the SQLAlchemy :func:`.relationship`
+ construct will attempt to emit its own UPDATE statements to
+ modify related targets. However note that SQLAlchemy **cannot**
+ emit an UPDATE for more than one level of cascade. Also,
+ setting this flag to False is not compatible with a database
+ that is in fact enforcing referential integrity, unless
+ those constraints are explicitly "deferred", if the target backend
+ supports it.
+
+ It is highly advised that an application which is employing
+ mutable primary keys keeps ``passive_updates`` set to True,
+ and instead uses the referential integrity features of the database
+ itself in order to handle the change efficiently and fully.
.. seealso::
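A sketch of the recommended configuration, assuming a backend that supports ``ON UPDATE CASCADE``; the class and table names are hypothetical:

    from sqlalchemy import Column, ForeignKey, Integer, String
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import relationship

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'user'
        username = Column(String(50), primary_key=True)
        # passive_updates=True (the default) relies on the database's
        # ON UPDATE CASCADE to fix up the foreign keys
        addresses = relationship(
            "Address", passive_updates=True, backref="user")

    class Address(Base):
        __tablename__ = 'address'
        id = Column(Integer, primary_key=True)
        username = Column(
            String(50), ForeignKey('user.username', onupdate="cascade"))
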
diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py
index 4619027e5..c726443f6 100644
--- a/lib/sqlalchemy/orm/session.py
+++ b/lib/sqlalchemy/orm/session.py
@@ -180,8 +180,7 @@ class SessionTransaction(object):
if self.session._enable_transaction_accounting:
self._take_snapshot()
- if self.session.dispatch.after_transaction_create:
- self.session.dispatch.after_transaction_create(self.session, self)
+ self.session.dispatch.after_transaction_create(self.session, self)
@property
def is_active(self):
@@ -272,10 +271,9 @@ class SessionTransaction(object):
def _restore_snapshot(self, dirty_only=False):
assert self._is_transaction_boundary
- for s in set(self._new).union(self.session._new):
- self.session._expunge_state(s)
- if s.key:
- del s.key
+ self.session._expunge_states(
+ set(self._new).union(self.session._new),
+ to_transient=True)
for s, (oldkey, newkey) in self._key_switches.items():
self.session.identity_map.safe_discard(s)
@@ -283,10 +281,7 @@ class SessionTransaction(object):
self.session.identity_map.replace(s)
for s in set(self._deleted).union(self.session._deleted):
- if s.deleted:
- # assert s in self._deleted
- del s.deleted
- self.session._update_impl(s, discard_existing=True)
+ self.session._update_impl(s, revert_deletion=True)
assert not self.session._deleted
@@ -300,8 +295,9 @@ class SessionTransaction(object):
if not self.nested and self.session.expire_on_commit:
for s in self.session.identity_map.all_states():
s._expire(s.dict, self.session.identity_map._modified)
- for s in list(self._deleted):
- s._detach()
+
+ statelib.InstanceState._detach_states(
+ list(self._deleted), self.session)
self._deleted.clear()
elif self.nested:
self._parent._new.update(self._new)
@@ -466,8 +462,7 @@ class SessionTransaction(object):
transaction.close()
self._state = CLOSED
- if self.session.dispatch.after_transaction_end:
- self.session.dispatch.after_transaction_end(self.session, self)
+ self.session.dispatch.after_transaction_end(self.session, self)
if self._parent is None:
if not self.session.autocommit:
@@ -629,16 +624,23 @@ class Session(_SessionClassMethods):
:param weak_identity_map: Defaults to ``True`` - when set to
``False``, objects placed in the :class:`.Session` will be
strongly referenced until explicitly removed or the
- :class:`.Session` is closed. **Deprecated** - this option
- is obsolete.
+ :class:`.Session` is closed. **Deprecated** - The strong
+ reference identity map is legacy. See the
+ recipe at :ref:`session_referencing_behavior` for
+ an event-based approach to maintaining strong identity
+ references.
"""
if weak_identity_map:
self._identity_cls = identity.WeakInstanceDict
else:
- util.warn_deprecated("weak_identity_map=False is deprecated. "
- "This feature is not needed.")
+ util.warn_deprecated(
+ "weak_identity_map=False is deprecated. "
+ "See the documentation on 'Session Referencing Behavior' "
+ "for an event-based approach to maintaining strong identity "
+ "references.")
+
self._identity_cls = identity.StrongInstanceDict
self.identity_map = self._identity_cls()
@@ -1086,16 +1088,15 @@ class Session(_SessionClassMethods):
``Session``.
"""
- for state in self.identity_map.all_states() + list(self._new):
- state._detach()
+ all_states = self.identity_map.all_states() + list(self._new)
self.identity_map = self._identity_cls()
self._new = {}
self._deleted = {}
- # TODO: need much more test coverage for bind_mapper() and similar !
- # TODO: + crystallize + document resolution order
- # vis. bind_mapper/bind_table
+ statelib.InstanceState._detach_states(
+ all_states, self
+ )
def _add_bind(self, key, bind):
try:
@@ -1437,7 +1438,7 @@ class Session(_SessionClassMethods):
state._expire(state.dict, self.identity_map._modified)
elif state in self._new:
self._new.pop(state)
- state._detach()
+ state._detach(self)
@util.deprecated("0.7", "The non-weak-referencing identity map "
"feature is no longer needed.")
@@ -1472,23 +1473,26 @@ class Session(_SessionClassMethods):
cascaded = list(state.manager.mapper.cascade_iterator(
'expunge', state))
- self._expunge_state(state)
- for o, m, st_, dct_ in cascaded:
- self._expunge_state(st_)
+ self._expunge_states(
+ [state] + [st_ for o, m, st_, dct_ in cascaded]
+ )
- def _expunge_state(self, state):
- if state in self._new:
- self._new.pop(state)
- state._detach()
- elif self.identity_map.contains_state(state):
- self.identity_map.safe_discard(state)
- self._deleted.pop(state, None)
- state._detach()
- elif self.transaction:
- self.transaction._deleted.pop(state, None)
- state._detach()
+ def _expunge_states(self, states, to_transient=False):
+ for state in states:
+ if state in self._new:
+ self._new.pop(state)
+ elif self.identity_map.contains_state(state):
+ self.identity_map.safe_discard(state)
+ self._deleted.pop(state, None)
+ elif self.transaction:
+ # state is "detached" from being deleted, but still present
+ # in the transaction snapshot
+ self.transaction._deleted.pop(state, None)
+ statelib.InstanceState._detach_states(
+ states, self, to_transient=to_transient)
def _register_newly_persistent(self, states):
+ pending_to_persistent = self.dispatch.pending_to_persistent or None
for state in states:
mapper = _state_mapper(state)
@@ -1535,6 +1539,11 @@ class Session(_SessionClassMethods):
)
self._register_altered(states)
+
+ if pending_to_persistent is not None:
+ for state in states:
+ pending_to_persistent(self, state.obj())
+
# remove from new last, might be the last strong ref
for state in set(states).intersection(self._new):
self._new.pop(state)
@@ -1548,13 +1557,19 @@ class Session(_SessionClassMethods):
self.transaction._dirty[state] = True
def _remove_newly_deleted(self, states):
+ persistent_to_deleted = self.dispatch.persistent_to_deleted or None
for state in states:
if self._enable_transaction_accounting and self.transaction:
self.transaction._deleted[state] = True
self.identity_map.safe_discard(state)
self._deleted.pop(state, None)
- state.deleted = True
+ state._deleted = True
+ # can't call state._detach() here, because this state
+ # is still in the transaction snapshot and needs to be
+ # tracked as part of that
+ if persistent_to_deleted is not None:
+ persistent_to_deleted(self, state.obj())
def add(self, instance, _warn=True):
"""Place an object in the ``Session``.
@@ -1609,30 +1624,39 @@ class Session(_SessionClassMethods):
except exc.NO_STATE:
raise exc.UnmappedInstanceError(instance)
+ self._delete_impl(state, instance, head=True)
+
+ def _delete_impl(self, state, obj, head):
+
if state.key is None:
- raise sa_exc.InvalidRequestError(
- "Instance '%s' is not persisted" %
- state_str(state))
+ if head:
+ raise sa_exc.InvalidRequestError(
+ "Instance '%s' is not persisted" %
+ state_str(state))
+ else:
+ return
+
+ to_attach = self._before_attach(state, obj)
if state in self._deleted:
return
- # ensure object is attached to allow the
- # cascade operation to load deferred attributes
- # and collections
- self._attach(state, include_before=True)
+ if to_attach:
+ self.identity_map.add(state)
+ self._after_attach(state, obj)
- # grab the cascades before adding the item to the deleted list
- # so that autoflush does not delete the item
- # the strong reference to the instance itself is significant here
- cascade_states = list(state.manager.mapper.cascade_iterator(
- 'delete', state))
+ if head:
+ # grab the cascades before adding the item to the deleted list
+ # so that autoflush does not delete the item
+ # the strong reference to the instance itself is significant here
+ cascade_states = list(state.manager.mapper.cascade_iterator(
+ 'delete', state))
- self._deleted[state] = state.obj()
- self.identity_map.add(state)
+ self._deleted[state] = obj
- for o, m, st_, dct_ in cascade_states:
- self._delete_impl(st_)
+ if head:
+ for o, m, st_, dct_ in cascade_states:
+ self._delete_impl(st_, o, False)
def merge(self, instance, load=True):
"""Copy the state of a given instance into a corresponding instance
@@ -1809,35 +1833,47 @@ class Session(_SessionClassMethods):
"Object '%s' already has an identity - "
"it can't be registered as pending" % state_str(state))
- self._before_attach(state)
+ obj = state.obj()
+ to_attach = self._before_attach(state, obj)
if state not in self._new:
- self._new[state] = state.obj()
+ self._new[state] = obj
state.insert_order = len(self._new)
- self._attach(state)
-
- def _update_impl(self, state, discard_existing=False):
- if (self.identity_map.contains_state(state) and
- state not in self._deleted):
- return
+ if to_attach:
+ self._after_attach(state, obj)
+ def _update_impl(self, state, revert_deletion=False):
if state.key is None:
raise sa_exc.InvalidRequestError(
"Instance '%s' is not persisted" %
state_str(state))
- if state.deleted:
- raise sa_exc.InvalidRequestError(
- "Instance '%s' has been deleted. Use the make_transient() "
- "function to send this object back to the transient state." %
- state_str(state)
- )
- self._before_attach(state, check_identity_map=False)
+ if state._deleted:
+ if revert_deletion:
+ if not state._attached:
+ return
+ del state._deleted
+ else:
+ raise sa_exc.InvalidRequestError(
+ "Instance '%s' has been deleted. "
+ "Use the make_transient() "
+ "function to send this object back "
+ "to the transient state." %
+ state_str(state)
+ )
+
+ obj = state.obj()
+ to_attach = self._before_attach(state, obj)
+
self._deleted.pop(state, None)
- if discard_existing:
+ if revert_deletion:
self.identity_map.replace(state)
else:
self.identity_map.add(state)
- self._attach(state)
+
+ if to_attach:
+ self._after_attach(state, obj)
+ elif revert_deletion:
+ self.dispatch.deleted_to_persistent(self, obj)
def _save_or_update_impl(self, state):
if state.key is None:
@@ -1845,17 +1881,6 @@ class Session(_SessionClassMethods):
else:
self._update_impl(state)
- def _delete_impl(self, state):
- if state in self._deleted:
- return
-
- if state.key is None:
- return
-
- self._attach(state, include_before=True)
- self._deleted[state] = state.obj()
- self.identity_map.add(state)
-
def enable_relationship_loading(self, obj):
"""Associate an object with this :class:`.Session` for related
object loading.
@@ -1908,40 +1933,35 @@ class Session(_SessionClassMethods):
"""
state = attributes.instance_state(obj)
- self._attach(state, include_before=True)
+ to_attach = self._before_attach(state, obj)
state._load_pending = True
+ if to_attach:
+ self._after_attach(state, obj)
- def _before_attach(self, state, check_identity_map=True):
- if state.session_id != self.hash_key and \
- self.dispatch.before_attach:
- self.dispatch.before_attach(self, state.obj())
-
- if check_identity_map and state.key and \
- state.key in self.identity_map and \
- not self.identity_map.contains_state(state):
- raise sa_exc.InvalidRequestError(
- "Can't attach instance "
- "%s; another instance with key %s is already "
- "present in this session." % (state_str(state), state.key))
+ def _before_attach(self, state, obj):
+ if state.session_id == self.hash_key:
+ return False
- if state.session_id and \
- state.session_id is not self.hash_key and \
- state.session_id in _sessions:
+ if state.session_id and state.session_id in _sessions:
raise sa_exc.InvalidRequestError(
"Object '%s' is already attached to session '%s' "
"(this is '%s')" % (state_str(state),
state.session_id, self.hash_key))
- def _attach(self, state, include_before=False):
+ self.dispatch.before_attach(self, obj)
+
+ return True
+
+ def _after_attach(self, state, obj):
+ state.session_id = self.hash_key
+ if state.modified and state._strong_obj is None:
+ state._strong_obj = obj
+ self.dispatch.after_attach(self, obj)
- if state.session_id != self.hash_key:
- if include_before:
- self._before_attach(state)
- state.session_id = self.hash_key
- if state.modified and state._strong_obj is None:
- state._strong_obj = state.obj()
- if self.dispatch.after_attach:
- self.dispatch.after_attach(self, state.obj())
+ if state.key:
+ self.dispatch.detached_to_persistent(self, obj)
+ else:
+ self.dispatch.transient_to_pending(self, obj)
def __contains__(self, instance):
"""Return True if the instance is associated with this session.
@@ -1983,7 +2003,7 @@ class Session(_SessionClassMethods):
For ``autocommit`` Sessions with no active manual transaction, flush()
will create a transaction on the fly that surrounds the entire set of
- operations int the flush.
+ operations in the flush.
:param objects: Optional; restricts the flush operation to operate
only on elements that are in the given collection.
@@ -2700,7 +2720,7 @@ def make_transient(instance):
state = attributes.instance_state(instance)
s = _state_session(state)
if s:
- s._expunge_state(state)
+ s._expunge_states([state])
# remove expired state
state.expired_attributes.clear()
@@ -2711,8 +2731,8 @@ def make_transient(instance):
if state.key:
del state.key
- if state.deleted:
- del state.deleted
+ if state._deleted:
+ del state._deleted
def make_transient_to_detached(instance):
@@ -2744,8 +2764,8 @@ def make_transient_to_detached(instance):
raise sa_exc.InvalidRequestError(
"Given object must be transient")
state.key = state.mapper._identity_key_from_state(state)
- if state.deleted:
- del state.deleted
+ if state._deleted:
+ del state._deleted
state._commit_all(state.dict)
state._expire_attributes(state.dict, state.unloaded)
diff --git a/lib/sqlalchemy/orm/state.py b/lib/sqlalchemy/orm/state.py
index 6034e74de..b648ffa3b 100644
--- a/lib/sqlalchemy/orm/state.py
+++ b/lib/sqlalchemy/orm/state.py
@@ -14,6 +14,7 @@ defines a large part of the ORM's interactivity.
import weakref
from .. import util
+from .. import inspection
from . import exc as orm_exc, interfaces
from .path_registry import PathRegistry
from .base import PASSIVE_NO_RESULT, SQL_OK, NEVER_SET, ATTR_WAS_SET, \
@@ -21,6 +22,7 @@ from .base import PASSIVE_NO_RESULT, SQL_OK, NEVER_SET, ATTR_WAS_SET, \
from . import base
+@inspection._self_inspects
class InstanceState(interfaces.InspectionAttr):
"""tracks state information at the instance level.
@@ -56,7 +58,7 @@ class InstanceState(interfaces.InspectionAttr):
_strong_obj = None
modified = False
expired = False
- deleted = False
+ _deleted = False
_load_pending = False
is_instance = True
@@ -87,7 +89,6 @@ class InstanceState(interfaces.InspectionAttr):
see also the ``unmodified`` collection which is intersected
against this set when a refresh operation occurs."""
-
@util.memoized_property
def attrs(self):
"""Return a namespace representing each attribute on
@@ -133,16 +134,80 @@ class InstanceState(interfaces.InspectionAttr):
self._attached
@property
+ def deleted(self):
+ """Return true if the object is :term:`deleted`.
+
+ An object that is in the deleted state is guaranteed to
+ not be within the :attr:`.Session.identity_map` of its parent
+ :class:`.Session`; however if the session's transaction is rolled
+ back, the object will be restored to the persistent state and
+ the identity map.
+
+ .. note::
+
+ The :attr:`.InstanceState.deleted` attribute refers to a specific
+ state of the object that occurs between the "persistent" and
+ "detached" states; once the object is :term:`detached`, the
+ :attr:`.InstanceState.deleted` attribute **no longer returns
+ True**; in order to detect that a state was deleted, regardless
+ of whether or not the object is associated with a :class:`.Session`,
+ use the :attr:`.InstanceState.was_deleted` accessor.
+
+ .. versionadded:: 1.1
+
+ .. seealso::
+
+ :ref:`session_object_states`
+
+ """
+ return self.key is not None and \
+ self._attached and self._deleted
+
+ @property
+ def was_deleted(self):
+ """Return True if this object is or was previously in the
+ "deleted" state and has not been reverted to persistent.
+
+ This flag returns True once the object was deleted in flush.
+ When the object is expunged from the session either explicitly
+ or via transaction commit and enters the "detached" state,
+ this flag will continue to report True.
+
+ .. versionadded:: 1.1 - added a local method form of
+ :func:`.orm.util.was_deleted`.
+
+ .. seealso::
+
+ :attr:`.InstanceState.deleted` - refers to the "deleted" state
+
+ :func:`.orm.util.was_deleted` - standalone function
+
+ :ref:`session_object_states`
+
+ """
+ return self._deleted
+
+ @property
def persistent(self):
"""Return true if the object is :term:`persistent`.
+ An object that is in the persistent state is guaranteed to
+ be within the :attr:`.Session.identity_map` of its parent
+ :class:`.Session`.
+
+ .. versionchanged:: 1.1 The :attr:`.InstanceState.persistent`
+ accessor no longer returns True for an object that was
+ "deleted" within a flush; use the :attr:`.InstanceState.deleted`
+ accessor to detect this state. This allows the "persistent"
+ state to guarantee membership in the identity map.
+
.. seealso::
:ref:`session_object_states`
"""
return self.key is not None and \
- self._attached
+ self._attached and not self._deleted
@property
def detached(self):
@@ -153,8 +218,7 @@ class InstanceState(interfaces.InspectionAttr):
:ref:`session_object_states`
"""
- return self.key is not None and \
- not self._attached
+ return self.key is not None and not self._attached
@property
@util.dependencies("sqlalchemy.orm.session")
@@ -241,8 +305,44 @@ class InstanceState(interfaces.InspectionAttr):
"""
return bool(self.key)
- def _detach(self):
- self.session_id = self._strong_obj = None
+ @classmethod
+ def _detach_states(self, states, session, to_transient=False):
+ persistent_to_detached = \
+ session.dispatch.persistent_to_detached or None
+ deleted_to_detached = \
+ session.dispatch.deleted_to_detached or None
+ pending_to_transient = \
+ session.dispatch.pending_to_transient or None
+ persistent_to_transient = \
+ session.dispatch.persistent_to_transient or None
+
+ for state in states:
+ deleted = state._deleted
+ pending = state.key is None
+ persistent = not pending and not deleted
+
+ state.session_id = None
+
+ if to_transient and state.key:
+ del state.key
+ if persistent:
+ if to_transient:
+ if persistent_to_transient is not None:
+ persistent_to_transient(session, state.obj())
+ elif persistent_to_detached is not None:
+ persistent_to_detached(session, state.obj())
+ elif deleted and deleted_to_detached is not None:
+ deleted_to_detached(session, state.obj())
+ elif pending and pending_to_transient is not None:
+ pending_to_transient(session, state.obj())
+
+ state._strong_obj = None
+
+ def _detach(self, session=None):
+ if session:
+ InstanceState._detach_states([self], session)
+ else:
+ self.session_id = self._strong_obj = None
def _dispose(self):
self._detach()
@@ -294,7 +394,7 @@ class InstanceState(interfaces.InspectionAttr):
return {}
def _initialize_instance(*mixed, **kwargs):
- self, instance, args = mixed[0], mixed[1], mixed[2:]
+ self, instance, args = mixed[0], mixed[1], mixed[2:] # noqa
manager = self.manager
manager.dispatch.init(self, args, kwargs)
@@ -374,12 +474,6 @@ class InstanceState(interfaces.InspectionAttr):
state_dict['manager'](self, inst, state_dict)
- def _initialize(self, key):
- """Set this attribute to an empty value or collection,
- based on the AttributeImpl in use."""
-
- self.manager.get_impl(key).initialize(self, self.dict)
-
def _reset(self, dict_, key):
"""Remove the given attribute and any
callables associated with it."""
diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py
index 78e929345..67dac1ccc 100644
--- a/lib/sqlalchemy/orm/strategies.py
+++ b/lib/sqlalchemy/orm/strategies.py
@@ -346,7 +346,10 @@ class NoLoader(AbstractRelationshipLoader):
self, context, path, loadopt, mapper,
result, adapter, populators):
def invoke_no_load(state, dict_, row):
- state._initialize(self.key)
+ if self.uselist:
+ state.manager.get_impl(self.key).initialize(state, dict_)
+ else:
+ dict_[self.key] = None
populators["new"].append((self.key, invoke_no_load))
@@ -361,7 +364,8 @@ class LazyLoader(AbstractRelationshipLoader, util.MemoizedSlots):
__slots__ = (
'_lazywhere', '_rev_lazywhere', 'use_get', '_bind_to_col',
- '_equated_columns', '_rev_bind_to_col', '_rev_equated_columns')
+ '_equated_columns', '_rev_bind_to_col', '_rev_equated_columns',
+ '_simple_lazy_clause')
def __init__(self, parent):
super(LazyLoader, self).__init__(parent)
diff --git a/lib/sqlalchemy/orm/strategy_options.py b/lib/sqlalchemy/orm/strategy_options.py
index cb7a5fef7..3467328e3 100644
--- a/lib/sqlalchemy/orm/strategy_options.py
+++ b/lib/sqlalchemy/orm/strategy_options.py
@@ -180,7 +180,7 @@ class Load(Generative, MapperOption):
return path
def __str__(self):
- return "Load(strategy=%r)" % self.strategy
+ return "Load(strategy=%r)" % (self.strategy, )
def _coerce_strat(self, strategy):
if strategy is not None:
diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py
index 6d3869679..4351c8dc6 100644
--- a/lib/sqlalchemy/orm/util.py
+++ b/lib/sqlalchemy/orm/util.py
@@ -985,12 +985,19 @@ def was_deleted(object):
"""Return True if the given object was deleted
within a session flush.
+    This is the case regardless of whether the object is currently
+    in the persistent or the detached state.
+
.. versionadded:: 0.8.0
+ .. seealso::
+
+ :attr:`.InstanceState.was_deleted`
+
"""
state = attributes.instance_state(object)
- return state.deleted
+ return state.was_deleted
def randomize_unitofwork():
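A minimal sketch (assuming an existing mapped ``User`` class and a ``Session`` bound to an engine) of how the accessors above report around a flushed delete::

    from sqlalchemy import inspect
    from sqlalchemy.orm.util import was_deleted

    some_user = session.query(User).first()
    session.delete(some_user)
    session.flush()

    state = inspect(some_user)
    state.deleted          # True  - deleted within the flush, still in the Session
    state.persistent       # False - as of 1.1, "persistent" implies identity-map membership
    state.was_deleted      # True

    session.commit()       # the object is now detached
    state.deleted          # False
    state.was_deleted      # still True
    was_deleted(some_user) # same result as state.was_deleted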
diff --git a/lib/sqlalchemy/pool.py b/lib/sqlalchemy/pool.py
index b38aefb3d..4dd954fc4 100644
--- a/lib/sqlalchemy/pool.py
+++ b/lib/sqlalchemy/pool.py
@@ -587,7 +587,12 @@ class _ConnectionRecord(object):
if recycle:
self.__close()
self.info.clear()
+
+ # ensure that if self.__connect() fails,
+ # we are not referring to the previous stale connection here
+ self.connection = None
self.connection = self.__connect()
+
if self.__pool.dispatch.connect:
self.__pool.dispatch.connect(self.connection, self)
return self.connection
diff --git a/lib/sqlalchemy/sql/__init__.py b/lib/sqlalchemy/sql/__init__.py
index e8b70061d..fa2cf2399 100644
--- a/lib/sqlalchemy/sql/__init__.py
+++ b/lib/sqlalchemy/sql/__init__.py
@@ -21,6 +21,8 @@ from .expression import (
Update,
alias,
and_,
+ any_,
+ all_,
asc,
between,
bindparam,
diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py
index d2fa1d553..691195772 100644
--- a/lib/sqlalchemy/sql/compiler.py
+++ b/lib/sqlalchemy/sql/compiler.py
@@ -97,6 +97,8 @@ OPERATORS = {
operators.exists: 'EXISTS ',
operators.distinct_op: 'DISTINCT ',
operators.inv: 'NOT ',
+ operators.any_op: 'ANY ',
+ operators.all_op: 'ALL ',
# modifiers
operators.desc_op: ' DESC',
@@ -281,6 +283,8 @@ class _CompileLabel(visitors.Visitable):
def type(self):
return self.element.type
+ def self_group(self, **kw):
+ return self
class SQLCompiler(Compiled):
@@ -761,6 +765,9 @@ class SQLCompiler(Compiled):
x += "END"
return x
+ def visit_type_coerce(self, type_coerce, **kw):
+ return type_coerce.typed_expression._compiler_dispatch(self, **kw)
+
def visit_cast(self, cast, **kwargs):
return "CAST(%s AS %s)" % \
(cast.clause._compiler_dispatch(self, **kwargs),
@@ -768,7 +775,7 @@ class SQLCompiler(Compiled):
def visit_over(self, over, **kwargs):
return "%s OVER (%s)" % (
- over.func._compiler_dispatch(self, **kwargs),
+ over.element._compiler_dispatch(self, **kwargs),
' '.join(
'%s BY %s' % (word, clause._compiler_dispatch(self, **kwargs))
for word, clause in (
@@ -779,6 +786,12 @@ class SQLCompiler(Compiled):
)
)
+ def visit_withingroup(self, withingroup, **kwargs):
+ return "%s WITHIN GROUP (ORDER BY %s)" % (
+ withingroup.element._compiler_dispatch(self, **kwargs),
+ withingroup.order_by._compiler_dispatch(self, **kwargs)
+ )
+
def visit_funcfilter(self, funcfilter, **kwargs):
return "%s FILTER (WHERE %s)" % (
funcfilter.func._compiler_dispatch(self, **kwargs),
@@ -1270,9 +1283,6 @@ class SQLCompiler(Compiled):
return " AS " + alias_name_text
def _add_to_result_map(self, keyname, name, objects, type_):
- if not self.dialect.case_sensitive:
- keyname = keyname.lower()
-
self._result_columns.append((keyname, name, objects, type_))
def _label_select_column(self, select, column,
@@ -1812,6 +1822,22 @@ class SQLCompiler(Compiled):
join.onclause._compiler_dispatch(self, **kwargs)
)
+ def _setup_crud_hints(self, stmt, table_text):
+ dialect_hints = dict([
+ (table, hint_text)
+ for (table, dialect), hint_text in
+ stmt._hints.items()
+ if dialect in ('*', self.dialect.name)
+ ])
+ if stmt.table in dialect_hints:
+ table_text = self.format_from_hint_text(
+ table_text,
+ stmt.table,
+ dialect_hints[stmt.table],
+ True
+ )
+ return dialect_hints, table_text
+
def visit_insert(self, insert_stmt, **kw):
self.stack.append(
{'correlate_froms': set(),
@@ -1853,19 +1879,10 @@ class SQLCompiler(Compiled):
table_text = preparer.format_table(insert_stmt.table)
if insert_stmt._hints:
- dialect_hints = dict([
- (table, hint_text)
- for (table, dialect), hint_text in
- insert_stmt._hints.items()
- if dialect in ('*', self.dialect.name)
- ])
- if insert_stmt.table in dialect_hints:
- table_text = self.format_from_hint_text(
- table_text,
- insert_stmt.table,
- dialect_hints[insert_stmt.table],
- True
- )
+ dialect_hints, table_text = self._setup_crud_hints(
+ insert_stmt, table_text)
+ else:
+ dialect_hints = None
text += table_text
@@ -1957,19 +1974,8 @@ class SQLCompiler(Compiled):
crud_params = crud._get_crud_params(self, update_stmt, **kw)
if update_stmt._hints:
- dialect_hints = dict([
- (table, hint_text)
- for (table, dialect), hint_text in
- update_stmt._hints.items()
- if dialect in ('*', self.dialect.name)
- ])
- if update_stmt.table in dialect_hints:
- table_text = self.format_from_hint_text(
- table_text,
- update_stmt.table,
- dialect_hints[update_stmt.table],
- True
- )
+ dialect_hints, table_text = self._setup_crud_hints(
+ update_stmt, table_text)
else:
dialect_hints = None
@@ -2038,22 +2044,8 @@ class SQLCompiler(Compiled):
self, asfrom=True, iscrud=True)
if delete_stmt._hints:
- dialect_hints = dict([
- (table, hint_text)
- for (table, dialect), hint_text in
- delete_stmt._hints.items()
- if dialect in ('*', self.dialect.name)
- ])
- if delete_stmt.table in dialect_hints:
- table_text = self.format_from_hint_text(
- table_text,
- delete_stmt.table,
- dialect_hints[delete_stmt.table],
- True
- )
-
- else:
- dialect_hints = None
+ dialect_hints, table_text = self._setup_crud_hints(
+ delete_stmt, table_text)
text += table_text
@@ -2139,11 +2131,11 @@ class DDLCompiler(Compiled):
table = create.element
preparer = self.dialect.identifier_preparer
- text = "\n" + " ".join(['CREATE'] +
- table._prefixes +
- ['TABLE',
- preparer.format_table(table),
- "("])
+ text = "\nCREATE "
+ if table._prefixes:
+ text += " ".join(table._prefixes) + " "
+ text += "TABLE " + preparer.format_table(table) + " ("
+
separator = "\n"
# if only one primary key, specify it along with the column
@@ -2168,8 +2160,8 @@ class DDLCompiler(Compiled):
))
const = self.create_table_constraints(
- table, _include_foreign_key_constraints=
- create.include_foreign_key_constraints)
+ table, _include_foreign_key_constraints= # noqa
+ create.include_foreign_key_constraints)
if const:
text += ", \n\t" + const
@@ -2223,7 +2215,7 @@ class DDLCompiler(Compiled):
and (
not self.dialect.supports_alter or
not getattr(constraint, 'use_alter', False)
- )) if p is not None
+ )) if p is not None
)
def visit_drop_table(self, drop):
diff --git a/lib/sqlalchemy/sql/crud.py b/lib/sqlalchemy/sql/crud.py
index 2e39f6b36..e6f16b698 100644
--- a/lib/sqlalchemy/sql/crud.py
+++ b/lib/sqlalchemy/sql/crud.py
@@ -319,6 +319,7 @@ class _multiparam_column(elements.ColumnElement):
self.key = "%s_%d" % (original.key, index + 1)
self.original = original
self.default = original.default
+ self.type = original.type
def __eq__(self, other):
return isinstance(other, _multiparam_column) and \
diff --git a/lib/sqlalchemy/sql/default_comparator.py b/lib/sqlalchemy/sql/default_comparator.py
index e77ad765c..68ea5624e 100644
--- a/lib/sqlalchemy/sql/default_comparator.py
+++ b/lib/sqlalchemy/sql/default_comparator.py
@@ -14,7 +14,8 @@ from . import operators
from .elements import BindParameter, True_, False_, BinaryExpression, \
Null, _const_expr, _clause_element_as_expr, \
ClauseList, ColumnElement, TextClause, UnaryExpression, \
- collate, _is_literal, _literal_as_text, ClauseElement, and_, or_
+ collate, _is_literal, _literal_as_text, ClauseElement, and_, or_, \
+ Slice, Visitable, _literal_as_binds
from .selectable import SelectBase, Alias, Selectable, ScalarSelect
@@ -161,6 +162,34 @@ def _in_impl(expr, op, seq_or_selectable, negate_op, **kw):
negate=negate_op)
+def _getitem_impl(expr, op, other, **kw):
+ if isinstance(expr.type, type_api.INDEXABLE):
+ if isinstance(other, slice):
+ if expr.type.zero_indexes:
+ other = slice(
+ other.start + 1,
+ other.stop + 1,
+ other.step
+ )
+ other = Slice(
+ _literal_as_binds(
+ other.start, name=expr.key, type_=type_api.INTEGERTYPE),
+ _literal_as_binds(
+ other.stop, name=expr.key, type_=type_api.INTEGERTYPE),
+ _literal_as_binds(
+ other.step, name=expr.key, type_=type_api.INTEGERTYPE)
+ )
+ else:
+ if expr.type.zero_indexes:
+ other += 1
+
+ other = _literal_as_binds(
+ other, name=expr.key, type_=type_api.INTEGERTYPE)
+ return _binary_operate(expr, op, other, **kw)
+ else:
+ _unsupported_impl(expr, op, other, **kw)
+
+
def _unsupported_impl(expr, op, *arg, **kw):
raise NotImplementedError("Operator '%s' is not supported on "
"this expression" % op.__name__)
@@ -260,7 +289,7 @@ operator_lookup = {
"between_op": (_between_impl, ),
"notbetween_op": (_between_impl, ),
"neg": (_neg_impl,),
- "getitem": (_unsupported_impl,),
+ "getitem": (_getitem_impl,),
"lshift": (_unsupported_impl,),
"rshift": (_unsupported_impl,),
}
@@ -280,7 +309,7 @@ def _check_literal(expr, operator, other):
if isinstance(other, (SelectBase, Alias)):
return other.as_scalar()
- elif not isinstance(other, (ColumnElement, TextClause)):
+ elif not isinstance(other, Visitable):
return expr._bind_param(operator, other)
else:
return other
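A hedged sketch of the index translation above, using the Postgresql ARRAY type, which supports ``zero_indexes`` (the rendered SQL shown in comments is approximate)::

    from sqlalchemy import Table, MetaData, Column, Integer
    from sqlalchemy.dialects import postgresql

    t = Table("t", MetaData(),
              Column("data", postgresql.ARRAY(Integer, zero_indexes=True)))

    # Python index 2 is shifted to the SQL one-based index 3
    print(t.c.data[2].compile(dialect=postgresql.dialect()))
    # roughly: t.data[%(data_1)s]   -- parameter value 3

    # a Python slice becomes a Slice construct; both bounds are shifted
    print(t.c.data[2:5].compile(dialect=postgresql.dialect()))
    # roughly: t.data[%(data_1)s:%(data_2)s]   -- parameter values 3 and 6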
diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py
index 27ecce2b0..70046c66b 100644
--- a/lib/sqlalchemy/sql/elements.py
+++ b/lib/sqlalchemy/sql/elements.py
@@ -124,67 +124,6 @@ def literal(value, type_=None):
return BindParameter(None, value, type_=type_, unique=True)
-def type_coerce(expression, type_):
- """Associate a SQL expression with a particular type, without rendering
- ``CAST``.
-
- E.g.::
-
- from sqlalchemy import type_coerce
-
- stmt = select([type_coerce(log_table.date_string, StringDateTime())])
-
- The above construct will produce SQL that is usually otherwise unaffected
- by the :func:`.type_coerce` call::
-
- SELECT date_string FROM log
-
- However, when result rows are fetched, the ``StringDateTime`` type
- will be applied to result rows on behalf of the ``date_string`` column.
-
- A type that features bound-value handling will also have that behavior
- take effect when literal values or :func:`.bindparam` constructs are
- passed to :func:`.type_coerce` as targets.
- For example, if a type implements the :meth:`.TypeEngine.bind_expression`
- method or :meth:`.TypeEngine.bind_processor` method or equivalent,
- these functions will take effect at statement compilation/execution time
- when a literal value is passed, as in::
-
- # bound-value handling of MyStringType will be applied to the
- # literal value "some string"
- stmt = select([type_coerce("some string", MyStringType)])
-
- :func:`.type_coerce` is similar to the :func:`.cast` function,
- except that it does not render the ``CAST`` expression in the resulting
- statement.
-
- :param expression: A SQL expression, such as a :class:`.ColumnElement`
- expression or a Python string which will be coerced into a bound literal
- value.
-
- :param type_: A :class:`.TypeEngine` class or instance indicating
- the type to which the expression is coerced.
-
- .. seealso::
-
- :func:`.cast`
-
- """
- type_ = type_api.to_instance(type_)
-
- if hasattr(expression, '__clause_element__'):
- return type_coerce(expression.__clause_element__(), type_)
- elif isinstance(expression, BindParameter):
- bp = expression._clone()
- bp.type = type_
- return bp
- elif not isinstance(expression, Visitable):
- if expression is None:
- return Null()
- else:
- return literal(expression, type_=type_)
- else:
- return Label(None, expression, type_=type_)
def outparam(key, type_=None):
@@ -700,6 +639,8 @@ class ColumnElement(operators.ColumnOperators, ClauseElement):
self.type._type_affinity
is type_api.BOOLEANTYPE._type_affinity):
return AsBoolean(self, operators.istrue, operators.isfalse)
+ elif (against in (operators.any_op, operators.all_op)):
+ return Grouping(self)
else:
return self
@@ -715,7 +656,14 @@ class ColumnElement(operators.ColumnOperators, ClauseElement):
@util.memoized_property
def comparator(self):
- return self.type.comparator_factory(self)
+ try:
+ comparator_factory = self.type.comparator_factory
+ except AttributeError:
+ raise TypeError(
+ "Object %r associated with '.type' attribute "
+ "is not a TypeEngine class or object" % self.type)
+ else:
+ return comparator_factory(self)
def __getattr__(self, key):
try:
@@ -837,6 +785,16 @@ class ColumnElement(operators.ColumnOperators, ClauseElement):
else:
return False
+ def cast(self, type_):
+ """Produce a type cast, i.e. ``CAST(<expression> AS <type>)``.
+
+ This is a shortcut to the :func:`~.expression.cast` function.
+
+ .. versionadded:: 1.0.7
+
+ """
+ return Cast(self, type_)
+
def label(self, name):
"""Produce a column label, i.e. ``<columnname> AS <name>``.
@@ -1128,8 +1086,7 @@ class BindParameter(ColumnElement):
_compared_to_type.coerce_compared_value(
_compared_to_operator, value)
else:
- self.type = type_api._type_map.get(type(value),
- type_api.NULLTYPE)
+ self.type = type_api._resolve_value_to_type(value)
elif isinstance(type_, type):
self.type = type_()
else:
@@ -1144,8 +1101,7 @@ class BindParameter(ColumnElement):
cloned.callable = None
cloned.required = False
if cloned.type is type_api.NULLTYPE:
- cloned.type = type_api._type_map.get(type(value),
- type_api.NULLTYPE)
+ cloned.type = type_api._resolve_value_to_type(value)
return cloned
@property
@@ -1840,9 +1796,12 @@ class BooleanClauseList(ClauseList, ColumnElement):
def _construct(cls, operator, continue_on, skip_on, *clauses, **kw):
convert_clauses = []
- clauses = util.coerce_generator_arg(clauses)
+ clauses = [
+ _expression_literal_as_text(clause)
+ for clause in
+ util.coerce_generator_arg(clauses)
+ ]
for clause in clauses:
- clause = _expression_literal_as_text(clause)
if isinstance(clause, continue_on):
continue
@@ -2327,6 +2286,109 @@ class Cast(ColumnElement):
return self.clause._from_objects
+class TypeCoerce(ColumnElement):
+ """Represent a Python-side type-coercion wrapper.
+
+ :class:`.TypeCoerce` supplies the :func:`.expression.type_coerce`
+ function; see that function for usage details.
+
+ .. versionchanged:: 1.1 The :func:`.type_coerce` function now produces
+ a persistent :class:`.TypeCoerce` wrapper object rather than
+ translating the given object in place.
+
+ .. seealso::
+
+ :func:`.expression.type_coerce`
+
+ """
+
+ __visit_name__ = 'type_coerce'
+
+ def __init__(self, expression, type_):
+ """Associate a SQL expression with a particular type, without rendering
+ ``CAST``.
+
+ E.g.::
+
+ from sqlalchemy import type_coerce
+
+ stmt = select([
+ type_coerce(log_table.date_string, StringDateTime())
+ ])
+
+ The above construct will produce a :class:`.TypeCoerce` object, which
+ renders SQL that labels the expression, but otherwise does not
+ modify its value on the SQL side::
+
+ SELECT date_string AS anon_1 FROM log
+
+ When result rows are fetched, the ``StringDateTime`` type
+ will be applied to result rows on behalf of the ``date_string`` column.
+        The rationale for the "anon_1" label is that the type-coerced
+        column remains separate in the list of result columns from other
+        type-coerced or direct values of the target column.  In order to
+ provide a named label for the expression, use
+ :meth:`.ColumnElement.label`::
+
+ stmt = select([
+ type_coerce(
+ log_table.date_string, StringDateTime()).label('date')
+ ])
+
+
+ A type that features bound-value handling will also have that behavior
+ take effect when literal values or :func:`.bindparam` constructs are
+ passed to :func:`.type_coerce` as targets.
+ For example, if a type implements the
+ :meth:`.TypeEngine.bind_expression`
+ method or :meth:`.TypeEngine.bind_processor` method or equivalent,
+ these functions will take effect at statement compilation/execution
+ time when a literal value is passed, as in::
+
+ # bound-value handling of MyStringType will be applied to the
+ # literal value "some string"
+ stmt = select([type_coerce("some string", MyStringType)])
+
+ :func:`.type_coerce` is similar to the :func:`.cast` function,
+ except that it does not render the ``CAST`` expression in the resulting
+ statement.
+
+ :param expression: A SQL expression, such as a :class:`.ColumnElement`
+ expression or a Python string which will be coerced into a bound
+ literal value.
+
+ :param type_: A :class:`.TypeEngine` class or instance indicating
+ the type to which the expression is coerced.
+
+ .. seealso::
+
+ :func:`.cast`
+
+ """
+ self.type = type_api.to_instance(type_)
+ self.clause = _literal_as_binds(expression, type_=self.type)
+
+ def _copy_internals(self, clone=_clone, **kw):
+ self.clause = clone(self.clause, **kw)
+ self.__dict__.pop('typed_expression', None)
+
+ def get_children(self, **kwargs):
+ return self.clause,
+
+ @property
+ def _from_objects(self):
+ return self.clause._from_objects
+
+ @util.memoized_property
+ def typed_expression(self):
+ if isinstance(self.clause, BindParameter):
+ bp = self.clause._clone()
+ bp.type = self.type
+ return bp
+ else:
+ return self.clause
+
+
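A minimal sketch of the result-processing behavior described above, using a hypothetical ``StringDateTime`` decorator (not a type that ships with SQLAlchemy) and an illustrative ``log`` table::

    import datetime
    from sqlalchemy import Table, MetaData, Column, String, select, type_coerce
    from sqlalchemy.types import TypeDecorator

    class StringDateTime(TypeDecorator):
        "Hypothetical type: the column stores 'YYYY-MM-DD' strings."
        impl = String

        def process_result_value(self, value, dialect):
            if value is not None:
                value = datetime.datetime.strptime(value, "%Y-%m-%d")
            return value

    log_table = Table("log", MetaData(), Column("date_string", String))

    # renders roughly "SELECT log.date_string AS anon_1 FROM log";
    # fetched rows have the string converted by StringDateTime
    stmt = select([type_coerce(log_table.c.date_string, StringDateTime())])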
class Extract(ColumnElement):
"""Represent a SQL EXTRACT clause, ``extract(field FROM expr)``."""
@@ -2668,6 +2730,91 @@ class UnaryExpression(ColumnElement):
return self
+class CollectionAggregate(UnaryExpression):
+ """Forms the basis for right-hand collection operator modifiers
+ ANY and ALL.
+
+ The ANY and ALL keywords are available in different ways on different
+ backends. On Postgresql, they only work for an ARRAY type. On
+ MySQL, they only work for subqueries.
+
+ """
+ @classmethod
+ def _create_any(cls, expr):
+ """Produce an ANY expression.
+
+ This may apply to an array type for some dialects (e.g. postgresql),
+ or to a subquery for others (e.g. mysql). e.g.::
+
+ # postgresql '5 = ANY (somearray)'
+ expr = 5 == any_(mytable.c.somearray)
+
+ # mysql '5 = ANY (SELECT value FROM table)'
+ expr = 5 == any_(select([table.c.value]))
+
+ .. versionadded:: 1.1
+
+ .. seealso::
+
+ :func:`.expression.all_`
+
+ """
+
+ expr = _literal_as_binds(expr)
+
+ if expr.is_selectable and hasattr(expr, 'as_scalar'):
+ expr = expr.as_scalar()
+ expr = expr.self_group()
+ return CollectionAggregate(
+ expr, operator=operators.any_op,
+ type_=type_api.NULLTYPE, wraps_column_expression=False)
+
+ @classmethod
+ def _create_all(cls, expr):
+ """Produce an ALL expression.
+
+ This may apply to an array type for some dialects (e.g. postgresql),
+ or to a subquery for others (e.g. mysql). e.g.::
+
+ # postgresql '5 = ALL (somearray)'
+ expr = 5 == all_(mytable.c.somearray)
+
+ # mysql '5 = ALL (SELECT value FROM table)'
+ expr = 5 == all_(select([table.c.value]))
+
+ .. versionadded:: 1.1
+
+ .. seealso::
+
+ :func:`.expression.any_`
+
+ """
+
+ expr = _literal_as_binds(expr)
+ if expr.is_selectable and hasattr(expr, 'as_scalar'):
+ expr = expr.as_scalar()
+ expr = expr.self_group()
+ return CollectionAggregate(
+ expr, operator=operators.all_op,
+ type_=type_api.NULLTYPE, wraps_column_expression=False)
+
+ # operate and reverse_operate are hardwired to
+ # dispatch onto the type comparator directly, so that we can
+ # ensure "reversed" behavior.
+ def operate(self, op, *other, **kwargs):
+ if not operators.is_comparison(op):
+ raise exc.ArgumentError(
+ "Only comparison operators may be used with ANY/ALL")
+ kwargs['reverse'] = True
+ return self.comparator.operate(operators.mirror(op), *other, **kwargs)
+
+ def reverse_operate(self, op, other, **kwargs):
+ # comparison operators should never call reverse_operate
+ assert not operators.is_comparison(op)
+ raise exc.ArgumentError(
+ "Only comparison operators may be used with ANY/ALL")
+
+
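A hedged sketch of the factory functions in use; ``t`` and ``tbl`` are illustrative tables, and which spelling a backend accepts depends on the dialect as noted above::

    from sqlalchemy import Table, MetaData, Column, Integer, select, any_, all_
    from sqlalchemy.dialects import postgresql

    t = Table("t", MetaData(),
              Column("somearray", postgresql.ARRAY(Integer)))
    tbl = Table("tbl", MetaData(), Column("value", Integer))

    # Postgresql, against an ARRAY column:  5 = ANY (t.somearray)
    stmt = select([t]).where(5 == any_(t.c.somearray))

    # subquery form, e.g. for MySQL:  5 > ALL (SELECT tbl.value FROM tbl)
    stmt = select([t]).where(5 > all_(select([tbl.c.value])))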
class AsBoolean(UnaryExpression):
def __init__(self, element, operator, negate):
@@ -2779,6 +2926,32 @@ class BinaryExpression(ColumnElement):
return super(BinaryExpression, self)._negate()
+class Slice(ColumnElement):
+ """Represent SQL for a Python array-slice object.
+
+ This is not a specific SQL construct at this level, but
+ may be interpreted by specific dialects, e.g. Postgresql.
+
+ """
+ __visit_name__ = 'slice'
+
+ def __init__(self, start, stop, step):
+ self.start = start
+ self.stop = stop
+ self.step = step
+ self.type = type_api.NULLTYPE
+
+ def self_group(self, against=None):
+ assert against is operator.getitem
+ return self
+
+
+class IndexExpression(BinaryExpression):
+ """Represent the class of expressions that are like an "index" operation.
+ """
+ pass
+
+
class Grouping(ColumnElement):
"""Represent a grouping within a column expression"""
@@ -2839,21 +3012,21 @@ class Over(ColumnElement):
order_by = None
partition_by = None
- def __init__(self, func, partition_by=None, order_by=None):
+ def __init__(self, element, partition_by=None, order_by=None):
"""Produce an :class:`.Over` object against a function.
Used against aggregate or so-called "window" functions,
for database backends that support window functions.
- E.g.::
+ :func:`~.expression.over` is usually called using
+ the :meth:`.FunctionElement.over` method, e.g.::
- from sqlalchemy import over
- over(func.row_number(), order_by='x')
+ func.row_number().over(order_by='x')
- Would produce "ROW_NUMBER() OVER(ORDER BY x)".
+ Would produce ``ROW_NUMBER() OVER(ORDER BY x)``.
- :param func: a :class:`.FunctionElement` construct, typically
- generated by :data:`~.expression.func`.
+ :param element: a :class:`.FunctionElement`, :class:`.WithinGroup`,
+ or other compatible construct.
:param partition_by: a column element or string, or a list
of such, that will be used as the PARTITION BY clause
of the OVER construct.
@@ -2866,8 +3039,14 @@ class Over(ColumnElement):
.. versionadded:: 0.7
+ .. seealso::
+
+ :data:`.expression.func`
+
+ :func:`.expression.within_group`
+
"""
- self.func = func
+ self.element = element
if order_by is not None:
self.order_by = ClauseList(
*util.to_list(order_by),
@@ -2877,17 +3056,29 @@ class Over(ColumnElement):
*util.to_list(partition_by),
_literal_as_text=_literal_as_label_reference)
+ @property
+ def func(self):
+ """the element referred to by this :class:`.Over`
+ clause.
+
+ .. deprecated:: 1.1 the ``func`` element has been renamed to
+ ``.element``. The two attributes are synonymous though
+ ``.func`` is read-only.
+
+ """
+ return self.element
+
@util.memoized_property
def type(self):
- return self.func.type
+ return self.element.type
def get_children(self, **kwargs):
return [c for c in
- (self.func, self.partition_by, self.order_by)
+ (self.element, self.partition_by, self.order_by)
if c is not None]
def _copy_internals(self, clone=_clone, **kw):
- self.func = clone(self.func, **kw)
+ self.element = clone(self.element, **kw)
if self.partition_by is not None:
self.partition_by = clone(self.partition_by, **kw)
if self.order_by is not None:
@@ -2897,7 +3088,106 @@ class Over(ColumnElement):
def _from_objects(self):
return list(itertools.chain(
*[c._from_objects for c in
- (self.func, self.partition_by, self.order_by)
+ (self.element, self.partition_by, self.order_by)
+ if c is not None]
+ ))
+
+
+class WithinGroup(ColumnElement):
+ """Represent a WITHIN GROUP (ORDER BY) clause.
+
+ This is a special operator against so-called
+    "ordered set aggregate" and "hypothetical
+ set aggregate" functions, including ``percentile_cont()``,
+ ``rank()``, ``dense_rank()``, etc.
+
+ It's supported only by certain database backends, such as PostgreSQL,
+ Oracle and MS SQL Server.
+
+    The :class:`.WithinGroup` construct extracts its type from the
+ method :meth:`.FunctionElement.within_group_type`. If this returns
+ ``None``, the function's ``.type`` is used.
+
+ """
+ __visit_name__ = 'withingroup'
+
+ order_by = None
+
+ def __init__(self, element, *order_by):
+ """Produce a :class:`.WithinGroup` object against a function.
+
+ Used against so-called "ordered set aggregate" and "hypothetical
+ set aggregate" functions, including :class:`.percentile_cont`,
+ :class:`.rank`, :class:`.dense_rank`, etc.
+
+ :func:`~.expression.within_group` is usually called using
+ the :meth:`.FunctionElement.within_group` method, e.g.::
+
+ from sqlalchemy import within_group
+ stmt = select([
+ department.c.id,
+ func.percentile_cont(0.5).within_group(
+ department.c.salary.desc()
+ )
+ ])
+
+ The above statement would produce SQL similar to
+ ``SELECT department.id, percentile_cont(0.5)
+ WITHIN GROUP (ORDER BY department.salary DESC)``.
+
+ :param element: a :class:`.FunctionElement` construct, typically
+ generated by :data:`~.expression.func`.
+ :param \*order_by: one or more column elements that will be used
+ as the ORDER BY clause of the WITHIN GROUP construct.
+
+ .. versionadded:: 1.1
+
+ .. seealso::
+
+ :data:`.expression.func`
+
+ :func:`.expression.over`
+
+ """
+ self.element = element
+ if order_by is not None:
+ self.order_by = ClauseList(
+ *util.to_list(order_by),
+ _literal_as_text=_literal_as_label_reference)
+
+ def over(self, partition_by=None, order_by=None):
+ """Produce an OVER clause against this :class:`.WithinGroup`
+ construct.
+
+ This function has the same signature as that of
+ :meth:`.FunctionElement.over`.
+
+ """
+ return Over(self, partition_by=partition_by, order_by=order_by)
+
+ @util.memoized_property
+ def type(self):
+ wgt = self.element.within_group_type(self)
+ if wgt is not None:
+ return wgt
+ else:
+ return self.element.type
+
+ def get_children(self, **kwargs):
+ return [c for c in
+                (self.element, self.order_by)
+ if c is not None]
+
+ def _copy_internals(self, clone=_clone, **kw):
+ self.element = clone(self.element, **kw)
+ if self.order_by is not None:
+ self.order_by = clone(self.order_by, **kw)
+
+ @property
+ def _from_objects(self):
+ return list(itertools.chain(
+ *[c._from_objects for c in
+ (self.element, self.order_by)
if c is not None]
))
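A hedged sketch of ordered-set and hypothetical-set aggregate usage, continuing the ``department`` example from the docstring above::

    from sqlalchemy import Table, MetaData, Column, Integer, Numeric, select, func

    department = Table(
        "department", MetaData(),
        Column("id", Integer, primary_key=True),
        Column("salary", Numeric))

    # median salary, as in the docstring above
    stmt = select([
        func.percentile_cont(0.5).within_group(department.c.salary.desc())
    ])

    # hypothetical-set form: the rank a salary of 50000 would receive
    stmt = select([
        func.rank(50000).within_group(department.c.salary)
    ])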
diff --git a/lib/sqlalchemy/sql/expression.py b/lib/sqlalchemy/sql/expression.py
index 74b827d7e..27fae8ca4 100644
--- a/lib/sqlalchemy/sql/expression.py
+++ b/lib/sqlalchemy/sql/expression.py
@@ -15,7 +15,7 @@ class.
"""
__all__ = [
- 'Alias', 'ClauseElement', 'ColumnCollection', 'ColumnElement',
+ 'Alias', 'Any', 'All', 'ClauseElement', 'ColumnCollection', 'ColumnElement',
'CompoundSelect', 'Delete', 'FromClause', 'Insert', 'Join', 'Select',
'Selectable', 'TableClause', 'Update', 'alias', 'and_', 'asc', 'between',
'bindparam', 'case', 'cast', 'column', 'delete', 'desc', 'distinct',
@@ -24,19 +24,19 @@ __all__ = [
'literal', 'literal_column', 'not_', 'null', 'nullsfirst', 'nullslast',
'or_', 'outparam', 'outerjoin', 'over', 'select', 'subquery',
'table', 'text',
- 'tuple_', 'type_coerce', 'union', 'union_all', 'update']
+ 'tuple_', 'type_coerce', 'union', 'union_all', 'update', 'within_group']
from .visitors import Visitable
from .functions import func, modifier, FunctionElement, Function
from ..util.langhelpers import public_factory
from .elements import ClauseElement, ColumnElement,\
- BindParameter, UnaryExpression, BooleanClauseList, \
+ BindParameter, CollectionAggregate, UnaryExpression, BooleanClauseList, \
Label, Cast, Case, ColumnClause, TextClause, Over, Null, \
True_, False_, BinaryExpression, Tuple, TypeClause, Extract, \
- Grouping, not_, \
+ Grouping, WithinGroup, not_, \
collate, literal_column, between,\
- literal, outparam, type_coerce, ClauseList, FunctionFilter
+ literal, outparam, TypeCoerce, ClauseList, FunctionFilter
from .elements import SavepointClause, RollbackToSavepointClause, \
ReleaseSavepointClause
@@ -57,6 +57,8 @@ from .dml import Insert, Update, Delete, UpdateBase, ValuesBase
# the functions to be available in the sqlalchemy.sql.* namespace and
# to be auto-cross-documenting from the function to the class itself.
+all_ = public_factory(CollectionAggregate._create_all, ".expression.all_")
+any_ = public_factory(CollectionAggregate._create_any, ".expression.any_")
and_ = public_factory(BooleanClauseList.and_, ".expression.and_")
or_ = public_factory(BooleanClauseList.or_, ".expression.or_")
bindparam = public_factory(BindParameter, ".expression.bindparam")
@@ -65,6 +67,7 @@ text = public_factory(TextClause._create_text, ".expression.text")
table = public_factory(TableClause, ".expression.table")
column = public_factory(ColumnClause, ".expression.column")
over = public_factory(Over, ".expression.over")
+within_group = public_factory(WithinGroup, ".expression.within_group")
label = public_factory(Label, ".expression.label")
case = public_factory(Case, ".expression.case")
cast = public_factory(Cast, ".expression.cast")
@@ -89,6 +92,7 @@ asc = public_factory(UnaryExpression._create_asc, ".expression.asc")
desc = public_factory(UnaryExpression._create_desc, ".expression.desc")
distinct = public_factory(
UnaryExpression._create_distinct, ".expression.distinct")
+type_coerce = public_factory(TypeCoerce, ".expression.type_coerce")
true = public_factory(True_._instance, ".expression.true")
false = public_factory(False_._instance, ".expression.false")
null = public_factory(Null._instance, ".expression.null")
diff --git a/lib/sqlalchemy/sql/functions.py b/lib/sqlalchemy/sql/functions.py
index 538a2c549..6cfbd12b3 100644
--- a/lib/sqlalchemy/sql/functions.py
+++ b/lib/sqlalchemy/sql/functions.py
@@ -12,9 +12,9 @@ from . import sqltypes, schema
from .base import Executable, ColumnCollection
from .elements import ClauseList, Cast, Extract, _literal_as_binds, \
literal_column, _type_from_args, ColumnElement, _clone,\
- Over, BindParameter, FunctionFilter
+ Over, BindParameter, FunctionFilter, Grouping, WithinGroup
from .selectable import FromClause, Select, Alias
-
+from . import util as sqlutil
from . import operators
from .visitors import VisitableType
from .. import util
@@ -116,6 +116,21 @@ class FunctionElement(Executable, ColumnElement, FromClause):
"""
return Over(self, partition_by=partition_by, order_by=order_by)
+ def within_group(self, *order_by):
+ """Produce a WITHIN GROUP (ORDER BY expr) clause against this function.
+
+ Used against so-called "ordered set aggregate" and "hypothetical
+ set aggregate" functions, including :class:`.percentile_cont`,
+ :class:`.rank`, :class:`.dense_rank`, etc.
+
+ See :func:`~.expression.within_group` for a full description.
+
+ .. versionadded:: 1.1
+
+
+ """
+ return WithinGroup(self, *order_by)
+
def filter(self, *criterion):
"""Produce a FILTER clause against this function.
@@ -157,6 +172,18 @@ class FunctionElement(Executable, ColumnElement, FromClause):
self._reset_exported()
FunctionElement.clauses._reset(self)
+ def within_group_type(self, within_group):
+ """For types that define their return type as based on the criteria
+ within a WITHIN GROUP (ORDER BY) expression, called by the
+ :class:`.WithinGroup` construct.
+
+ Returns None by default, in which case the function's normal ``.type``
+ is used.
+
+ """
+
+ return None
+
def alias(self, name=None, flat=False):
"""Produce a :class:`.Alias` construct against this
:class:`.FunctionElement`.
@@ -233,6 +260,16 @@ class FunctionElement(Executable, ColumnElement, FromClause):
return BindParameter(None, obj, _compared_to_operator=operator,
_compared_to_type=self.type, unique=True)
+ def self_group(self, against=None):
+ # for the moment, we are parenthesizing all array-returning
+ # expressions against getitem. This may need to be made
+ # more portable if in the future we support other DBs
+ # besides postgresql.
+ if against is operators.getitem:
+ return Grouping(self)
+ else:
+ return super(FunctionElement, self).self_group(against=against)
+
class _FunctionGenerator(object):
"""Generate :class:`.Function` objects based on getattr calls."""
@@ -483,7 +520,7 @@ class GenericFunction(util.with_metaclass(_GenericMeta, Function)):
def __init__(self, *args, **kwargs):
parsed_args = kwargs.pop('_parsed_args', None)
if parsed_args is None:
- parsed_args = [_literal_as_binds(c) for c in args]
+ parsed_args = [_literal_as_binds(c, self.name) for c in args]
self.packagenames = []
self._bind = kwargs.get('bind', None)
self.clause_expr = ClauseList(
@@ -528,10 +565,10 @@ class ReturnTypeFromArgs(GenericFunction):
"""Define a function whose return type is the same as its arguments."""
def __init__(self, *args, **kwargs):
- args = [_literal_as_binds(c) for c in args]
+ args = [_literal_as_binds(c, self.name) for c in args]
kwargs.setdefault('type_', _type_from_args(args))
kwargs['_parsed_args'] = args
- GenericFunction.__init__(self, *args, **kwargs)
+ super(ReturnTypeFromArgs, self).__init__(*args, **kwargs)
class coalesce(ReturnTypeFromArgs):
@@ -579,7 +616,7 @@ class count(GenericFunction):
def __init__(self, expression=None, **kwargs):
if expression is None:
expression = literal_column('*')
- GenericFunction.__init__(self, expression, **kwargs)
+ super(count, self).__init__(expression, **kwargs)
class current_date(AnsiFunction):
@@ -616,3 +653,150 @@ class sysdate(AnsiFunction):
class user(AnsiFunction):
type = sqltypes.String
+
+
+class array_agg(GenericFunction):
+ """support for the ARRAY_AGG function.
+
+ The ``func.array_agg(expr)`` construct returns an expression of
+ type :class:`.Array`.
+
+ e.g.::
+
+ stmt = select([func.array_agg(table.c.values)[2:5]])
+
+ .. versionadded:: 1.1
+
+ .. seealso::
+
+ :func:`.postgresql.array_agg` - PostgreSQL-specific version that
+ returns :class:`.ARRAY`, which has PG-specific operators added.
+
+ """
+
+ type = sqltypes.Array
+
+ def __init__(self, *args, **kwargs):
+ args = [_literal_as_binds(c) for c in args]
+ kwargs.setdefault('type_', self.type(_type_from_args(args)))
+ kwargs['_parsed_args'] = args
+ super(array_agg, self).__init__(*args, **kwargs)
+
+
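A small sketch of the typing noted above: because ``func.array_agg`` derives an :class:`.Array` of its argument's type, the index/slice operators are available on the result (illustrative table)::

    from sqlalchemy import Table, MetaData, Column, Integer, select, func

    table = Table("t", MetaData(), Column("data", Integer))

    agg = func.array_agg(table.c.data)   # expression type is Array(Integer)
    stmt = select([agg[1]])              # index into the aggregated array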
+class OrderedSetAgg(GenericFunction):
+ """Define a function where the return type is based on the sort
+ expression type as defined by the expression passed to the
+ :meth:`.FunctionElement.within_group` method."""
+
+ array_for_multi_clause = False
+
+ def within_group_type(self, within_group):
+ func_clauses = self.clause_expr.element
+ order_by = sqlutil.unwrap_order_by(within_group.order_by)
+ if self.array_for_multi_clause and len(func_clauses.clauses) > 1:
+ return sqltypes.Array(order_by[0].type)
+ else:
+ return order_by[0].type
+
+
+class mode(OrderedSetAgg):
+ """implement the ``mode`` ordered-set aggregate function.
+
+ This function must be used with the :meth:`.FunctionElement.within_group`
+ modifier to supply a sort expression to operate upon.
+
+ The return type of this function is the same as the sort expression.
+
+ .. versionadded:: 1.1
+
+ """
+
+
+class percentile_cont(OrderedSetAgg):
+ """implement the ``percentile_cont`` ordered-set aggregate function.
+
+ This function must be used with the :meth:`.FunctionElement.within_group`
+ modifier to supply a sort expression to operate upon.
+
+ The return type of this function is the same as the sort expression,
+ or if the arguments are an array, an :class:`.Array` of the sort
+ expression's type.
+
+ .. versionadded:: 1.1
+
+ """
+
+ array_for_multi_clause = True
+
+
+class percentile_disc(OrderedSetAgg):
+ """implement the ``percentile_disc`` ordered-set aggregate function.
+
+ This function must be used with the :meth:`.FunctionElement.within_group`
+ modifier to supply a sort expression to operate upon.
+
+ The return type of this function is the same as the sort expression,
+ or if the arguments are an array, an :class:`.Array` of the sort
+ expression's type.
+
+ .. versionadded:: 1.1
+
+ """
+
+ array_for_multi_clause = True
+
+
+class rank(GenericFunction):
+ """Implement the ``rank`` hypothetical-set aggregate function.
+
+ This function must be used with the :meth:`.FunctionElement.within_group`
+ modifier to supply a sort expression to operate upon.
+
+ The return type of this function is :class:`.Integer`.
+
+ .. versionadded:: 1.1
+
+ """
+ type = sqltypes.Integer()
+
+
+class dense_rank(GenericFunction):
+ """Implement the ``dense_rank`` hypothetical-set aggregate function.
+
+ This function must be used with the :meth:`.FunctionElement.within_group`
+ modifier to supply a sort expression to operate upon.
+
+ The return type of this function is :class:`.Integer`.
+
+ .. versionadded:: 1.1
+
+ """
+ type = sqltypes.Integer()
+
+
+class percent_rank(GenericFunction):
+ """Implement the ``percent_rank`` hypothetical-set aggregate function.
+
+ This function must be used with the :meth:`.FunctionElement.within_group`
+ modifier to supply a sort expression to operate upon.
+
+ The return type of this function is :class:`.Numeric`.
+
+ .. versionadded:: 1.1
+
+ """
+ type = sqltypes.Numeric()
+
+
+class cume_dist(GenericFunction):
+ """Implement the ``cume_dist`` hypothetical-set aggregate function.
+
+ This function must be used with the :meth:`.FunctionElement.within_group`
+ modifier to supply a sort expression to operate upon.
+
+ The return type of this function is :class:`.Numeric`.
+
+ .. versionadded:: 1.1
+
+ """
+ type = sqltypes.Numeric()
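A hedged sketch of how the return types listed above are derived; the column type used here is an assumption::

    from sqlalchemy import Table, MetaData, Column, Numeric, func

    t = Table("t", MetaData(), Column("price", Numeric))

    # mode() / percentile_cont(): the type follows the ORDER BY expression
    func.mode().within_group(t.c.price).type                 # Numeric
    func.percentile_cont(0.25).within_group(t.c.price).type  # Numeric

    # rank() / dense_rank() are Integer; percent_rank() / cume_dist() are Numeric
    func.rank(100).within_group(t.c.price).type               # Integer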
diff --git a/lib/sqlalchemy/sql/operators.py b/lib/sqlalchemy/sql/operators.py
index 17a9d3086..da3576466 100644
--- a/lib/sqlalchemy/sql/operators.py
+++ b/lib/sqlalchemy/sql/operators.py
@@ -214,10 +214,13 @@ class custom_op(object):
"""
__name__ = 'custom_op'
- def __init__(self, opstring, precedence=0, is_comparison=False):
+ def __init__(
+ self, opstring, precedence=0, is_comparison=False,
+ natural_self_precedent=False):
self.opstring = opstring
self.precedence = precedence
self.is_comparison = is_comparison
+ self.natural_self_precedent = natural_self_precedent
def __eq__(self, other):
return isinstance(other, custom_op) and \
@@ -619,6 +622,24 @@ class ColumnOperators(Operators):
"""
return self.operate(distinct_op)
+ def any_(self):
+ """Produce a :func:`~.expression.any_` clause against the
+ parent object.
+
+ .. versionadded:: 1.1
+
+ """
+ return self.operate(any_op)
+
+ def all_(self):
+ """Produce a :func:`~.expression.all_` clause against the
+ parent object.
+
+ .. versionadded:: 1.1
+
+ """
+ return self.operate(all_op)
+
def __add__(self, other):
"""Implement the ``+`` operator.
@@ -752,6 +773,14 @@ def distinct_op(a):
return a.distinct()
+def any_op(a):
+ return a.any_()
+
+
+def all_op(a):
+ return a.all_()
+
+
def startswith_op(a, b, escape=None):
return a.startswith(b, escape=escape)
@@ -826,6 +855,28 @@ def is_ordering_modifier(op):
return op in (asc_op, desc_op,
nullsfirst_op, nullslast_op)
+
+def is_natural_self_precedent(op):
+ return op in _natural_self_precedent or \
+ isinstance(op, custom_op) and op.natural_self_precedent
+
+_mirror = {
+ gt: lt,
+ ge: le,
+ lt: gt,
+ le: ge
+}
+
+
+def mirror(op):
+ """rotate a comparison operator 180 degrees.
+
+ Note this is not the same as negation.
+
+ """
+ return _mirror.get(op, op)
+
+
_associative = _commutative.union([concat_op, and_, or_])
_natural_self_precedent = _associative.union([getitem])
@@ -834,12 +885,15 @@ parenthesize (a op b).
"""
+
_asbool = util.symbol('_asbool', canonical=-10)
_smallest = util.symbol('_smallest', canonical=-100)
_largest = util.symbol('_largest', canonical=100)
_PRECEDENCE = {
from_: 15,
+ any_op: 15,
+ all_op: 15,
getitem: 15,
mul: 8,
truediv: 8,
@@ -893,7 +947,7 @@ _PRECEDENCE = {
def is_precedent(operator, against):
- if operator is against and operator in _natural_self_precedent:
+ if operator is against and is_natural_self_precedent(operator):
return False
else:
return (_PRECEDENCE.get(operator,
diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py
index 245c54817..73341053d 100644
--- a/lib/sqlalchemy/sql/selectable.py
+++ b/lib/sqlalchemy/sql/selectable.py
@@ -224,7 +224,7 @@ class HasSuffixes(object):
stmt = select([col1, col2]).cte().suffix_with(
"cycle empno set y_cycle to 1 default 0", dialect="oracle")
- Multiple prefixes can be specified by multiple calls
+ Multiple suffixes can be specified by multiple calls
to :meth:`.suffix_with`.
:param \*expr: textual or :class:`.ClauseElement` construct which
@@ -1101,6 +1101,14 @@ class Alias(FromClause):
or 'anon'))
self.name = name
+ def self_group(self, target=None):
+ if isinstance(target, CompoundSelect) and \
+ isinstance(self.original, Select) and \
+ self.original._needs_parens_for_grouping():
+ return FromGrouping(self)
+
+ return super(Alias, self).self_group(target)
+
@property
def description(self):
if util.py3k:
@@ -3208,6 +3216,13 @@ class Select(HasPrefixes, HasSuffixes, GenerativeSelect):
return None
return None
+ def _needs_parens_for_grouping(self):
+ return (
+ self._limit_clause is not None or
+ self._offset_clause is not None or
+ bool(self._order_by_clause.clauses)
+ )
+
def self_group(self, against=None):
"""return a 'grouping' construct as per the ClauseElement
specification.
@@ -3217,7 +3232,8 @@ class Select(HasPrefixes, HasSuffixes, GenerativeSelect):
expressions and should not require explicit use.
"""
- if isinstance(against, CompoundSelect):
+ if isinstance(against, CompoundSelect) and \
+ not self._needs_parens_for_grouping():
return self
return FromGrouping(self)
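A hedged sketch of the grouping change: a SELECT carrying LIMIT/OFFSET or ORDER BY that is embedded in a compound select is now parenthesized (whether a given backend accepts the parentheses is dialect-specific)::

    from sqlalchemy import Table, MetaData, Column, Integer, select, union_all

    t = Table("t", MetaData(), Column("x", Integer))

    s1 = select([t.c.x]).order_by(t.c.x).limit(5)
    s2 = select([t.c.x])

    # roughly: (SELECT t.x FROM t ORDER BY t.x LIMIT :param_1)
    #          UNION ALL SELECT t.x FROM t
    stmt = union_all(s1, s2)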
diff --git a/lib/sqlalchemy/sql/sqltypes.py b/lib/sqlalchemy/sql/sqltypes.py
index 7e2e601e2..4abb9b15a 100644
--- a/lib/sqlalchemy/sql/sqltypes.py
+++ b/lib/sqlalchemy/sql/sqltypes.py
@@ -13,10 +13,11 @@ import datetime as dt
import codecs
from .type_api import TypeEngine, TypeDecorator, to_instance
-from .elements import quoted_name, type_coerce, _defer_name
+from .elements import quoted_name, TypeCoerce as type_coerce, _defer_name
from .. import exc, util, processors
from .base import _bind_or_error, SchemaEventTarget
from . import operators
+from .. import inspection
from .. import event
from ..util import pickle
import decimal
@@ -68,7 +69,39 @@ class Concatenable(object):
)):
return operators.concat_op, self.expr.type
else:
- return op, self.expr.type
+ return super(Concatenable.Comparator, self)._adapt_expression(
+ op, other_comparator)
+
+ comparator_factory = Comparator
+
+
+class Indexable(object):
+ """A mixin that marks a type as supporting indexing operations,
+ such as array or JSON structures.
+
+
+ .. versionadded:: 1.1.0
+
+
+ """
+
+ zero_indexes = False
+ """if True, Python zero-based indexes should be interpreted as one-based
+ on the SQL expression side."""
+
+ class Comparator(TypeEngine.Comparator):
+
+ def _setup_getitem(self, index):
+ raise NotImplementedError()
+
+ def __getitem__(self, index):
+ operator, adjusted_right_expr, result_type = \
+ self._setup_getitem(index)
+ return self.operate(
+ operator,
+ adjusted_right_expr,
+ result_type=result_type
+ )
comparator_factory = Comparator
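A minimal sketch of what an :class:`.Indexable` participant provides; this is a hypothetical type, not one that ships with SQLAlchemy::

    from sqlalchemy import Integer
    from sqlalchemy.sql import operators, sqltypes

    class IntValuedMap(sqltypes.Indexable, sqltypes.TypeEngine):
        "Hypothetical indexable type whose element values are integers."

        class Comparator(sqltypes.Indexable.Comparator):
            def _setup_getitem(self, index):
                # operator to use, right-hand expression, type of the result
                return operators.getitem, index, Integer()

        comparator_factory = Comparator

    # an expression of this type then supports expr[5], producing a
    # "getitem" binary expression whose type is Integer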
@@ -215,9 +248,6 @@ class String(Concatenable, TypeEngine):
self.convert_unicode != 'force_nocheck'
)
if needs_convert:
- to_unicode = processors.to_unicode_processor_factory(
- dialect.encoding, self.unicode_error)
-
if needs_isinstance:
return processors.to_conditional_unicode_processor_factory(
dialect.encoding, self.unicode_error)
@@ -1466,6 +1496,246 @@ class Interval(_DateAffinity, TypeDecorator):
return self.impl.coerce_compared_value(op, value)
+class Array(Indexable, Concatenable, TypeEngine):
+ """Represent a SQL Array type.
+
+ .. note:: This type serves as the basis for all ARRAY operations.
+ However, currently **only the Postgresql backend has support
+ for SQL arrays in SQLAlchemy**. It is recommended to use the
+ :class:`.postgresql.ARRAY` type directly when using ARRAY types
+ with PostgreSQL, as it provides additional operators specific
+ to that backend.
+
+ :class:`.Array` is part of the Core in support of various SQL standard
+ functions such as :class:`.array_agg` which explicitly involve arrays;
+ however, with the exception of the PostgreSQL backend and possibly
+ some third-party dialects, no other SQLAlchemy built-in dialect has
+ support for this type.
+
+ An :class:`.Array` type is constructed given the "type"
+ of element::
+
+ mytable = Table("mytable", metadata,
+ Column("data", Array(Integer))
+ )
+
+ The above type represents an N-dimensional array,
+ meaning a supporting backend such as Postgresql will interpret values
+ with any number of dimensions automatically. To produce an INSERT
+ construct that passes in a 1-dimensional array of integers::
+
+ connection.execute(
+ mytable.insert(),
+ data=[1,2,3]
+ )
+
+ The :class:`.Array` type can be constructed given a fixed number
+ of dimensions::
+
+ mytable = Table("mytable", metadata,
+ Column("data", Array(Integer, dimensions=2))
+ )
+
+ Sending a number of dimensions is optional, but recommended if the
+ datatype is to represent arrays of more than one dimension. This number
+ is used:
+
+ * When emitting the type declaration itself to the database, e.g.
+ ``INTEGER[][]``
+
+ * When translating Python values to database values, and vice versa, e.g.
+ an ARRAY of :class:`.Unicode` objects uses this number to efficiently
+ access the string values inside of array structures without resorting
+ to per-row type inspection
+
+ * When used with the Python ``getitem`` accessor, the number of dimensions
+ serves to define the kind of type that the ``[]`` operator should
+ return, e.g. for an ARRAY of INTEGER with two dimensions::
+
+ >>> expr = table.c.column[5] # returns ARRAY(Integer, dimensions=1)
+ >>> expr = expr[6] # returns Integer
+
+ For 1-dimensional arrays, an :class:`.Array` instance with no
+ dimension parameter will generally assume single-dimensional behaviors.
+
+ SQL expressions of type :class:`.Array` have support for "index" and
+ "slice" behavior. The Python ``[]`` operator works normally here, given
+ integer indexes or slices. Arrays default to 1-based indexing.
+ The operator produces binary expression
+ constructs which will produce the appropriate SQL, both for
+ SELECT statements::
+
+ select([mytable.c.data[5], mytable.c.data[2:7]])
+
+ as well as UPDATE statements when the :meth:`.Update.values` method
+ is used::
+
+ mytable.update().values({
+ mytable.c.data[5]: 7,
+ mytable.c.data[2:7]: [1, 2, 3]
+ })
+
+ The :class:`.Array` type also provides for the operators
+ :meth:`.Array.Comparator.any` and :meth:`.Array.Comparator.all`.
+ The PostgreSQL-specific version of :class:`.Array` also provides additional
+ operators.
+
+ .. versionadded:: 1.1.0
+
+ .. seealso::
+
+ :class:`.postgresql.ARRAY`
+
+ """
+ __visit_name__ = 'ARRAY'
+
+ class Comparator(Indexable.Comparator, Concatenable.Comparator):
+
+ """Define comparison operations for :class:`.Array`.
+
+ More operators are available on the dialect-specific form
+ of this type. See :class:`.postgresql.ARRAY.Comparator`.
+
+ """
+
+ def _setup_getitem(self, index):
+ if isinstance(index, slice):
+ return_type = self.type
+ elif self.type.dimensions is None or self.type.dimensions == 1:
+ return_type = self.type.item_type
+ else:
+ adapt_kw = {'dimensions': self.type.dimensions - 1}
+ return_type = self.type.adapt(self.type.__class__, **adapt_kw)
+
+ return operators.getitem, index, return_type
+
+ @util.dependencies("sqlalchemy.sql.elements")
+ def any(self, elements, other, operator=None):
+ """Return ``other operator ANY (array)`` clause.
+
+            Argument places are switched, because ANY requires the array
+            expression to be on the right-hand side.
+
+ E.g.::
+
+ from sqlalchemy.sql import operators
+
+ conn.execute(
+ select([table.c.data]).where(
+ table.c.data.any(7, operator=operators.lt)
+ )
+ )
+
+ :param other: expression to be compared
+ :param operator: an operator object from the
+ :mod:`sqlalchemy.sql.operators`
+ package, defaults to :func:`.operators.eq`.
+
+ .. seealso::
+
+ :func:`.sql.expression.any_`
+
+ :meth:`.Array.Comparator.all`
+
+ """
+ operator = operator if operator else operators.eq
+ return operator(
+ elements._literal_as_binds(other),
+ elements.CollectionAggregate._create_any(self.expr)
+ )
+
+ @util.dependencies("sqlalchemy.sql.elements")
+ def all(self, elements, other, operator=None):
+ """Return ``other operator ALL (array)`` clause.
+
+            Argument places are switched, because ALL requires the array
+            expression to be on the right-hand side.
+
+ E.g.::
+
+ from sqlalchemy.sql import operators
+
+ conn.execute(
+ select([table.c.data]).where(
+ table.c.data.all(7, operator=operators.lt)
+ )
+ )
+
+ :param other: expression to be compared
+ :param operator: an operator object from the
+ :mod:`sqlalchemy.sql.operators`
+ package, defaults to :func:`.operators.eq`.
+
+ .. seealso::
+
+ :func:`.sql.expression.all_`
+
+ :meth:`.Array.Comparator.any`
+
+ """
+ operator = operator if operator else operators.eq
+ return operator(
+ elements._literal_as_binds(other),
+ elements.CollectionAggregate._create_all(self.expr)
+ )
+
+ comparator_factory = Comparator
+
+ def __init__(self, item_type, as_tuple=False, dimensions=None,
+ zero_indexes=False):
+ """Construct an :class:`.Array`.
+
+ E.g.::
+
+ Column('myarray', Array(Integer))
+
+ Arguments are:
+
+ :param item_type: The data type of items of this array. Note that
+ dimensionality is irrelevant here, so multi-dimensional arrays like
+ ``INTEGER[][]``, are constructed as ``Array(Integer)``, not as
+          ``INTEGER[][]`` are constructed as ``Array(Integer)``, not as
+
+ :param as_tuple=False: Specify whether return results
+ should be converted to tuples from lists. This parameter is
+ not generally needed as a Python list corresponds well
+ to a SQL array.
+
+ :param dimensions: if non-None, the ARRAY will assume a fixed
+ number of dimensions. This impacts how the array is declared
+ on the database, how it goes about interpreting Python and
+ result values, as well as how expression behavior in conjunction
+ with the "getitem" operator works. See the description at
+ :class:`.Array` for additional detail.
+
+ :param zero_indexes=False: when True, index values will be converted
+ between Python zero-based and SQL one-based indexes, e.g.
+ a value of one will be added to all index values before passing
+ to the database.
+
+ """
+ if isinstance(item_type, Array):
+ raise ValueError("Do not nest ARRAY types; ARRAY(basetype) "
+ "handles multi-dimensional arrays of basetype")
+ if isinstance(item_type, type):
+ item_type = item_type()
+ self.item_type = item_type
+ self.as_tuple = as_tuple
+ self.dimensions = dimensions
+ self.zero_indexes = zero_indexes
+
+ @property
+ def hashable(self):
+ return self.as_tuple
+
+ @property
+ def python_type(self):
+ return list
+
+ def compare_values(self, x, y):
+ return x == y
+
+
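A short sketch of the :meth:`.Array.Comparator.any` / :meth:`.Array.Comparator.all` methods described above, continuing the ``mytable`` example (Postgresql-style rendering shown in comments)::

    from sqlalchemy import Table, MetaData, Column, Integer, select
    from sqlalchemy.sql import operators
    from sqlalchemy.sql.sqltypes import Array

    mytable = Table("mytable", MetaData(), Column("data", Array(Integer)))

    # 7 = ANY (mytable.data)
    stmt = select([mytable.c.data]).where(mytable.c.data.any(7))

    # 7 < ALL (mytable.data)
    stmt = select([mytable.c.data]).where(
        mytable.c.data.all(7, operator=operators.lt))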
class REAL(Float):
"""The SQL REAL type."""
@@ -1648,6 +1918,8 @@ class NullType(TypeEngine):
_isnull = True
+ hashable = False
+
def literal_processor(self, dialect):
def process(value):
return "NULL"
@@ -1704,6 +1976,26 @@ else:
_type_map[unicode] = Unicode()
_type_map[str] = String()
+_type_map_get = _type_map.get
+
+
+def _resolve_value_to_type(value):
+ _result_type = _type_map_get(type(value), False)
+ if _result_type is False:
+ # use inspect() to detect SQLAlchemy built-in
+ # objects.
+ insp = inspection.inspect(value, False)
+ if (
+ insp is not None and
+ # foil mock.Mock() and other impostors by ensuring
+ # the inspection target itself self-inspects
+ insp.__class__ in inspection._registrars
+ ):
+ raise exc.ArgumentError(
+ "Object %r is not legal as a SQL literal value" % value)
+ return NULLTYPE
+ else:
+ return _result_type
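A hedged illustration of the guard above: a SQLAlchemy construct passed where a plain literal is expected now fails loudly rather than silently binding with ``NULLTYPE``::

    from sqlalchemy import Table, MetaData, Column, Integer, literal

    t = Table("t", MetaData(), Column("x", Integer))

    literal(5)    # fine - resolves to an Integer bind parameter
    literal(t)    # raises ArgumentError, roughly:
                  # "Object Table('t', ...) is not legal as a SQL literal value"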
# back-assign to type_api
from . import type_api
@@ -1712,6 +2004,6 @@ type_api.STRINGTYPE = STRINGTYPE
type_api.INTEGERTYPE = INTEGERTYPE
type_api.NULLTYPE = NULLTYPE
type_api.MATCHTYPE = MATCHTYPE
-type_api._type_map = _type_map
-
+type_api.INDEXABLE = Indexable
+type_api._resolve_value_to_type = _resolve_value_to_type
TypeEngine.Comparator.BOOLEANTYPE = BOOLEANTYPE
diff --git a/lib/sqlalchemy/sql/type_api.py b/lib/sqlalchemy/sql/type_api.py
index a55eed981..3b5391234 100644
--- a/lib/sqlalchemy/sql/type_api.py
+++ b/lib/sqlalchemy/sql/type_api.py
@@ -13,6 +13,7 @@
from .. import exc, util
from . import operators
from .visitors import Visitable, VisitableType
+from .base import SchemaEventTarget
# these are back-assigned by sqltypes.
BOOLEANTYPE = None
@@ -20,6 +21,8 @@ INTEGERTYPE = None
NULLTYPE = None
STRINGTYPE = None
MATCHTYPE = None
+INDEXABLE = None
+_resolve_value_to_type = None
class TypeEngine(Visitable):
@@ -90,7 +93,7 @@ class TypeEngine(Visitable):
boolean comparison or special SQL keywords like MATCH or BETWEEN.
"""
- return op, other_comparator.type
+ return op, self.type
def __reduce__(self):
return _reconstitute_comparator, (self.expr, )
@@ -128,6 +131,76 @@ class TypeEngine(Visitable):
"""
+ should_evaluate_none = False
+ """If True, the Python constant ``None`` is considered to be handled
+ explicitly by this type.
+
+ The ORM uses this flag to indicate that a positive value of ``None``
+ is passed to the column in an INSERT statement, rather than omitting
+ the column from the INSERT statement which has the effect of firing
+ off column-level defaults. It also allows types which have special
+ behavior for Python None, such as a JSON type, to indicate that
+ they'd like to handle the None value explicitly.
+
+ To set this flag on an existing type, use the
+ :meth:`.TypeEngine.evaluates_none` method.
+
+ .. seealso::
+
+ :meth:`.TypeEngine.evaluates_none`
+
+ .. versionadded:: 1.1
+
+
+ """
+
+ def evaluates_none(self):
+ """Return a copy of this type which has the :attr:`.should_evaluate_none`
+ flag set to True.
+
+ E.g.::
+
+ Table(
+ 'some_table', metadata,
+ Column(
+ String(50).evaluates_none(),
+ nullable=True,
+ server_default='no value')
+ )
+
+ The ORM uses this flag to indicate that a positive value of ``None``
+ is passed to the column in an INSERT statement, rather than omitting
+ the column from the INSERT statement which has the effect of firing
+ off column-level defaults. It also allows for types which have
+ special behavior associated with the Python None value to indicate
+ that the value doesn't necessarily translate into SQL NULL; a
+ prime example of this is a JSON type which may wish to persist the
+ JSON value ``'null'``.
+
+        In all cases, the actual NULL SQL value can always be
+        persisted in any column by using
+ the :obj:`~.expression.null` SQL construct in an INSERT statement
+ or associated with an ORM-mapped attribute.
+
+ .. versionadded:: 1.1
+
+ .. seealso::
+
+ :ref:`session_forcing_null` - in the ORM documentation
+
+ :paramref:`.postgresql.JSON.none_as_null` - Postgresql JSON
+ interaction with this flag.
+
+ :attr:`.TypeEngine.should_evaluate_none` - class-level flag
+
+ """
+ typ = self.copy()
+ typ.should_evaluate_none = True
+ return typ
+
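A short sketch, assuming a declarative ``Base`` and an active ``session``, of the ORM behavior the docstring above describes for a column with a server default::

    from sqlalchemy import Column, Integer, String

    class SomeObject(Base):
        __tablename__ = "some_table"
        id = Column(Integer, primary_key=True)
        data = Column(String(50).evaluates_none(),
                      nullable=True, server_default="no value")

    session.add(SomeObject(id=1, data=None))
    session.commit()
    # with evaluates_none(), "data" is included in the INSERT as NULL;
    # without it, the explicit None would cause the column to be omitted
    # and the server default 'no value' to take effect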
+ def copy(self, **kw):
+ return self.adapt(self.__class__)
+
def compare_against_backend(self, dialect, conn_type):
"""Compare this type against the given backend type.
@@ -440,7 +513,7 @@ class TypeEngine(Visitable):
end-user customization of this behavior.
"""
- _coerced_type = _type_map.get(type(value), NULLTYPE)
+ _coerced_type = _resolve_value_to_type(value)
if _coerced_type is NULLTYPE or _coerced_type._type_affinity \
is self._type_affinity:
return self
@@ -577,7 +650,7 @@ class UserDefinedType(util.with_metaclass(VisitableCheckKWArg, TypeEngine)):
return self
-class TypeDecorator(TypeEngine):
+class TypeDecorator(SchemaEventTarget, TypeEngine):
"""Allows the creation of types which add additional functionality
to an existing type.
@@ -757,6 +830,18 @@ class TypeDecorator(TypeEngine):
"""
return self.impl._type_affinity
+ def _set_parent(self, column):
+        """Support SchemaEventTarget"""
+
+ if isinstance(self.impl, SchemaEventTarget):
+ self.impl._set_parent(column)
+
+ def _set_parent_with_dispatch(self, parent):
+        """Support SchemaEventTarget"""
+
+ if isinstance(self.impl, SchemaEventTarget):
+ self.impl._set_parent_with_dispatch(parent)
+
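A hedged sketch of why the propagation above matters: a :class:`.TypeDecorator` whose ``impl`` is itself a :class:`.SchemaEventTarget`, such as :class:`.Enum`, now has the inner type's schema events fired when the column is attached (``LowerEnum`` is hypothetical)::

    import sqlalchemy as sa
    from sqlalchemy.types import TypeDecorator

    class LowerEnum(TypeDecorator):
        "Hypothetical decorator around Enum, which is a SchemaEventTarget."
        impl = sa.Enum

        def process_bind_param(self, value, dialect):
            return value.lower() if value is not None else None

    t = sa.Table(
        "t", sa.MetaData(),
        sa.Column("status", LowerEnum("new", "done", name="status_enum")))
    # the inner Enum receives _set_parent / _set_parent_with_dispatch,
    # so its constraint / type-creation behavior attaches as usual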
def type_engine(self, dialect):
"""Return a dialect-specific :class:`.TypeEngine` instance
for this :class:`.TypeDecorator`.
diff --git a/lib/sqlalchemy/sql/util.py b/lib/sqlalchemy/sql/util.py
index 8f502fc86..cbd74faac 100644
--- a/lib/sqlalchemy/sql/util.py
+++ b/lib/sqlalchemy/sql/util.py
@@ -154,6 +154,7 @@ def unwrap_order_by(clause):
without DESC/ASC/NULLS FIRST/NULLS LAST"""
cols = util.column_set()
+ result = []
stack = deque([clause])
while stack:
t = stack.popleft()
@@ -166,11 +167,13 @@ def unwrap_order_by(clause):
t = t.element
if isinstance(t, (_textual_label_reference)):
continue
- cols.add(t)
+ if t not in cols:
+ cols.add(t)
+ result.append(t)
else:
for c in t.get_children():
stack.append(c)
- return cols
+ return result
def clause_is_present(clause, search):
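
The unwrap_order_by() change above swaps the returned unordered set for a list that preserves the order of the ORDER BY elements while still de-duplicating; a minimal, generic sketch of the same idiom (not SQLAlchemy-specific):

    def ordered_unique(items):
        """Remove duplicates while preserving first-seen order."""
        seen = set()
        result = []
        for item in items:
            if item not in seen:
                seen.add(item)
                result.append(item)
        return result

    # ordered_unique(['b', 'a', 'b', 'c'])  ->  ['b', 'a', 'c']
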
diff --git a/lib/sqlalchemy/testing/__init__.py b/lib/sqlalchemy/testing/__init__.py
index 7482e32a1..bd6377eb7 100644
--- a/lib/sqlalchemy/testing/__init__.py
+++ b/lib/sqlalchemy/testing/__init__.py
@@ -21,7 +21,8 @@ def against(*queries):
from .assertions import emits_warning, emits_warning_on, uses_deprecated, \
eq_, ne_, le_, is_, is_not_, startswith_, assert_raises, \
assert_raises_message, AssertsCompiledSQL, ComparesTables, \
- AssertsExecutionResults, expect_deprecated, expect_warnings
+ AssertsExecutionResults, expect_deprecated, expect_warnings, \
+ in_, not_in_
from .util import run_as_contextmanager, rowset, fail, \
provide_metadata, adict, force_drop_names, \
diff --git a/lib/sqlalchemy/testing/assertions.py b/lib/sqlalchemy/testing/assertions.py
index 01fa0b8a9..21dc3e71a 100644
--- a/lib/sqlalchemy/testing/assertions.py
+++ b/lib/sqlalchemy/testing/assertions.py
@@ -229,6 +229,16 @@ def is_not_(a, b, msg=None):
assert a is not b, msg or "%r is %r" % (a, b)
+def in_(a, b, msg=None):
+ """Assert a in b, with repr messaging on failure."""
+ assert a in b, msg or "%r not in %r" % (a, b)
+
+
+def not_in_(a, b, msg=None):
+ """Assert a in not b, with repr messaging on failure."""
+ assert a not in b, msg or "%r is in %r" % (a, b)
+
+
def startswith_(a, fragment, msg=None):
"""Assert a.startswith(fragment), with repr messaging on failure."""
assert a.startswith(fragment), msg or "%r does not start with %r" % (
diff --git a/lib/sqlalchemy/testing/distutils_run.py b/lib/sqlalchemy/testing/distutils_run.py
deleted file mode 100644
index 38de8872c..000000000
--- a/lib/sqlalchemy/testing/distutils_run.py
+++ /dev/null
@@ -1,11 +0,0 @@
-"""Quick and easy way to get setup.py test to run py.test without any
-custom setuptools/distutils code.
-
-"""
-import unittest
-import pytest
-
-
-class TestSuite(unittest.TestCase):
- def test_sqlalchemy(self):
- pytest.main(["-n", "4", "-q"])
diff --git a/lib/sqlalchemy/testing/exclusions.py b/lib/sqlalchemy/testing/exclusions.py
index 972dec3a9..c7d06fceb 100644
--- a/lib/sqlalchemy/testing/exclusions.py
+++ b/lib/sqlalchemy/testing/exclusions.py
@@ -407,19 +407,19 @@ def future(fn, *arg):
def fails_on(db, reason=None):
- return fails_if(SpecPredicate(db), reason)
+ return fails_if(Predicate.as_predicate(db), reason)
def fails_on_everything_except(*dbs):
return succeeds_if(
OrPredicate([
- SpecPredicate(db) for db in dbs
+ Predicate.as_predicate(db) for db in dbs
])
)
def skip(db, reason=None):
- return skip_if(SpecPredicate(db), reason)
+ return skip_if(Predicate.as_predicate(db), reason)
def only_on(dbs, reason=None):
diff --git a/lib/sqlalchemy/testing/provision.py b/lib/sqlalchemy/testing/provision.py
index 77527571b..3f9ddae73 100644
--- a/lib/sqlalchemy/testing/provision.py
+++ b/lib/sqlalchemy/testing/provision.py
@@ -2,7 +2,7 @@ from sqlalchemy.engine import url as sa_url
from sqlalchemy import text
from sqlalchemy.util import compat
from . import config, engines
-
+import os
FOLLOWER_IDENT = None
@@ -52,6 +52,7 @@ def setup_config(db_url, options, file_config, follower_ident):
db_opts = {}
_update_db_opts(db_url, db_opts)
eng = engines.testing_engine(db_url, db_opts)
+ _post_configure_engine(db_url, eng, follower_ident)
eng.connect().close()
cfg = config.Config.register(eng, db_opts, options, file_config)
if follower_ident:
@@ -106,6 +107,11 @@ def _configure_follower(cfg, ident):
@register.init
+def _post_configure_engine(url, engine, follower_ident):
+ pass
+
+
+@register.init
def _follower_url_from_main(url, ident):
url = sa_url.make_url(url)
url.database = ident
@@ -126,6 +132,23 @@ def _sqlite_follower_url_from_main(url, ident):
return sa_url.make_url("sqlite:///%s.db" % ident)
+@_post_configure_engine.for_db("sqlite")
+def _sqlite_post_configure_engine(url, engine, follower_ident):
+ from sqlalchemy import event
+
+ @event.listens_for(engine, "connect")
+ def connect(dbapi_connection, connection_record):
+ # use file DBs in all cases, memory acts kind of strangely
+ # as an attached database
+ if not follower_ident:
+ dbapi_connection.execute(
+ 'ATTACH DATABASE "test_schema.db" AS test_schema')
+ else:
+ dbapi_connection.execute(
+ 'ATTACH DATABASE "%s_test_schema.db" AS test_schema'
+ % follower_ident)
+
+
@_create_db.for_db("postgresql")
def _pg_create_db(cfg, eng, ident):
with eng.connect().execution_options(
@@ -176,8 +199,10 @@ def _pg_drop_db(cfg, eng, ident):
@_drop_db.for_db("sqlite")
def _sqlite_drop_db(cfg, eng, ident):
- pass
- #os.remove("%s.db" % ident)
+ if ident:
+ os.remove("%s_test_schema.db" % ident)
+ else:
+ os.remove("%s.db" % ident)
@_drop_db.for_db("mysql")
diff --git a/lib/sqlalchemy/testing/requirements.py b/lib/sqlalchemy/testing/requirements.py
index e8b3a995f..15bfad831 100644
--- a/lib/sqlalchemy/testing/requirements.py
+++ b/lib/sqlalchemy/testing/requirements.py
@@ -111,6 +111,32 @@ class SuiteRequirements(Requirements):
return exclusions.open()
@property
+ def parens_in_union_contained_select_w_limit_offset(self):
+ """Target database must support parenthesized SELECT in UNION
+ when LIMIT/OFFSET is specifically present.
+
+ E.g. (SELECT ... LIMIT ..) UNION (SELECT .. OFFSET ..)
+
+ This is known to fail on SQLite.
+
+ """
+ return exclusions.open()
+
+ @property
+ def parens_in_union_contained_select_wo_limit_offset(self):
+ """Target database must support parenthesized SELECT in UNION
+ when OFFSET/LIMIT is specifically not present.
+
+ E.g. (SELECT ...) UNION (SELECT ..)
+
+ This is known to fail on SQLite. It also fails on Oracle
+ because without LIMIT/OFFSET, there is currently no step that
+ creates an additional subquery.
+
+ """
+ return exclusions.open()
+
+ @property
def boolean_col_expressions(self):
"""Target database must support boolean expressions as columns"""
diff --git a/lib/sqlalchemy/testing/suite/test_reflection.py b/lib/sqlalchemy/testing/suite/test_reflection.py
index 3edbdeb8c..288a85973 100644
--- a/lib/sqlalchemy/testing/suite/test_reflection.py
+++ b/lib/sqlalchemy/testing/suite/test_reflection.py
@@ -531,12 +531,20 @@ class ComponentReflectionTest(fixtures.TablesTest):
@testing.provide_metadata
def _test_get_unique_constraints(self, schema=None):
+ # SQLite dialect needs to parse the names of the constraints
+ # separately from what it gets from PRAGMA index_list(), and
+ # then matches them up, so the same set of column_names in two
+ # constraints will confuse it. Perhaps we should no longer
+ # bother with index_list() here since we have the whole
+ # CREATE TABLE?
uniques = sorted(
[
{'name': 'unique_a', 'column_names': ['a']},
{'name': 'unique_a_b_c', 'column_names': ['a', 'b', 'c']},
{'name': 'unique_c_a_b', 'column_names': ['c', 'a', 'b']},
{'name': 'unique_asc_key', 'column_names': ['asc', 'key']},
+ {'name': 'i.have.dots', 'column_names': ['b']},
+ {'name': 'i have spaces', 'column_names': ['c']},
],
key=operator.itemgetter('name')
)
diff --git a/lib/sqlalchemy/testing/suite/test_select.py b/lib/sqlalchemy/testing/suite/test_select.py
index d4bf63b55..e7de356b8 100644
--- a/lib/sqlalchemy/testing/suite/test_select.py
+++ b/lib/sqlalchemy/testing/suite/test_select.py
@@ -2,7 +2,7 @@ from .. import fixtures, config
from ..assertions import eq_
from sqlalchemy import util
-from sqlalchemy import Integer, String, select, func, bindparam
+from sqlalchemy import Integer, String, select, func, bindparam, union
from sqlalchemy import testing
from ..schema import Table, Column
@@ -146,7 +146,7 @@ class LimitOffsetTest(fixtures.TablesTest):
select([table]).order_by(table.c.id).limit(2).offset(1),
[(2, 2, 3), (3, 3, 4)]
)
-
+
@testing.requires.offset
def test_limit_offset_nobinds(self):
"""test that 'literal binds' mode works - no bound params."""
@@ -190,3 +190,123 @@ class LimitOffsetTest(fixtures.TablesTest):
[(2, 2, 3), (3, 3, 4)],
params={"l": 2, "o": 1}
)
+
+
+class CompoundSelectTest(fixtures.TablesTest):
+ __backend__ = True
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table("some_table", metadata,
+ Column('id', Integer, primary_key=True),
+ Column('x', Integer),
+ Column('y', Integer))
+
+ @classmethod
+ def insert_data(cls):
+ config.db.execute(
+ cls.tables.some_table.insert(),
+ [
+ {"id": 1, "x": 1, "y": 2},
+ {"id": 2, "x": 2, "y": 3},
+ {"id": 3, "x": 3, "y": 4},
+ {"id": 4, "x": 4, "y": 5},
+ ]
+ )
+
+ def _assert_result(self, select, result, params=()):
+ eq_(
+ config.db.execute(select, params).fetchall(),
+ result
+ )
+
+ def test_plain_union(self):
+ table = self.tables.some_table
+ s1 = select([table]).where(table.c.id == 2)
+ s2 = select([table]).where(table.c.id == 3)
+
+ u1 = union(s1, s2)
+ self._assert_result(
+ u1.order_by(u1.c.id),
+ [(2, 2, 3), (3, 3, 4)]
+ )
+
+ def test_select_from_plain_union(self):
+ table = self.tables.some_table
+ s1 = select([table]).where(table.c.id == 2)
+ s2 = select([table]).where(table.c.id == 3)
+
+ u1 = union(s1, s2).alias().select()
+ self._assert_result(
+ u1.order_by(u1.c.id),
+ [(2, 2, 3), (3, 3, 4)]
+ )
+
+ @testing.requires.parens_in_union_contained_select_w_limit_offset
+ def test_limit_offset_selectable_in_unions(self):
+ table = self.tables.some_table
+ s1 = select([table]).where(table.c.id == 2).\
+ limit(1).order_by(table.c.id)
+ s2 = select([table]).where(table.c.id == 3).\
+ limit(1).order_by(table.c.id)
+
+ u1 = union(s1, s2).limit(2)
+ self._assert_result(
+ u1.order_by(u1.c.id),
+ [(2, 2, 3), (3, 3, 4)]
+ )
+
+ @testing.requires.parens_in_union_contained_select_wo_limit_offset
+ def test_order_by_selectable_in_unions(self):
+ table = self.tables.some_table
+ s1 = select([table]).where(table.c.id == 2).\
+ order_by(table.c.id)
+ s2 = select([table]).where(table.c.id == 3).\
+ order_by(table.c.id)
+
+ u1 = union(s1, s2).limit(2)
+ self._assert_result(
+ u1.order_by(u1.c.id),
+ [(2, 2, 3), (3, 3, 4)]
+ )
+
+ def test_distinct_selectable_in_unions(self):
+ table = self.tables.some_table
+ s1 = select([table]).where(table.c.id == 2).\
+ distinct()
+ s2 = select([table]).where(table.c.id == 3).\
+ distinct()
+
+ u1 = union(s1, s2).limit(2)
+ self._assert_result(
+ u1.order_by(u1.c.id),
+ [(2, 2, 3), (3, 3, 4)]
+ )
+
+ @testing.requires.parens_in_union_contained_select_w_limit_offset
+ def test_limit_offset_in_unions_from_alias(self):
+ table = self.tables.some_table
+ s1 = select([table]).where(table.c.id == 2).\
+ limit(1).order_by(table.c.id)
+ s2 = select([table]).where(table.c.id == 3).\
+ limit(1).order_by(table.c.id)
+
+ # this necessarily has double parens
+ u1 = union(s1, s2).alias()
+ self._assert_result(
+ u1.select().limit(2).order_by(u1.c.id),
+ [(2, 2, 3), (3, 3, 4)]
+ )
+
+ def test_limit_offset_aliased_selectable_in_unions(self):
+ table = self.tables.some_table
+ s1 = select([table]).where(table.c.id == 2).\
+ limit(1).order_by(table.c.id).alias().select()
+ s2 = select([table]).where(table.c.id == 3).\
+ limit(1).order_by(table.c.id).alias().select()
+
+ u1 = union(s1, s2).limit(2)
+ self._assert_result(
+ u1.order_by(u1.c.id),
+ [(2, 2, 3), (3, 3, 4)]
+ )
diff --git a/lib/sqlalchemy/types.py b/lib/sqlalchemy/types.py
index 9ab92e90b..d82e683d9 100644
--- a/lib/sqlalchemy/types.py
+++ b/lib/sqlalchemy/types.py
@@ -16,7 +16,8 @@ __all__ = ['TypeEngine', 'TypeDecorator', 'UserDefinedType',
'SMALLINT', 'INTEGER', 'DATE', 'TIME', 'String', 'Integer',
'SmallInteger', 'BigInteger', 'Numeric', 'Float', 'DateTime',
'Date', 'Time', 'LargeBinary', 'Binary', 'Boolean', 'Unicode',
- 'Concatenable', 'UnicodeText', 'PickleType', 'Interval', 'Enum']
+ 'Concatenable', 'UnicodeText', 'PickleType', 'Interval', 'Enum',
+ 'Indexable', 'Array']
from .sql.type_api import (
adapt_type,
@@ -27,6 +28,7 @@ from .sql.type_api import (
UserDefinedType
)
from .sql.sqltypes import (
+ Array,
BIGINT,
BINARY,
BLOB,
@@ -46,6 +48,7 @@ from .sql.sqltypes import (
Enum,
FLOAT,
Float,
+ Indexable,
INT,
INTEGER,
Integer,
@@ -74,5 +77,4 @@ from .sql.sqltypes import (
UnicodeText,
VARBINARY,
VARCHAR,
- _type_map
)
diff --git a/lib/sqlalchemy/util/langhelpers.py b/lib/sqlalchemy/util/langhelpers.py
index 499515142..743afccfd 100644
--- a/lib/sqlalchemy/util/langhelpers.py
+++ b/lib/sqlalchemy/util/langhelpers.py
@@ -805,6 +805,8 @@ class MemoizedSlots(object):
"""
+ __slots__ = ()
+
def _fallback_getattr(self, key):
raise AttributeError(key)
@@ -1017,7 +1019,9 @@ def constructor_copy(obj, cls, *args, **kw):
"""
names = get_cls_kwargs(cls)
- kw.update((k, obj.__dict__[k]) for k in names if k in obj.__dict__)
+ kw.update(
+ (k, obj.__dict__[k]) for k in names.difference(kw)
+ if k in obj.__dict__)
return cls(*args, **kw)
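
A minimal sketch of the constructor_copy() change above, using a hypothetical Widget class: keyword arguments passed explicitly by the caller now take precedence over attributes copied from the source object.

    class Widget(object):
        def __init__(self, name=None, size=None):
            self.name = name
            self.size = size

    def constructor_copy_sketch(obj, cls, **kw):
        # mirror of the updated logic: copy constructor kwargs from
        # obj.__dict__, but skip any key the caller supplied explicitly,
        # so the explicit override wins
        names = set(['name', 'size'])   # stand-in for get_cls_kwargs(cls)
        kw.update(
            (k, obj.__dict__[k]) for k in names.difference(kw)
            if k in obj.__dict__)
        return cls(**kw)

    w1 = Widget(name='a', size=1)
    w2 = constructor_copy_sketch(w1, Widget, size=2)
    # w2.name == 'a', w2.size == 2; previously the copied size=1 would
    # have overwritten the explicit size=2
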
diff --git a/setup.py b/setup.py
index 09b524cd2..5b97cb9fe 100644
--- a/setup.py
+++ b/setup.py
@@ -1,40 +1,20 @@
-"""setup.py
-
-Please see README for basic installation instructions.
-
-"""
-
import os
+import platform
import re
import sys
from distutils.command.build_ext import build_ext
-from distutils.errors import (CCompilerError, DistutilsExecError,
- DistutilsPlatformError)
-
-has_feature = False
-try:
- from setuptools import setup, Extension
- try:
- # see
- # https://bitbucket.org/pypa/setuptools/issue/65/deprecate-and-remove-features,
- # where they may remove Feature.
- from setuptools import Feature
- has_feature = True
- except ImportError:
- pass
-except ImportError:
- from distutils.core import setup, Extension
-
-py3k = False
+from distutils.errors import CCompilerError
+from distutils.errors import DistutilsExecError
+from distutils.errors import DistutilsPlatformError
+from setuptools import Distribution as _Distribution, Extension
+from setuptools import setup
+from setuptools import find_packages
+from setuptools.command.test import test as TestCommand
cmdclass = {}
-extra = {}
if sys.version_info < (2, 6):
raise Exception("SQLAlchemy requires Python 2.6 or higher.")
-elif sys.version_info >= (3, 0):
- py3k = True
-import platform
cpython = platform.python_implementation() == 'CPython'
ext_modules = [
@@ -44,7 +24,7 @@ ext_modules = [
sources=['lib/sqlalchemy/cextension/resultproxy.c']),
Extension('sqlalchemy.cutils',
sources=['lib/sqlalchemy/cextension/utils.c'])
- ]
+]
ext_errors = (CCompilerError, DistutilsExecError, DistutilsPlatformError)
if sys.platform == 'win32':
@@ -82,6 +62,44 @@ class ve_build_ext(build_ext):
cmdclass['build_ext'] = ve_build_ext
+class Distribution(_Distribution):
+
+ def has_ext_modules(self):
+ # We want to always claim that we have ext_modules. This will be fine
+ # if we don't actually have them (such as on PyPy) because nothing
+ # will get built, however we don't want to provide an overly broad
+ # Wheel package when building a wheel without C support. This will
+ # ensure that Wheel knows to treat us as if the build output is
+ # platform specific.
+ return True
+
+
+class PyTest(TestCommand):
+ # from https://pytest.org/latest/goodpractises.html\
+ # #integration-with-setuptools-test-commands
+ user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")]
+
+ default_options = ["-n", "4", "-q"]
+
+ def initialize_options(self):
+ TestCommand.initialize_options(self)
+ self.pytest_args = ""
+
+ def finalize_options(self):
+ TestCommand.finalize_options(self)
+ self.test_args = []
+ self.test_suite = True
+
+ def run_tests(self):
+ # import here, because outside the eggs aren't loaded
+ import pytest
+ errno = pytest.main(
+ " ".join(self.default_options) + " " + self.pytest_args)
+ sys.exit(errno)
+
+cmdclass['test'] = PyTest
+
+
def status_msgs(*msgs):
print('*' * 75)
for msg in msgs:
@@ -89,66 +107,53 @@ def status_msgs(*msgs):
print('*' * 75)
-def find_packages(location):
- packages = []
- for pkg in ['sqlalchemy']:
- for _dir, subdirectories, files in (
- os.walk(os.path.join(location, pkg))):
- if '__init__.py' in files:
- tokens = _dir.split(os.sep)[len(location.split(os.sep)):]
- packages.append(".".join(tokens))
- return packages
+with open(
+ os.path.join(
+ os.path.dirname(__file__),
+ 'lib', 'sqlalchemy', '__init__.py')) as v_file:
+ VERSION = re.compile(
+ r".*__version__ = '(.*?)'",
+ re.S).match(v_file.read()).group(1)
-v_file = open(os.path.join(os.path.dirname(__file__),
- 'lib', 'sqlalchemy', '__init__.py'))
-VERSION = re.compile(r".*__version__ = '(.*?)'",
- re.S).match(v_file.read()).group(1)
-v_file.close()
-
-r_file = open(os.path.join(os.path.dirname(__file__), 'README.rst'))
-readme = r_file.read()
-r_file.close()
+with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as r_file:
+ readme = r_file.read()
def run_setup(with_cext):
- kwargs = extra.copy()
+ kwargs = {}
if with_cext:
- if has_feature:
- kwargs['features'] = {'cextensions': Feature(
- "optional C speed-enhancements",
- standard=True,
- ext_modules=ext_modules
- )}
- else:
- kwargs['ext_modules'] = ext_modules
-
- setup(name="SQLAlchemy",
- version=VERSION,
- description="Database Abstraction Library",
- author="Mike Bayer",
- author_email="mike_mp@zzzcomputing.com",
- url="http://www.sqlalchemy.org",
- packages=find_packages('lib'),
- package_dir={'': 'lib'},
- license="MIT License",
- cmdclass=cmdclass,
- tests_require=['pytest >= 2.5.2', 'mock', 'pytest-xdist'],
- test_suite="sqlalchemy.testing.distutils_run",
- long_description=readme,
- classifiers=[
- "Development Status :: 5 - Production/Stable",
- "Intended Audience :: Developers",
- "License :: OSI Approved :: MIT License",
- "Programming Language :: Python",
- "Programming Language :: Python :: 3",
- "Programming Language :: Python :: Implementation :: CPython",
- "Programming Language :: Python :: Implementation :: Jython",
- "Programming Language :: Python :: Implementation :: PyPy",
- "Topic :: Database :: Front-Ends",
- "Operating System :: OS Independent",
- ],
- **kwargs
- )
+ kwargs['ext_modules'] = ext_modules
+ else:
+ kwargs['ext_modules'] = []
+
+ setup(
+ name="SQLAlchemy",
+ version=VERSION,
+ description="Database Abstraction Library",
+ author="Mike Bayer",
+ author_email="mike_mp@zzzcomputing.com",
+ url="http://www.sqlalchemy.org",
+ packages=find_packages('lib'),
+ package_dir={'': 'lib'},
+ license="MIT License",
+ cmdclass=cmdclass,
+ tests_require=['pytest >= 2.5.2', 'mock', 'pytest-xdist'],
+ long_description=readme,
+ classifiers=[
+ "Development Status :: 5 - Production/Stable",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: MIT License",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: Implementation :: CPython",
+ "Programming Language :: Python :: Implementation :: Jython",
+ "Programming Language :: Python :: Implementation :: PyPy",
+ "Topic :: Database :: Front-Ends",
+ "Operating System :: OS Independent",
+ ],
+ distclass=Distribution,
+ **kwargs
+ )
if not cpython:
run_setup(False)
diff --git a/test/aaa_profiling/test_compiler.py b/test/aaa_profiling/test_compiler.py
index 5eece4602..5095be103 100644
--- a/test/aaa_profiling/test_compiler.py
+++ b/test/aaa_profiling/test_compiler.py
@@ -32,8 +32,8 @@ class CompileTest(fixtures.TestBase, AssertsExecutionResults):
for t in (t1, t2):
for c in t.c:
c.type._type_affinity
- from sqlalchemy import types
- for t in list(types._type_map.values()):
+ from sqlalchemy.sql import sqltypes
+ for t in list(sqltypes._type_map.values()):
t._type_affinity
cls.dialect = default.DefaultDialect()
diff --git a/test/base/test_utils.py b/test/base/test_utils.py
index 256f52850..8074de53e 100644
--- a/test/base/test_utils.py
+++ b/test/base/test_utils.py
@@ -2,13 +2,14 @@ import copy
from sqlalchemy import util, sql, exc, testing
from sqlalchemy.testing import assert_raises, assert_raises_message, fixtures
-from sqlalchemy.testing import eq_, is_, ne_, fails_if
+from sqlalchemy.testing import eq_, is_, ne_, fails_if, mock
from sqlalchemy.testing.util import picklers, gc_collect
from sqlalchemy.util import classproperty, WeakSequence, get_callable_argspec
from sqlalchemy.sql import column
from sqlalchemy.util import langhelpers
import inspect
+
class _KeyedTupleTest(object):
def _fixture(self, values, labels):
@@ -284,6 +285,33 @@ class MemoizedAttrTest(fixtures.TestBase):
eq_(f1.bar(), 20)
eq_(val[0], 21)
+ def test_memoized_slots(self):
+ canary = mock.Mock()
+
+ class Foob(util.MemoizedSlots):
+ __slots__ = ('foo_bar', 'gogo')
+
+ def _memoized_method_gogo(self):
+ canary.method()
+ return "gogo"
+
+ def _memoized_attr_foo_bar(self):
+ canary.attr()
+ return "foobar"
+
+ f1 = Foob()
+ assert_raises(AttributeError, setattr, f1, "bar", "bat")
+
+ eq_(f1.foo_bar, "foobar")
+
+ eq_(f1.foo_bar, "foobar")
+
+ eq_(f1.gogo(), "gogo")
+
+ eq_(f1.gogo(), "gogo")
+
+ eq_(canary.mock_calls, [mock.call.attr(), mock.call.method()])
+
class ToListTest(fixtures.TestBase):
def test_from_string(self):
diff --git a/test/dialect/mssql/test_compiler.py b/test/dialect/mssql/test_compiler.py
index 9d89f040b..80be9f67d 100644
--- a/test/dialect/mssql/test_compiler.py
+++ b/test/dialect/mssql/test_compiler.py
@@ -12,7 +12,7 @@ from sqlalchemy import Integer, String, Table, Column, select, MetaData,\
class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
- __dialect__ = mssql.dialect(legacy_schema_aliasing=False)
+ __dialect__ = mssql.dialect()
def test_true_false(self):
self.assert_compile(
diff --git a/test/dialect/mssql/test_query.py b/test/dialect/mssql/test_query.py
index 61ae32ef4..32edfd7eb 100644
--- a/test/dialect/mssql/test_query.py
+++ b/test/dialect/mssql/test_query.py
@@ -41,17 +41,15 @@ class LegacySchemaAliasingTest(fixtures.TestBase, AssertsCompiledSQL):
)
def _assert_sql(self, element, legacy_sql, modern_sql=None):
- dialect = mssql.dialect()
+ dialect = mssql.dialect(legacy_schema_aliasing=True)
- with assertions.expect_warnings(
- "legacy_schema_aliasing flag is defaulted to True.*"):
- self.assert_compile(
- element,
- legacy_sql,
- dialect=dialect
- )
+ self.assert_compile(
+ element,
+ legacy_sql,
+ dialect=dialect
+ )
- dialect = mssql.dialect(legacy_schema_aliasing=False)
+ dialect = mssql.dialect()
self.assert_compile(
element,
modern_sql or "foob",
diff --git a/test/dialect/mssql/test_reflection.py b/test/dialect/mssql/test_reflection.py
index bee441586..e016a6e41 100644
--- a/test/dialect/mssql/test_reflection.py
+++ b/test/dialect/mssql/test_reflection.py
@@ -1,5 +1,5 @@
# -*- encoding: utf-8
-from sqlalchemy.testing import eq_
+from sqlalchemy.testing import eq_, is_, in_
from sqlalchemy import *
from sqlalchemy import types, schema, event
from sqlalchemy.databases import mssql
@@ -24,14 +24,14 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
Column('user_name', types.VARCHAR(20), nullable=False),
Column('test1', types.CHAR(5), nullable=False),
Column('test2', types.Float(5), nullable=False),
- Column('test3', types.Text('max')),
+ Column('test3', types.Text()),
Column('test4', types.Numeric, nullable=False),
Column('test5', types.DateTime),
Column('parent_user_id', types.Integer,
ForeignKey('engine_users.user_id')),
Column('test6', types.DateTime, nullable=False),
- Column('test7', types.Text('max')),
- Column('test8', types.LargeBinary('max')),
+ Column('test7', types.Text()),
+ Column('test8', types.LargeBinary()),
Column('test_passivedefault2', types.Integer,
server_default='5'),
Column('test9', types.BINARY(100)),
@@ -171,6 +171,32 @@ class ReflectionTest(fixtures.TestBase, ComparesTables):
set([t2.c['x col'], t2.c.y])
)
+ @testing.provide_metadata
+ def test_max_ident_in_varchar_not_present(self):
+ """test [ticket:3504].
+
+ Here we are testing not just that the "max" token comes back
+ as None, but also that these types accept "max" as the value
+ of "length" on construction, which isn't a directly documented
+ pattern however is likely in common use.
+
+ """
+ metadata = self.metadata
+
+ Table(
+ 't', metadata,
+ Column('t1', types.String),
+ Column('t2', types.Text('max')),
+ Column('t3', types.Text('max')),
+ Column('t4', types.LargeBinary('max')),
+ Column('t5', types.VARBINARY('max')),
+ )
+ metadata.create_all()
+ for col in inspect(testing.db).get_columns('t'):
+ is_(col['type'].length, None)
+ in_('max', str(col['type'].compile(dialect=testing.db.dialect)))
+
+
from sqlalchemy.dialects.mssql.information_schema import CoerceUnicode, tables
from sqlalchemy.dialects.mssql import base
@@ -187,7 +213,7 @@ class InfoCoerceUnicodeTest(fixtures.TestBase, AssertsCompiledSQL):
stmt = tables.c.table_name == 'somename'
self.assert_compile(
stmt,
- "[TABLES_1].[TABLE_NAME] = :table_name_1",
+ "[INFORMATION_SCHEMA].[TABLES].[TABLE_NAME] = :table_name_1",
dialect=dialect
)
@@ -197,7 +223,7 @@ class InfoCoerceUnicodeTest(fixtures.TestBase, AssertsCompiledSQL):
stmt = tables.c.table_name == 'somename'
self.assert_compile(
stmt,
- "[TABLES_1].[TABLE_NAME] = CAST(:table_name_1 AS NVARCHAR(max))",
+ "[INFORMATION_SCHEMA].[TABLES].[TABLE_NAME] = CAST(:table_name_1 AS NVARCHAR(max))",
dialect=dialect
)
diff --git a/test/dialect/mssql/test_types.py b/test/dialect/mssql/test_types.py
index 17ceb6b61..e782bd5e5 100644
--- a/test/dialect/mssql/test_types.py
+++ b/test/dialect/mssql/test_types.py
@@ -313,9 +313,7 @@ class TypeRoundTripTest(
def teardown(self):
metadata.drop_all()
- @testing.fails_on_everything_except(
- 'mssql+pyodbc',
- 'this is some pyodbc-specific feature')
+ @testing.fails_on_everything_except('mssql+pyodbc')
def test_decimal_notation(self):
numeric_table = Table(
'numeric_table', metadata,
diff --git a/test/dialect/mysql/test_query.py b/test/dialect/mysql/test_query.py
index f19177c2a..85513167c 100644
--- a/test/dialect/mysql/test_query.py
+++ b/test/dialect/mysql/test_query.py
@@ -5,7 +5,6 @@ from sqlalchemy import *
from sqlalchemy.testing import fixtures, AssertsCompiledSQL
from sqlalchemy import testing
-
class IdiosyncrasyTest(fixtures.TestBase, AssertsCompiledSQL):
__only_on__ = 'mysql'
__backend__ = True
@@ -177,3 +176,57 @@ class MatchTest(fixtures.TestBase, AssertsCompiledSQL):
eq_([1, 3, 5], [r.id for r in results])
+class AnyAllTest(fixtures.TablesTest, AssertsCompiledSQL):
+ __only_on__ = 'mysql'
+ __backend__ = True
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'stuff', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('value', Integer)
+ )
+
+ @classmethod
+ def insert_data(cls):
+ stuff = cls.tables.stuff
+ testing.db.execute(
+ stuff.insert(),
+ [
+ {'id': 1, 'value': 1},
+ {'id': 2, 'value': 2},
+ {'id': 3, 'value': 3},
+ {'id': 4, 'value': 4},
+ {'id': 5, 'value': 5},
+ ]
+ )
+
+ def test_any_w_comparator(self):
+ stuff = self.tables.stuff
+ stmt = select([stuff.c.id]).where(
+ stuff.c.value > any_(select([stuff.c.value])))
+
+ eq_(
+ testing.db.execute(stmt).fetchall(),
+ [(2,), (3,), (4,), (5,)]
+ )
+
+ def test_all_w_comparator(self):
+ stuff = self.tables.stuff
+ stmt = select([stuff.c.id]).where(
+ stuff.c.value >= all_(select([stuff.c.value])))
+
+ eq_(
+ testing.db.execute(stmt).fetchall(),
+ [(5,)]
+ )
+
+ def test_any_literal(self):
+ stuff = self.tables.stuff
+ stmt = select([4 == any_(select([stuff.c.value]))])
+
+ is_(
+ testing.db.execute(stmt).scalar(), True
+ )
+
diff --git a/test/dialect/postgresql/test_compiler.py b/test/dialect/postgresql/test_compiler.py
index 9fa5c9804..78217bd82 100644
--- a/test/dialect/postgresql/test_compiler.py
+++ b/test/dialect/postgresql/test_compiler.py
@@ -9,11 +9,13 @@ from sqlalchemy import Sequence, Table, Column, Integer, update, String,\
Text
from sqlalchemy.dialects.postgresql import ExcludeConstraint, array
from sqlalchemy import exc, schema
-from sqlalchemy.dialects.postgresql import base as postgresql
+from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import TSRANGE
from sqlalchemy.orm import mapper, aliased, Session
-from sqlalchemy.sql import table, column, operators
+from sqlalchemy.sql import table, column, operators, literal_column
+from sqlalchemy.sql import util as sql_util
from sqlalchemy.util import u
+from sqlalchemy.dialects.postgresql import aggregate_order_by
class SequenceTest(fixtures.TestBase, AssertsCompiledSQL):
@@ -21,7 +23,7 @@ class SequenceTest(fixtures.TestBase, AssertsCompiledSQL):
def test_format(self):
seq = Sequence('my_seq_no_schema')
- dialect = postgresql.PGDialect()
+ dialect = postgresql.dialect()
assert dialect.identifier_preparer.format_sequence(seq) \
== 'my_seq_no_schema'
seq = Sequence('my_seq', schema='some_schema')
@@ -693,7 +695,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
self._test_array_zero_indexes(False)
def test_array_literal_type(self):
- is_(postgresql.array([1, 2]).type._type_affinity, postgresql.ARRAY)
+ isinstance(postgresql.array([1, 2]).type, postgresql.ARRAY)
is_(postgresql.array([1, 2]).type.item_type._type_affinity, Integer)
is_(postgresql.array([1, 2], type_=String).
@@ -800,6 +802,48 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
dialect=postgresql.dialect()
)
+ def test_aggregate_order_by_one(self):
+ m = MetaData()
+ table = Table('table1', m, Column('a', Integer), Column('b', Integer))
+ expr = func.array_agg(aggregate_order_by(table.c.a, table.c.b.desc()))
+ stmt = select([expr])
+
+ # note this tests that the object exports FROM objects
+ # correctly
+ self.assert_compile(
+ stmt,
+ "SELECT array_agg(table1.a ORDER BY table1.b DESC) "
+ "AS array_agg_1 FROM table1"
+ )
+
+ def test_aggregate_order_by_two(self):
+ m = MetaData()
+ table = Table('table1', m, Column('a', Integer), Column('b', Integer))
+ expr = func.string_agg(
+ table.c.a,
+ aggregate_order_by(literal_column("','"), table.c.a)
+ )
+ stmt = select([expr])
+
+ self.assert_compile(
+ stmt,
+ "SELECT string_agg(table1.a, ',' ORDER BY table1.a) "
+ "AS string_agg_1 FROM table1"
+ )
+
+ def test_aggregate_order_by_adapt(self):
+ m = MetaData()
+ table = Table('table1', m, Column('a', Integer), Column('b', Integer))
+ expr = func.array_agg(aggregate_order_by(table.c.a, table.c.b.desc()))
+ stmt = select([expr])
+
+ a1 = table.alias('foo')
+ stmt2 = sql_util.ClauseAdapter(a1).traverse(stmt)
+ self.assert_compile(
+ stmt2,
+ "SELECT array_agg(foo.a ORDER BY foo.b DESC) AS array_agg_1 FROM table1 AS foo"
+ )
+
class DistinctOnTest(fixtures.TestBase, AssertsCompiledSQL):
diff --git a/test/dialect/postgresql/test_reflection.py b/test/dialect/postgresql/test_reflection.py
index 0354fa436..ee87e7325 100644
--- a/test/dialect/postgresql/test_reflection.py
+++ b/test/dialect/postgresql/test_reflection.py
@@ -673,6 +673,7 @@ class ReflectionTest(fixtures.TestBase):
eq_(ind, [{'unique': False, 'column_names': ['y'], 'name': 'idx1'}])
conn.close()
+ @testing.fails_if("postgresql < 8.2", "reloptions not supported")
@testing.provide_metadata
def test_index_reflection_with_storage_options(self):
"""reflect indexes with storage options set"""
diff --git a/test/dialect/postgresql/test_types.py b/test/dialect/postgresql/test_types.py
index fac0f2df8..6ed90c76d 100644
--- a/test/dialect/postgresql/test_types.py
+++ b/test/dialect/postgresql/test_types.py
@@ -7,11 +7,11 @@ from sqlalchemy import testing
import datetime
from sqlalchemy import Table, MetaData, Column, Integer, Enum, Float, select, \
func, DateTime, Numeric, exc, String, cast, REAL, TypeDecorator, Unicode, \
- Text, null, text
+ Text, null, text, column, Array, any_, all_
from sqlalchemy.sql import operators
from sqlalchemy import types
import sqlalchemy as sa
-from sqlalchemy.dialects.postgresql import base as postgresql
+from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import HSTORE, hstore, array, \
INT4RANGE, INT8RANGE, NUMRANGE, DATERANGE, TSRANGE, TSTZRANGE, \
JSON, JSONB
@@ -20,6 +20,8 @@ from sqlalchemy import util
from sqlalchemy.testing.util import round_decimal
from sqlalchemy import inspect
from sqlalchemy import event
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.orm import Session
tztable = notztable = metadata = table = None
@@ -497,6 +499,34 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults):
finally:
metadata.drop_all()
+ @testing.provide_metadata
+ def test_custom_subclass(self):
+ class MyEnum(TypeDecorator):
+ impl = Enum('oneHI', 'twoHI', 'threeHI', name='myenum')
+
+ def process_bind_param(self, value, dialect):
+ if value is not None:
+ value += "HI"
+ return value
+
+ def process_result_value(self, value, dialect):
+ if value is not None:
+ value += "THERE"
+ return value
+
+ t1 = Table(
+ 'table1', self.metadata,
+ Column('data', MyEnum())
+ )
+ self.metadata.create_all(testing.db)
+
+ with testing.db.connect() as conn:
+ conn.execute(t1.insert(), {"data": "two"})
+ eq_(
+ conn.scalar(select([t1.c.data])),
+ "twoHITHERE"
+ )
+
class OIDTest(fixtures.TestBase):
__only_on__ = 'postgresql'
@@ -698,7 +728,178 @@ class TimePrecisionTest(fixtures.TestBase, AssertsCompiledSQL):
eq_(t2.c.c6.type.timezone, True)
-class ArrayTest(fixtures.TablesTest, AssertsExecutionResults):
+class ArrayTest(AssertsCompiledSQL, fixtures.TestBase):
+ __dialect__ = 'postgresql'
+
+ def test_array_int_index(self):
+ col = column('x', postgresql.ARRAY(Integer))
+ self.assert_compile(
+ select([col[3]]),
+ "SELECT x[%(x_1)s] AS anon_1",
+ checkparams={'x_1': 3}
+ )
+
+ def test_array_any(self):
+ col = column('x', postgresql.ARRAY(Integer))
+ self.assert_compile(
+ select([col.any(7, operator=operators.lt)]),
+ "SELECT %(param_1)s < ANY (x) AS anon_1",
+ checkparams={'param_1': 7}
+ )
+
+ def test_array_all(self):
+ col = column('x', postgresql.ARRAY(Integer))
+ self.assert_compile(
+ select([col.all(7, operator=operators.lt)]),
+ "SELECT %(param_1)s < ALL (x) AS anon_1",
+ checkparams={'param_1': 7}
+ )
+
+ def test_array_contains(self):
+ col = column('x', postgresql.ARRAY(Integer))
+ self.assert_compile(
+ select([col.contains(array([4, 5, 6]))]),
+ "SELECT x @> ARRAY[%(param_1)s, %(param_2)s, %(param_3)s] "
+ "AS anon_1",
+ checkparams={'param_1': 4, 'param_3': 6, 'param_2': 5}
+ )
+
+ def test_array_contained_by(self):
+ col = column('x', postgresql.ARRAY(Integer))
+ self.assert_compile(
+ select([col.contained_by(array([4, 5, 6]))]),
+ "SELECT x <@ ARRAY[%(param_1)s, %(param_2)s, %(param_3)s] "
+ "AS anon_1",
+ checkparams={'param_1': 4, 'param_3': 6, 'param_2': 5}
+ )
+
+ def test_array_overlap(self):
+ col = column('x', postgresql.ARRAY(Integer))
+ self.assert_compile(
+ select([col.overlap(array([4, 5, 6]))]),
+ "SELECT x && ARRAY[%(param_1)s, %(param_2)s, %(param_3)s] "
+ "AS anon_1",
+ checkparams={'param_1': 4, 'param_3': 6, 'param_2': 5}
+ )
+
+ def test_array_slice_index(self):
+ col = column('x', postgresql.ARRAY(Integer))
+ self.assert_compile(
+ select([col[5:10]]),
+ "SELECT x[%(x_1)s:%(x_2)s] AS anon_1",
+ checkparams={'x_2': 10, 'x_1': 5}
+ )
+
+ def test_array_dim_index(self):
+ col = column('x', postgresql.ARRAY(Integer, dimensions=2))
+ self.assert_compile(
+ select([col[3][5]]),
+ "SELECT x[%(x_1)s][%(param_1)s] AS anon_1",
+ checkparams={'x_1': 3, 'param_1': 5}
+ )
+
+ def test_array_concat(self):
+ col = column('x', postgresql.ARRAY(Integer))
+ literal = array([4, 5])
+
+ self.assert_compile(
+ select([col + literal]),
+ "SELECT x || ARRAY[%(param_1)s, %(param_2)s] AS anon_1",
+ checkparams={'param_1': 4, 'param_2': 5}
+ )
+
+ def test_array_index_map_dimensions(self):
+ col = column('x', postgresql.ARRAY(Integer, dimensions=3))
+ is_(
+ col[5].type._type_affinity, Array
+ )
+ assert isinstance(
+ col[5].type, postgresql.ARRAY
+ )
+ eq_(
+ col[5].type.dimensions, 2
+ )
+ is_(
+ col[5][6].type._type_affinity, Array
+ )
+ assert isinstance(
+ col[5][6].type, postgresql.ARRAY
+ )
+ eq_(
+ col[5][6].type.dimensions, 1
+ )
+ is_(
+ col[5][6][7].type._type_affinity, Integer
+ )
+
+ def test_array_getitem_single_type(self):
+ m = MetaData()
+ arrtable = Table(
+ 'arrtable', m,
+ Column('intarr', postgresql.ARRAY(Integer)),
+ Column('strarr', postgresql.ARRAY(String)),
+ )
+ is_(arrtable.c.intarr[1].type._type_affinity, Integer)
+ is_(arrtable.c.strarr[1].type._type_affinity, String)
+
+ def test_array_getitem_slice_type(self):
+ m = MetaData()
+ arrtable = Table(
+ 'arrtable', m,
+ Column('intarr', postgresql.ARRAY(Integer)),
+ Column('strarr', postgresql.ARRAY(String)),
+ )
+
+ # type affinity is Array...
+ is_(arrtable.c.intarr[1:3].type._type_affinity, Array)
+ is_(arrtable.c.strarr[1:3].type._type_affinity, Array)
+
+ # but the slice returns the actual type
+ assert isinstance(arrtable.c.intarr[1:3].type, postgresql.ARRAY)
+ assert isinstance(arrtable.c.strarr[1:3].type, postgresql.ARRAY)
+
+ def test_array_functions_plus_getitem(self):
+ """test parenthesizing of functions plus indexing, which seems
+ to be required by Postgresql.
+
+ """
+ stmt = select([
+ func.array_cat(
+ array([1, 2, 3]),
+ array([4, 5, 6]),
+ type_=postgresql.ARRAY(Integer)
+ )[2:5]
+ ])
+ self.assert_compile(
+ stmt,
+ "SELECT (array_cat(ARRAY[%(param_1)s, %(param_2)s, %(param_3)s], "
+ "ARRAY[%(param_4)s, %(param_5)s, %(param_6)s]))"
+ "[%(param_7)s:%(param_8)s] AS anon_1"
+ )
+
+ self.assert_compile(
+ func.array_cat(
+ array([1, 2, 3]),
+ array([4, 5, 6]),
+ type_=postgresql.ARRAY(Integer)
+ )[3],
+ "(array_cat(ARRAY[%(param_1)s, %(param_2)s, %(param_3)s], "
+ "ARRAY[%(param_4)s, %(param_5)s, %(param_6)s]))[%(param_7)s]"
+ )
+
+ def test_array_agg_generic(self):
+ expr = func.array_agg(column('q', Integer))
+ is_(expr.type.__class__, types.Array)
+ is_(expr.type.item_type.__class__, Integer)
+
+ def test_array_agg_specific(self):
+ from sqlalchemy.dialects.postgresql import array_agg
+ expr = array_agg(column('q', Integer))
+ is_(expr.type.__class__, postgresql.ARRAY)
+ is_(expr.type.item_type.__class__, Integer)
+
+
+class ArrayRoundTripTest(fixtures.TablesTest, AssertsExecutionResults):
__only_on__ = 'postgresql'
__backend__ = True
@@ -754,6 +955,89 @@ class ArrayTest(fixtures.TablesTest, AssertsExecutionResults):
assert isinstance(tbl.c.intarr.type.item_type, Integer)
assert isinstance(tbl.c.strarr.type.item_type, String)
+ @testing.provide_metadata
+ def test_array_agg(self):
+ values_table = Table('values', self.metadata, Column('value', Integer))
+ self.metadata.create_all(testing.db)
+ testing.db.execute(
+ values_table.insert(),
+ [{'value': i} for i in range(1, 10)]
+ )
+
+ stmt = select([func.array_agg(values_table.c.value)])
+ eq_(
+ testing.db.execute(stmt).scalar(),
+ list(range(1, 10))
+ )
+
+ stmt = select([func.array_agg(values_table.c.value)[3]])
+ eq_(
+ testing.db.execute(stmt).scalar(),
+ 3
+ )
+
+ stmt = select([func.array_agg(values_table.c.value)[2:4]])
+ eq_(
+ testing.db.execute(stmt).scalar(),
+ [2, 3, 4]
+ )
+
+ def test_array_index_slice_exprs(self):
+ """test a variety of expressions that sometimes need parenthesizing"""
+
+ stmt = select([array([1, 2, 3, 4])[2:3]])
+ eq_(
+ testing.db.execute(stmt).scalar(),
+ [2, 3]
+ )
+
+ stmt = select([array([1, 2, 3, 4])[2]])
+ eq_(
+ testing.db.execute(stmt).scalar(),
+ 2
+ )
+
+ stmt = select([(array([1, 2]) + array([3, 4]))[2:3]])
+ eq_(
+ testing.db.execute(stmt).scalar(),
+ [2, 3]
+ )
+
+ stmt = select([array([1, 2]) + array([3, 4])[2:3]])
+ eq_(
+ testing.db.execute(stmt).scalar(),
+ [1, 2, 4]
+ )
+
+ stmt = select([array([1, 2])[2:3] + array([3, 4])])
+ eq_(
+ testing.db.execute(stmt).scalar(),
+ [2, 3, 4]
+ )
+
+ stmt = select([
+ func.array_cat(
+ array([1, 2, 3]),
+ array([4, 5, 6]),
+ type_=postgresql.ARRAY(Integer)
+ )[2:5]
+ ])
+ eq_(
+ testing.db.execute(stmt).scalar(), [2, 3, 4, 5]
+ )
+
+ def test_any_all_exprs(self):
+ stmt = select([
+ 3 == any_(func.array_cat(
+ array([1, 2, 3]),
+ array([4, 5, 6]),
+ type_=postgresql.ARRAY(Integer)
+ ))
+ ])
+ eq_(
+ testing.db.execute(stmt).scalar(), True
+ )
+
def test_insert_array(self):
arrtable = self.tables.arrtable
arrtable.insert().execute(intarr=[1, 2, 3], strarr=[util.u('abc'),
@@ -828,16 +1112,6 @@ class ArrayTest(fixtures.TablesTest, AssertsExecutionResults):
), True
)
- def test_array_getitem_single_type(self):
- arrtable = self.tables.arrtable
- is_(arrtable.c.intarr[1].type._type_affinity, Integer)
- is_(arrtable.c.strarr[1].type._type_affinity, String)
-
- def test_array_getitem_slice_type(self):
- arrtable = self.tables.arrtable
- is_(arrtable.c.intarr[1:3].type._type_affinity, postgresql.ARRAY)
- is_(arrtable.c.strarr[1:3].type._type_affinity, postgresql.ARRAY)
-
def test_array_getitem_single_exec(self):
arrtable = self.tables.arrtable
self._fixture_456(arrtable)
@@ -926,6 +1200,14 @@ class ArrayTest(fixtures.TablesTest, AssertsExecutionResults):
lambda elem: (
x for x in elem))
+ def test_multi_dim_roundtrip(self):
+ arrtable = self.tables.arrtable
+ testing.db.execute(arrtable.insert(), dimarr=[[1, 2, 3], [4, 5, 6]])
+ eq_(
+ testing.db.scalar(select([arrtable.c.dimarr])),
+ [[-1, 0, 1], [2, 3, 4]]
+ )
+
def test_array_contained_by_exec(self):
arrtable = self.tables.arrtable
with testing.db.connect() as conn:
@@ -1030,12 +1312,98 @@ class ArrayTest(fixtures.TablesTest, AssertsExecutionResults):
set([('1', '2', '3'), ('4', '5', '6'), (('4', '5'), ('6', '7'))])
)
- def test_dimension(self):
- arrtable = self.tables.arrtable
- testing.db.execute(arrtable.insert(), dimarr=[[1, 2, 3], [4, 5, 6]])
+ def test_array_plus_native_enum_create(self):
+ m = MetaData()
+ t = Table(
+ 't', m,
+ Column(
+ 'data_1',
+ postgresql.ARRAY(
+ postgresql.ENUM('a', 'b', 'c', name='my_enum_1')
+ )
+ ),
+ Column(
+ 'data_2',
+ postgresql.ARRAY(
+ types.Enum('a', 'b', 'c', name='my_enum_2')
+ )
+ )
+ )
+
+ t.create(testing.db)
eq_(
- testing.db.scalar(select([arrtable.c.dimarr])),
- [[-1, 0, 1], [2, 3, 4]]
+ set(e['name'] for e in inspect(testing.db).get_enums()),
+ set(['my_enum_1', 'my_enum_2'])
+ )
+ t.drop(testing.db)
+ eq_(inspect(testing.db).get_enums(), [])
+
+
+class HashableFlagORMTest(fixtures.TestBase):
+ """test the various 'collection' types that they flip the 'hashable' flag
+ appropriately. [ticket:3499]"""
+
+ __only_on__ = 'postgresql'
+
+ def _test(self, type_, data):
+ Base = declarative_base(metadata=self.metadata)
+
+ class A(Base):
+ __tablename__ = 'a1'
+ id = Column(Integer, primary_key=True)
+ data = Column(type_)
+ Base.metadata.create_all(testing.db)
+ s = Session(testing.db)
+ s.add_all([
+ A(data=elem) for elem in data
+ ])
+ s.commit()
+
+ eq_(
+ [(obj.A.id, obj.data) for obj in
+ s.query(A, A.data).order_by(A.id)],
+ list(enumerate(data, 1))
+ )
+
+ @testing.provide_metadata
+ def test_array(self):
+ self._test(
+ postgresql.ARRAY(Text()),
+ [['a', 'b', 'c'], ['d', 'e', 'f']]
+ )
+
+ @testing.requires.hstore
+ @testing.provide_metadata
+ def test_hstore(self):
+ self._test(
+ postgresql.HSTORE(),
+ [
+ {'a': '1', 'b': '2', 'c': '3'},
+ {'d': '4', 'e': '5', 'f': '6'}
+ ]
+ )
+
+ @testing.provide_metadata
+ def test_json(self):
+ self._test(
+ postgresql.JSON(),
+ [
+ {'a': '1', 'b': '2', 'c': '3'},
+ {'d': '4', 'e': {'e1': '5', 'e2': '6'},
+ 'f': {'f1': [9, 10, 11]}}
+ ]
+ )
+
+ @testing.requires.postgresql_jsonb
+ @testing.provide_metadata
+ def test_jsonb(self):
+ self._test(
+ postgresql.JSONB(),
+ [
+ {'a': '1', 'b': '2', 'c': '3'},
+ {'d': '4', 'e': {'e1': '5', 'e2': '6'},
+ 'f': {'f1': [9, 10, 11]}}
+ ]
)
@@ -1372,6 +1740,19 @@ class HStoreTest(AssertsCompiledSQL, fixtures.TestBase):
{"key1": "value1", "key2": "value2"}
)
+ def test_ret_type_text(self):
+ col = column('x', HSTORE())
+
+ is_(col['foo'].type.__class__, Text)
+
+ def test_ret_type_custom(self):
+ class MyType(types.UserDefinedType):
+ pass
+
+ col = column('x', HSTORE(text_type=MyType))
+
+ is_(col['foo'].type.__class__, MyType)
+
def test_where_has_key(self):
self._test_where(
# hide from 2to3
@@ -1394,7 +1775,7 @@ class HStoreTest(AssertsCompiledSQL, fixtures.TestBase):
def test_where_defined(self):
self._test_where(
self.hashcol.defined('foo'),
- "defined(test_table.hash, %(param_1)s)"
+ "defined(test_table.hash, %(defined_1)s)"
)
def test_where_contains(self):
@@ -1425,7 +1806,7 @@ class HStoreTest(AssertsCompiledSQL, fixtures.TestBase):
def test_cols_delete_single_key(self):
self._test_cols(
self.hashcol.delete('foo'),
- "delete(test_table.hash, %(param_1)s) AS delete_1",
+ "delete(test_table.hash, %(delete_2)s) AS delete_1",
True
)
@@ -1440,7 +1821,7 @@ class HStoreTest(AssertsCompiledSQL, fixtures.TestBase):
def test_cols_delete_matching_pairs(self):
self._test_cols(
self.hashcol.delete(hstore('1', '2')),
- ("delete(test_table.hash, hstore(%(param_1)s, %(param_2)s)) "
+ ("delete(test_table.hash, hstore(%(hstore_1)s, %(hstore_2)s)) "
"AS delete_1"),
True
)
@@ -1456,7 +1837,7 @@ class HStoreTest(AssertsCompiledSQL, fixtures.TestBase):
def test_cols_hstore_pair_text(self):
self._test_cols(
hstore('foo', '3')['foo'],
- "hstore(%(param_1)s, %(param_2)s) -> %(hstore_1)s AS anon_1",
+ "hstore(%(hstore_1)s, %(hstore_2)s) -> %(hstore_3)s AS anon_1",
False
)
@@ -1481,14 +1862,14 @@ class HStoreTest(AssertsCompiledSQL, fixtures.TestBase):
self._test_cols(
self.hashcol.concat(hstore(cast(self.test_table.c.id, Text), '3')),
("test_table.hash || hstore(CAST(test_table.id AS TEXT), "
- "%(param_1)s) AS anon_1"),
+ "%(hstore_1)s) AS anon_1"),
True
)
def test_cols_concat_op(self):
self._test_cols(
hstore('foo', 'bar') + self.hashcol,
- "hstore(%(param_1)s, %(param_2)s) || test_table.hash AS anon_1",
+ "hstore(%(hstore_1)s, %(hstore_2)s) || test_table.hash AS anon_1",
True
)
@@ -2093,19 +2474,59 @@ class JSONTest(AssertsCompiledSQL, fixtures.TestBase):
"(test_table.test_column #> %(test_column_1)s) IS NULL"
)
+ def test_path_typing(self):
+ col = column('x', JSON())
+ is_(
+ col['q'].type._type_affinity, JSON
+ )
+ is_(
+ col[('q', )].type._type_affinity, JSON
+ )
+ is_(
+ col['q']['p'].type._type_affinity, JSON
+ )
+ is_(
+ col[('q', 'p')].type._type_affinity, JSON
+ )
+
+ def test_custom_astext_type(self):
+ class MyType(types.UserDefinedType):
+ pass
+
+ col = column('x', JSON(astext_type=MyType))
+
+ is_(
+ col['q'].astext.type.__class__, MyType
+ )
+
+ is_(
+ col[('q', 'p')].astext.type.__class__, MyType
+ )
+
+ is_(
+ col['q']['p'].astext.type.__class__, MyType
+ )
+
def test_where_getitem_as_text(self):
self._test_where(
self.jsoncol['bar'].astext == None,
"(test_table.test_column ->> %(test_column_1)s) IS NULL"
)
- def test_where_getitem_as_cast(self):
+ def test_where_getitem_astext_cast(self):
self._test_where(
- self.jsoncol['bar'].cast(Integer) == 5,
+ self.jsoncol['bar'].astext.cast(Integer) == 5,
"CAST(test_table.test_column ->> %(test_column_1)s AS INTEGER) "
"= %(param_1)s"
)
+ def test_where_getitem_json_cast(self):
+ self._test_where(
+ self.jsoncol['bar'].cast(Integer) == 5,
+ "CAST(test_table.test_column -> %(test_column_1)s AS INTEGER) "
+ "= %(param_1)s"
+ )
+
def test_where_path_as_text(self):
self._test_where(
self.jsoncol[("foo", 1)].astext == None,
@@ -2144,6 +2565,7 @@ class JSONRoundTripTest(fixtures.TablesTest):
{'name': 'r3', 'data': {"k1": "r3v1", "k2": "r3v2"}},
{'name': 'r4', 'data': {"k1": "r4v1", "k2": "r4v2"}},
{'name': 'r5', 'data': {"k1": "r5v1", "k2": "r5v2", "k3": 5}},
+ {'name': 'r6', 'data': {"k1": {"r6v1": {'subr': [1, 2, 3]}}}},
)
def _assert_data(self, compare, column='data'):
@@ -2164,6 +2586,15 @@ class JSONRoundTripTest(fixtures.TablesTest):
).fetchall()
eq_([d for d, in data], [None])
+ def _assert_column_is_JSON_NULL(self, column='data'):
+ col = self.tables.data_table.c[column]
+
+ data = testing.db.execute(
+ select([col]).
+ where(cast(col, String) == "null")
+ ).fetchall()
+ eq_([d for d, in data], [None])
+
def _test_insert(self, engine):
engine.execute(
self.tables.data_table.insert(),
@@ -2185,6 +2616,13 @@ class JSONRoundTripTest(fixtures.TablesTest):
)
self._assert_column_is_NULL(column='nulldata')
+ def _test_insert_nulljson_into_none_as_null(self, engine):
+ engine.execute(
+ self.tables.data_table.insert(),
+ {'name': 'r1', 'nulldata': JSON.NULL}
+ )
+ self._assert_column_is_JSON_NULL(column='nulldata')
+
def _non_native_engine(self, json_serializer=None, json_deserializer=None):
if json_serializer is not None or json_deserializer is not None:
options = {
@@ -2233,6 +2671,11 @@ class JSONRoundTripTest(fixtures.TablesTest):
engine = testing.db
self._test_insert_none_as_null(engine)
+ @testing.requires.psycopg2_native_json
+ def test_insert_native_nulljson_into_none_as_null(self):
+ engine = testing.db
+ self._test_insert_nulljson_into_none_as_null(engine)
+
def test_insert_python(self):
engine = self._non_native_engine()
self._test_insert(engine)
@@ -2245,6 +2688,10 @@ class JSONRoundTripTest(fixtures.TablesTest):
engine = self._non_native_engine()
self._test_insert_none_as_null(engine)
+ def test_insert_python_nulljson_into_none_as_null(self):
+ engine = self._non_native_engine()
+ self._test_insert_nulljson_into_none_as_null(engine)
+
def _test_custom_serialize_deserialize(self, native):
import json
@@ -2309,12 +2756,28 @@ class JSONRoundTripTest(fixtures.TablesTest):
engine = testing.db
self._fixture_data(engine)
data_table = self.tables.data_table
+
result = engine.execute(
- select([data_table.c.data]).where(
- data_table.c.data[('k1',)].astext == 'r3v1'
+ select([data_table.c.name]).where(
+ data_table.c.data[('k1', 'r6v1', 'subr')].astext == "[1, 2, 3]"
)
- ).first()
- eq_(result, ({'k1': 'r3v1', 'k2': 'r3v2'},))
+ )
+ eq_(result.scalar(), 'r6')
+
+ @testing.fails_on(
+ "postgresql < 9.4",
+ "Improvement in Postgresql behavior?")
+ def test_multi_index_query(self):
+ engine = testing.db
+ self._fixture_data(engine)
+ data_table = self.tables.data_table
+
+ result = engine.execute(
+ select([data_table.c.name]).where(
+ data_table.c.data['k1']['r6v1']['subr'].astext == "[1, 2, 3]"
+ )
+ )
+ eq_(result.scalar(), 'r6')
def test_query_returned_as_text(self):
engine = testing.db
@@ -2330,7 +2793,7 @@ class JSONRoundTripTest(fixtures.TablesTest):
self._fixture_data(engine)
data_table = self.tables.data_table
result = engine.execute(
- select([data_table.c.data['k3'].cast(Integer)]).where(
+ select([data_table.c.data['k3'].astext.cast(Integer)]).where(
data_table.c.name == 'r5')
).first()
assert isinstance(result[0], int)
@@ -2398,6 +2861,36 @@ class JSONRoundTripTest(fixtures.TablesTest):
engine = testing.db
self._test_unicode_round_trip(engine)
+ def test_eval_none_flag_orm(self):
+ Base = declarative_base()
+
+ class Data(Base):
+ __table__ = self.tables.data_table
+
+ s = Session(testing.db)
+
+ d1 = Data(name='d1', data=None, nulldata=None)
+ s.add(d1)
+ s.commit()
+
+ s.bulk_insert_mappings(
+ Data, [{"name": "d2", "data": None, "nulldata": None}]
+ )
+ eq_(
+ s.query(
+ cast(self.tables.data_table.c.data, String),
+ cast(self.tables.data_table.c.nulldata, String)
+ ).filter(self.tables.data_table.c.name == 'd1').first(),
+ ("null", None)
+ )
+ eq_(
+ s.query(
+ cast(self.tables.data_table.c.data, String),
+ cast(self.tables.data_table.c.nulldata, String)
+ ).filter(self.tables.data_table.c.name == 'd2').first(),
+ ("null", None)
+ )
+
class JSONBTest(JSONTest):
@@ -2444,7 +2937,6 @@ class JSONBTest(JSONTest):
class JSONBRoundTripTest(JSONRoundTripTest):
- __only_on__ = ('postgresql >= 9.4',)
__requires__ = ('postgresql_jsonb', )
test_type = JSONB
diff --git a/test/dialect/test_sqlite.py b/test/dialect/test_sqlite.py
index 17920c127..68fa72b10 100644
--- a/test/dialect/test_sqlite.py
+++ b/test/dialect/test_sqlite.py
@@ -535,29 +535,12 @@ class DialectTest(fixtures.TestBase, AssertsExecutionResults):
assert e.pool.__class__ is pool.NullPool
-
-class AttachedMemoryDBTest(fixtures.TestBase):
+class AttachedDBTest(fixtures.TestBase):
__only_on__ = 'sqlite'
- dbname = None
-
- def setUp(self):
- self.conn = conn = testing.db.connect()
- if self.dbname is None:
- dbname = ':memory:'
- else:
- dbname = self.dbname
- conn.execute('ATTACH DATABASE "%s" AS test_schema' % dbname)
- self.metadata = MetaData()
-
- def tearDown(self):
- self.metadata.drop_all(self.conn)
- self.conn.execute('DETACH DATABASE test_schema')
- if self.dbname:
- os.remove(self.dbname)
-
def _fixture(self):
meta = self.metadata
+ self.conn = testing.db.connect()
ct = Table(
'created', meta,
Column('id', Integer),
@@ -567,6 +550,14 @@ class AttachedMemoryDBTest(fixtures.TestBase):
meta.create_all(self.conn)
return ct
+ def setup(self):
+ self.conn = testing.db.connect()
+ self.metadata = MetaData()
+
+ def teardown(self):
+ self.metadata.drop_all(self.conn)
+ self.conn.close()
+
def test_no_tables(self):
insp = inspect(self.conn)
eq_(insp.get_table_names("test_schema"), [])
@@ -581,6 +572,18 @@ class AttachedMemoryDBTest(fixtures.TestBase):
insp = inspect(self.conn)
eq_(insp.get_table_names("test_schema"), ["created"])
+ def test_schema_names(self):
+ self._fixture()
+ insp = inspect(self.conn)
+ eq_(insp.get_schema_names(), ["main", "test_schema"])
+
+ # implicitly creates a "temp" schema
+ self.conn.execute("select * from sqlite_temp_master")
+
+ # we're not including it
+ insp = inspect(self.conn)
+ eq_(insp.get_schema_names(), ["main", "test_schema"])
+
def test_reflect_system_table(self):
meta = MetaData(self.conn)
alt_master = Table(
@@ -633,10 +636,6 @@ class AttachedMemoryDBTest(fixtures.TestBase):
eq_(row['name'], 'foo')
-class AttachedFileDBTest(AttachedMemoryDBTest):
- dbname = 'attached_db.db'
-
-
class SQLTest(fixtures.TestBase, AssertsCompiledSQL):
"""Tests SQLite-dialect specific compilation."""
diff --git a/test/engine/test_pool.py b/test/engine/test_pool.py
index 451cb8b0e..8551e1fcb 100644
--- a/test/engine/test_pool.py
+++ b/test/engine/test_pool.py
@@ -8,8 +8,9 @@ from sqlalchemy.testing import eq_, assert_raises, is_not_, is_
from sqlalchemy.testing.engines import testing_engine
from sqlalchemy.testing import fixtures
import random
-from sqlalchemy.testing.mock import Mock, call, patch
+from sqlalchemy.testing.mock import Mock, call, patch, ANY
import weakref
+import collections
join_timeout = 10
@@ -1480,6 +1481,98 @@ class QueuePoolTest(PoolTestBase):
time.sleep(1.5)
self._assert_cleanup_on_pooled_reconnect(dbapi, p)
+ def test_connect_handler_not_called_for_recycled(self):
+ """test [ticket:3497]"""
+
+ dbapi, p = self._queuepool_dbapi_fixture(
+ pool_size=2, max_overflow=2)
+
+ canary = Mock()
+
+ c1 = p.connect()
+ c2 = p.connect()
+
+ c1.close()
+ c2.close()
+
+ dbapi.shutdown(True)
+
+ bad = p.connect()
+ p._invalidate(bad)
+ bad.close()
+ assert p._invalidate_time
+
+ event.listen(p, "connect", canary.connect)
+ event.listen(p, "checkout", canary.checkout)
+
+ assert_raises(
+ Exception,
+ p.connect
+ )
+
+ p._pool.queue = collections.deque(
+ [
+ c for c in p._pool.queue
+ if c.connection is not None
+ ]
+ )
+
+ dbapi.shutdown(False)
+ c = p.connect()
+ c.close()
+
+ eq_(
+ canary.mock_calls,
+ [
+ call.connect(ANY, ANY),
+ call.checkout(ANY, ANY, ANY)
+ ]
+ )
+
+ def test_connect_checkout_handler_always_gets_info(self):
+ """test [ticket:3497]"""
+
+ dbapi, p = self._queuepool_dbapi_fixture(
+ pool_size=2, max_overflow=2)
+
+ c1 = p.connect()
+ c2 = p.connect()
+
+ c1.close()
+ c2.close()
+
+ dbapi.shutdown(True)
+
+ bad = p.connect()
+ p._invalidate(bad)
+ bad.close()
+ assert p._invalidate_time
+
+ @event.listens_for(p, "connect")
+ def connect(conn, conn_rec):
+ conn_rec.info['x'] = True
+
+ @event.listens_for(p, "checkout")
+ def checkout(conn, conn_rec, conn_f):
+ assert 'x' in conn_rec.info
+
+ assert_raises(
+ Exception,
+ p.connect
+ )
+
+ p._pool.queue = collections.deque(
+ [
+ c for c in p._pool.queue
+ if c.connection is not None
+ ]
+ )
+
+ dbapi.shutdown(False)
+ c = p.connect()
+ c.close()
+
+
def test_error_on_pooled_reconnect_cleanup_wcheckout_event(self):
dbapi, p = self._queuepool_dbapi_fixture(pool_size=1,
max_overflow=2)
diff --git a/test/ext/test_baked.py b/test/ext/test_baked.py
index 78c43fc7e..9534c29e9 100644
--- a/test/ext/test_baked.py
+++ b/test/ext/test_baked.py
@@ -1,6 +1,7 @@
from sqlalchemy.orm import Session, subqueryload, \
mapper, relationship, lazyload, clear_mappers
-from sqlalchemy.testing import eq_, is_, is_not_, assert_raises
+from sqlalchemy.testing import eq_, is_, is_not_
+from sqlalchemy.testing import assert_raises, assert_raises_message
from sqlalchemy import testing
from test.orm import _fixtures
from sqlalchemy.ext.baked import BakedQuery, baked_lazyload, BakedLazyLoader
@@ -151,25 +152,67 @@ class LikeQueryTest(BakedTest):
(8, )
)
- def test_one_no_result(self):
+ def test_one_or_none_no_result(self):
User = self.classes.User
bq = self.bakery(lambda s: s.query(User))
bq += lambda q: q.filter(User.name == 'asdf')
+ eq_(
+ bq(Session()).one_or_none(),
+ None
+ )
+
+ def test_one_or_none_result(self):
+ User = self.classes.User
+
+ bq = self.bakery(lambda s: s.query(User))
+ bq += lambda q: q.filter(User.name == 'ed')
+
+ u1 = bq(Session()).one_or_none()
+ eq_(u1.name, 'ed')
+
+ def test_one_or_none_multiple_result(self):
+ User = self.classes.User
+
+ bq = self.bakery(lambda s: s.query(User))
+ bq += lambda q: q.filter(User.name.like('%ed%'))
+
assert_raises(
+ orm_exc.MultipleResultsFound,
+ bq(Session()).one_or_none
+ )
+
+ def test_one_no_result(self):
+ User = self.classes.User
+
+ bq = self.bakery(lambda s: s.query(User))
+ bq += lambda q: q.filter(User.name == 'asdf')
+
+ assert_raises_message(
orm_exc.NoResultFound,
+ "No row was found for one()",
bq(Session()).one
)
+ def test_one_result(self):
+ User = self.classes.User
+
+ bq = self.bakery(lambda s: s.query(User))
+ bq += lambda q: q.filter(User.name == 'ed')
+
+ u1 = bq(Session()).one()
+ eq_(u1.name, 'ed')
+
def test_one_multiple_result(self):
User = self.classes.User
bq = self.bakery(lambda s: s.query(User))
bq += lambda q: q.filter(User.name.like('%ed%'))
- assert_raises(
+ assert_raises_message(
orm_exc.MultipleResultsFound,
+ "Multiple rows were found for one()",
bq(Session()).one
)
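
The new assertions above cover one_or_none() on baked query results. A hedged usage sketch, assuming a mapped User class and an open Session exist elsewhere:

    from sqlalchemy import bindparam
    from sqlalchemy.ext import baked

    bakery = baked.bakery()

    def user_by_name(session, name):
        bq = bakery(lambda s: s.query(User))
        bq += lambda q: q.filter(User.name == bindparam('name'))
        # one_or_none(): None for zero rows, the object for exactly one row,
        # MultipleResultsFound if more than one row matches
        return bq(session).params(name=name).one_or_none()
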
diff --git a/test/orm/test_bulk.py b/test/orm/test_bulk.py
index e2a1464a6..878560cf6 100644
--- a/test/orm/test_bulk.py
+++ b/test/orm/test_bulk.py
@@ -2,7 +2,7 @@ from sqlalchemy import testing
from sqlalchemy.testing import eq_
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.testing import fixtures
-from sqlalchemy import Integer, String, ForeignKey
+from sqlalchemy import Integer, String, ForeignKey, FetchedValue
from sqlalchemy.orm import mapper, Session
from sqlalchemy.testing.assertsql import CompiledSQL
from test.orm import _fixtures
@@ -156,6 +156,59 @@ class BulkInsertUpdateTest(BulkTest, _fixtures.FixtureTest):
)
+class BulkUDPostfetchTest(BulkTest, fixtures.MappedTest):
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'a', metadata,
+ Column(
+ 'id', Integer,
+ primary_key=True,
+ test_needs_autoincrement=True),
+ Column('x', Integer),
+ Column('y', Integer,
+ server_default=FetchedValue(),
+ server_onupdate=FetchedValue()))
+
+ @classmethod
+ def setup_classes(cls):
+ class A(cls.Comparable):
+ pass
+
+ @classmethod
+ def setup_mappers(cls):
+ A = cls.classes.A
+ a = cls.tables.a
+
+ mapper(A, a)
+
+ def test_insert_w_fetch(self):
+ A = self.classes.A
+
+ s = Session()
+ a1 = A(x=1)
+ s.bulk_save_objects([a1])
+ s.commit()
+
+ def test_update_w_fetch(self):
+ A = self.classes.A
+
+ s = Session()
+ a1 = A(x=1, y=2)
+ s.add(a1)
+ s.commit()
+
+ eq_(a1.id, 1) # force a load
+ a1.x = 5
+ s.expire(a1, ['y'])
+ assert 'y' not in a1.__dict__
+ s.bulk_save_objects([a1])
+ s.commit()
+
+ eq_(a1.x, 5)
+ eq_(a1.y, 2)
+
+
class BulkInheritanceTest(BulkTest, fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
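
BulkUDPostfetchTest above checks that bulk operations tolerate columns marked with FetchedValue, since no server-value postfetch is attempted. A minimal declarative sketch of the same kind of mapping, assuming an in-memory SQLite database:

    from sqlalchemy import create_engine, Column, Integer, FetchedValue
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import Session

    Base = declarative_base()

    class A(Base):
        __tablename__ = 'a'
        id = Column(Integer, primary_key=True)
        x = Column(Integer)
        # server-generated column; bulk_save_objects() will not try to
        # re-fetch its value after INSERT or UPDATE
        y = Column(Integer, server_default=FetchedValue(),
                   server_onupdate=FetchedValue())

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)

    session = Session(engine)
    session.bulk_save_objects([A(x=1), A(x=2)])
    session.commit()
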
diff --git a/test/orm/test_events.py b/test/orm/test_events.py
index ae7ba98c1..ab61077ae 100644
--- a/test/orm/test_events.py
+++ b/test/orm/test_events.py
@@ -111,6 +111,43 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest):
event.listen(mapper, meth, evt(meth), **kw)
return canary
+ def test_init_allow_kw_modify(self):
+ User, users = self.classes.User, self.tables.users
+ mapper(User, users)
+
+ @event.listens_for(User, 'init')
+ def add_name(obj, args, kwargs):
+ kwargs['name'] = 'ed'
+
+ u1 = User()
+ eq_(u1.name, 'ed')
+
+ def test_init_failure_hook(self):
+ users = self.tables.users
+
+ class Thing(object):
+ def __init__(self, **kw):
+ if kw.get('fail'):
+ raise Exception("failure")
+
+ mapper(Thing, users)
+
+ canary = Mock()
+ event.listen(Thing, 'init_failure', canary)
+
+ Thing()
+ eq_(canary.mock_calls, [])
+
+ assert_raises_message(
+ Exception,
+ "failure",
+ Thing, fail=True
+ )
+ eq_(
+ canary.mock_calls,
+ [call(ANY, (), {'fail': True})]
+ )
+
def test_listen_doesnt_force_compile(self):
User, users = self.classes.User, self.tables.users
m = mapper(User, users, properties={
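
The two mapper-event tests added in the hunk above cover the 'init' hook, whose kwargs are now mutable, and the 'init_failure' hook. A brief sketch of the listener signatures, assuming User is a mapped class; the default-name logic is illustrative:

    from sqlalchemy import event

    @event.listens_for(User, 'init')
    def apply_defaults(instance, args, kwargs):
        # runs before __init__; changes made to kwargs here are what the
        # constructor actually receives
        kwargs.setdefault('name', 'anonymous')

    @event.listens_for(User, 'init_failure')
    def constructor_failed(instance, args, kwargs):
        # runs when __init__ raises; the original exception is re-raised
        # after all listeners have been invoked
        print("construction failed:", args, kwargs)
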
@@ -1580,6 +1617,506 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest):
)
+class SessionLifecycleEventsTest(_RemoveListeners, _fixtures.FixtureTest):
+ run_inserts = None
+
+ def _fixture(self, include_address=False):
+ users, User = self.tables.users, self.classes.User
+
+ if include_address:
+ addresses, Address = self.tables.addresses, self.classes.Address
+ mapper(User, users, properties={
+ "addresses": relationship(
+ Address, cascade="all, delete-orphan")
+ })
+ mapper(Address, addresses)
+ else:
+ mapper(User, users)
+
+ listener = Mock()
+
+ sess = Session()
+
+ def start_events():
+ event.listen(
+ sess, "transient_to_pending", listener.transient_to_pending)
+ event.listen(
+ sess, "pending_to_transient", listener.pending_to_transient)
+ event.listen(
+ sess, "persistent_to_transient",
+ listener.persistent_to_transient)
+ event.listen(
+ sess, "pending_to_persistent", listener.pending_to_persistent)
+ event.listen(
+ sess, "detached_to_persistent",
+ listener.detached_to_persistent)
+ event.listen(
+ sess, "loaded_as_persistent", listener.loaded_as_persistent)
+
+ event.listen(
+ sess, "persistent_to_detached",
+ listener.persistent_to_detached)
+ event.listen(
+ sess, "deleted_to_detached", listener.deleted_to_detached)
+
+ event.listen(
+ sess, "persistent_to_deleted", listener.persistent_to_deleted)
+ event.listen(
+ sess, "deleted_to_persistent", listener.deleted_to_persistent)
+ return listener
+
+ if include_address:
+ return sess, User, Address, start_events
+ else:
+ return sess, User, start_events
+
+ def test_transient_to_pending(self):
+ sess, User, start_events = self._fixture()
+
+ listener = start_events()
+
+ @event.listens_for(sess, "transient_to_pending")
+ def trans_to_pending(session, instance):
+ assert instance in session
+ listener.flag_checked(instance)
+
+ u1 = User(name='u1')
+ sess.add(u1)
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.transient_to_pending(sess, u1),
+ call.flag_checked(u1)
+ ]
+ )
+
+ def test_pending_to_transient_via_rollback(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+
+ listener = start_events()
+
+ @event.listens_for(sess, "pending_to_transient")
+ def test_deleted_flag(session, instance):
+ assert instance not in session
+ listener.flag_checked(instance)
+
+ sess.rollback()
+ assert u1 not in sess
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.pending_to_transient(sess, u1),
+ call.flag_checked(u1)
+ ]
+ )
+
+ def test_pending_to_transient_via_expunge(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+
+ listener = start_events()
+
+ @event.listens_for(sess, "pending_to_transient")
+ def test_deleted_flag(session, instance):
+ assert instance not in session
+ listener.flag_checked(instance)
+
+ sess.expunge(u1)
+ assert u1 not in sess
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.pending_to_transient(sess, u1),
+ call.flag_checked(u1)
+ ]
+ )
+
+ def test_pending_to_persistent(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+
+ listener = start_events()
+
+ @event.listens_for(sess, "pending_to_persistent")
+ def test_flag(session, instance):
+ assert instance in session
+ assert instance._sa_instance_state.persistent
+ assert instance._sa_instance_state.key in session.identity_map
+ listener.flag_checked(instance)
+
+ sess.flush()
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.pending_to_persistent(sess, u1),
+ call.flag_checked(u1)
+ ]
+ )
+
+ def test_detached_to_persistent(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+ sess.flush()
+
+ sess.expunge(u1)
+
+ listener = start_events()
+
+ @event.listens_for(sess, "detached_to_persistent")
+ def test_deleted_flag(session, instance):
+ assert instance not in session.deleted
+ assert instance in session
+ listener.flag_checked()
+
+ sess.add(u1)
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.detached_to_persistent(sess, u1),
+ call.flag_checked()
+ ]
+ )
+
+ def test_loaded_as_persistent(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+ sess.commit()
+ sess.close()
+
+ listener = start_events()
+
+ @event.listens_for(sess, "loaded_as_persistent")
+ def test_identity_flag(session, instance):
+ assert instance in session
+ assert instance._sa_instance_state.persistent
+ assert instance._sa_instance_state.key in session.identity_map
+ assert not instance._sa_instance_state.deleted
+ assert not instance._sa_instance_state.detached
+ assert instance._sa_instance_state.persistent
+ listener.flag_checked(instance)
+
+ u1 = sess.query(User).filter_by(name='u1').one()
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.loaded_as_persistent(sess, u1),
+ call.flag_checked(u1)
+ ]
+ )
+
+ def test_detached_to_persistent_via_deleted(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+ sess.commit()
+ sess.close()
+
+ listener = start_events()
+
+ @event.listens_for(sess, "detached_to_persistent")
+ def test_deleted_flag_persistent(session, instance):
+ assert instance not in session.deleted
+ assert instance in session
+ assert not instance._sa_instance_state.deleted
+ assert not instance._sa_instance_state.detached
+ assert instance._sa_instance_state.persistent
+ listener.dtp_flag_checked(instance)
+
+ @event.listens_for(sess, "persistent_to_deleted")
+ def test_deleted_flag_detached(session, instance):
+ assert instance not in session.deleted
+ assert instance not in session
+ assert not instance._sa_instance_state.persistent
+ assert instance._sa_instance_state.deleted
+ assert not instance._sa_instance_state.detached
+ listener.ptd_flag_checked(instance)
+
+ sess.delete(u1)
+ assert u1 in sess.deleted
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.detached_to_persistent(sess, u1),
+ call.dtp_flag_checked(u1)
+ ]
+ )
+
+ sess.flush()
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.detached_to_persistent(sess, u1),
+ call.dtp_flag_checked(u1),
+ call.persistent_to_deleted(sess, u1),
+ call.ptd_flag_checked(u1),
+ ]
+ )
+
+ def test_detached_to_persistent_via_cascaded_delete(self):
+ sess, User, Address, start_events = self._fixture(include_address=True)
+
+ u1 = User(name='u1')
+ sess.add(u1)
+ a1 = Address(email_address='e1')
+ u1.addresses.append(a1)
+ sess.commit()
+ u1.addresses # ensure u1.addresses refers to a1 before detachment
+ sess.close()
+
+ listener = start_events()
+
+ @event.listens_for(sess, "detached_to_persistent")
+ def test_deleted_flag(session, instance):
+ assert instance not in session.deleted
+ assert instance in session
+ assert not instance._sa_instance_state.deleted
+ assert not instance._sa_instance_state.detached
+ assert instance._sa_instance_state.persistent
+ listener.flag_checked(instance)
+
+ sess.delete(u1)
+ assert u1 in sess.deleted
+ assert a1 in sess.deleted
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.detached_to_persistent(sess, u1),
+ call.flag_checked(u1),
+ call.detached_to_persistent(sess, a1),
+ call.flag_checked(a1),
+ ]
+ )
+
+ sess.flush()
+
+ def test_persistent_to_deleted(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+ sess.commit()
+
+ listener = start_events()
+
+ @event.listens_for(sess, "persistent_to_deleted")
+ def test_deleted_flag(session, instance):
+ assert instance not in session.deleted
+ assert instance not in session
+ assert instance._sa_instance_state.deleted
+ assert not instance._sa_instance_state.detached
+ assert not instance._sa_instance_state.persistent
+ listener.flag_checked(instance)
+
+ sess.delete(u1)
+ assert u1 in sess.deleted
+
+ eq_(
+ listener.mock_calls,
+ []
+ )
+
+ sess.flush()
+ assert u1 not in sess
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.persistent_to_deleted(sess, u1),
+ call.flag_checked(u1)
+ ]
+ )
+
+ def test_persistent_to_detached_via_expunge(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+ sess.flush()
+
+ listener = start_events()
+
+ @event.listens_for(sess, "persistent_to_detached")
+ def test_deleted_flag(session, instance):
+ assert instance not in session.deleted
+ assert instance not in session
+ assert not instance._sa_instance_state.deleted
+ assert instance._sa_instance_state.detached
+ assert not instance._sa_instance_state.persistent
+ listener.flag_checked(instance)
+
+ assert u1 in sess
+ sess.expunge(u1)
+ assert u1 not in sess
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.persistent_to_detached(sess, u1),
+ call.flag_checked(u1)
+ ]
+ )
+
+ def test_persistent_to_detached_via_expunge_all(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+ sess.flush()
+
+ listener = start_events()
+
+ @event.listens_for(sess, "persistent_to_detached")
+ def test_deleted_flag(session, instance):
+ assert instance not in session.deleted
+ assert instance not in session
+ assert not instance._sa_instance_state.deleted
+ assert instance._sa_instance_state.detached
+ assert not instance._sa_instance_state.persistent
+ listener.flag_checked(instance)
+
+ assert u1 in sess
+ sess.expunge_all()
+ assert u1 not in sess
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.persistent_to_detached(sess, u1),
+ call.flag_checked(u1)
+ ]
+ )
+
+ def test_persistent_to_transient_via_rollback(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+ sess.flush()
+
+ listener = start_events()
+
+ @event.listens_for(sess, "persistent_to_transient")
+ def test_deleted_flag(session, instance):
+ assert instance not in session.deleted
+ assert instance not in session
+ assert not instance._sa_instance_state.deleted
+ assert not instance._sa_instance_state.detached
+ assert not instance._sa_instance_state.persistent
+ assert instance._sa_instance_state.transient
+ listener.flag_checked(instance)
+
+ sess.rollback()
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.persistent_to_transient(sess, u1),
+ call.flag_checked(u1)
+ ]
+ )
+
+ def test_deleted_to_persistent_via_rollback(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+ sess.commit()
+
+ sess.delete(u1)
+ sess.flush()
+
+ listener = start_events()
+
+ @event.listens_for(sess, "deleted_to_persistent")
+ def test_deleted_flag(session, instance):
+ assert instance not in session.deleted
+ assert instance in session
+ assert not instance._sa_instance_state.deleted
+ assert not instance._sa_instance_state.detached
+ assert instance._sa_instance_state.persistent
+ listener.flag_checked(instance)
+
+ assert u1 not in sess
+ assert u1._sa_instance_state.deleted
+ assert not u1._sa_instance_state.persistent
+ assert not u1._sa_instance_state.detached
+
+ sess.rollback()
+
+ assert u1 in sess
+ assert u1._sa_instance_state.persistent
+ assert not u1._sa_instance_state.deleted
+ assert not u1._sa_instance_state.detached
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.deleted_to_persistent(sess, u1),
+ call.flag_checked(u1)
+ ]
+ )
+
+ def test_deleted_to_detached_via_commit(self):
+ sess, User, start_events = self._fixture()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+ sess.commit()
+
+ sess.delete(u1)
+ sess.flush()
+
+ listener = start_events()
+
+ @event.listens_for(sess, "deleted_to_detached")
+ def test_detached_flag(session, instance):
+ assert instance not in session.deleted
+ assert instance not in session
+ assert not instance._sa_instance_state.deleted
+ assert instance._sa_instance_state.detached
+ listener.flag_checked(instance)
+
+ assert u1 not in sess
+ assert u1._sa_instance_state.deleted
+ assert not u1._sa_instance_state.persistent
+ assert not u1._sa_instance_state.detached
+
+ sess.commit()
+
+ assert u1 not in sess
+ assert not u1._sa_instance_state.deleted
+ assert u1._sa_instance_state.detached
+
+ eq_(
+ listener.mock_calls,
+ [
+ call.deleted_to_detached(sess, u1),
+ call.flag_checked(u1)
+ ]
+ )
+
+
class MapperExtensionTest(_fixtures.FixtureTest):
"""Superseded by MapperEventsTest - test backwards
diff --git a/test/orm/test_hasparent.py b/test/orm/test_hasparent.py
index fd246b527..df4b05980 100644
--- a/test/orm/test_hasparent.py
+++ b/test/orm/test_hasparent.py
@@ -116,7 +116,7 @@ class ParentRemovalTest(fixtures.MappedTest):
User = self.classes.User
s, u1, a1 = self._fixture()
- s._expunge_state(attributes.instance_state(u1))
+ s._expunge_states([attributes.instance_state(u1)])
del u1
gc_collect()
@@ -178,7 +178,7 @@ class ParentRemovalTest(fixtures.MappedTest):
u2 = User(addresses=[a1])
s.add(u2)
s.flush()
- s._expunge_state(attributes.instance_state(u2))
+ s._expunge_states([attributes.instance_state(u2)])
del u2
gc_collect()
diff --git a/test/orm/test_lazy_relations.py b/test/orm/test_lazy_relations.py
index ea39753b4..f2e1db2da 100644
--- a/test/orm/test_lazy_relations.py
+++ b/test/orm/test_lazy_relations.py
@@ -1073,3 +1073,78 @@ class RefersToSelfLazyLoadInterferenceTest(fixtures.MappedTest):
session.query(B).options(
sa.orm.joinedload('parent').joinedload('zc')).all()
+
+class TypeCoerceTest(fixtures.MappedTest, testing.AssertsExecutionResults,):
+ """ORM-level test for [ticket:3531]"""
+
+ # mysql is having a recursion issue in the bind_expression
+ __only_on__ = ('sqlite', 'postgresql')
+
+ class StringAsInt(TypeDecorator):
+ impl = String(50)
+
+ def column_expression(self, col):
+ return sa.cast(col, Integer)
+
+ def bind_expression(self, col):
+ return sa.cast(col, String)
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'person', metadata,
+ Column("id", cls.StringAsInt, primary_key=True),
+ )
+ Table(
+ "pets", metadata,
+ Column("id", Integer, primary_key=True),
+ Column("person_id", Integer),
+ )
+
+ @classmethod
+ def setup_classes(cls):
+ class Person(cls.Basic):
+ pass
+
+ class Pet(cls.Basic):
+ pass
+
+ @classmethod
+ def setup_mappers(cls):
+ mapper(cls.classes.Person, cls.tables.person, properties=dict(
+ pets=relationship(
+ cls.classes.Pet, primaryjoin=(
+ orm.foreign(cls.tables.pets.c.person_id) ==
+ sa.cast(
+ sa.type_coerce(cls.tables.person.c.id, Integer),
+ Integer
+ )
+ )
+ )
+ ))
+
+ mapper(cls.classes.Pet, cls.tables.pets)
+
+ def test_lazyload_singlecast(self):
+ Person = self.classes.Person
+ Pet = self.classes.Pet
+
+ s = Session()
+ s.add_all([
+ Person(id=5), Pet(id=1, person_id=5)
+ ])
+ s.commit()
+
+ p1 = s.query(Person).first()
+
+ with self.sql_execution_asserter() as asserter:
+ p1.pets
+
+ asserter.assert_(
+ CompiledSQL(
+ "SELECT pets.id AS pets_id, pets.person_id "
+ "AS pets_person_id FROM pets "
+ "WHERE pets.person_id = CAST(:param_1 AS INTEGER)",
+ [{'param_1': 5}]
+ )
+ )
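
The TypeCoerceTest added above (for [ticket:3531]) relies on a TypeDecorator that stores integers as strings while casting at the SQL level. A standalone sketch of that pattern, independent of the ORM fixtures:

    from sqlalchemy import Integer, String, cast
    from sqlalchemy.types import TypeDecorator

    class StringAsInt(TypeDecorator):
        """Persisted as VARCHAR but exposed as INTEGER via SQL-level CASTs."""
        impl = String(50)

        def column_expression(self, col):
            # wraps the column in the SELECT list: CAST(col AS INTEGER)
            return cast(col, Integer)

        def bind_expression(self, bindvalue):
            # wraps bound parameters on the way in: CAST(:param AS VARCHAR)
            return cast(bindvalue, String)
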
diff --git a/test/orm/test_load_on_fks.py b/test/orm/test_load_on_fks.py
index 813d8d17a..471c8665a 100644
--- a/test/orm/test_load_on_fks.py
+++ b/test/orm/test_load_on_fks.py
@@ -301,7 +301,8 @@ class LoadOnFKsTest(AssertsExecutionResults, fixtures.TestBase):
c2 = Child()
if attach:
- sess._attach(instance_state(c2))
+ state = instance_state(c2)
+ state.session_id = sess.hash_key
if enable_relationship_rel:
sess.enable_relationship_loading(c2)
diff --git a/test/orm/test_mapper.py b/test/orm/test_mapper.py
index 264b386d4..6845ababb 100644
--- a/test/orm/test_mapper.py
+++ b/test/orm/test_mapper.py
@@ -8,7 +8,7 @@ from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.engine import default
from sqlalchemy.orm import mapper, relationship, backref, \
create_session, class_mapper, configure_mappers, reconstructor, \
- validates, aliased, defer, deferred, synonym, attributes, \
+ aliased, deferred, synonym, attributes, \
column_property, composite, dynamic_loader, \
comparable_property, Session
from sqlalchemy.orm.persistence import _sort_states
@@ -19,6 +19,7 @@ from sqlalchemy.testing.assertsql import CompiledSQL
import logging
import logging.handlers
+
class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
__dialect__ = 'default'
@@ -26,33 +27,34 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""A backref name may not shadow an existing property name."""
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
-
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(Address, addresses)
mapper(User, users,
- properties={
- 'addresses':relationship(Address, backref='email_address')
- })
+ properties={
+ 'addresses': relationship(Address, backref='email_address')
+ })
assert_raises(sa.exc.ArgumentError, sa.orm.configure_mappers)
def test_update_attr_keys(self):
- """test that update()/insert() use the correct key when given InstrumentedAttributes."""
+ """test that update()/insert() use the correct key when given
+ InstrumentedAttributes."""
User, users = self.classes.User, self.tables.users
-
mapper(User, users, properties={
- 'foobar':users.c.name
+ 'foobar': users.c.name
})
- users.insert().values({User.foobar:'name1'}).execute()
- eq_(sa.select([User.foobar]).where(User.foobar=='name1').execute().fetchall(), [('name1',)])
+ users.insert().values({User.foobar: 'name1'}).execute()
+ eq_(sa.select([User.foobar]).where(User.foobar == 'name1').
+ execute().fetchall(), [('name1',)])
- users.update().values({User.foobar:User.foobar + 'foo'}).execute()
- eq_(sa.select([User.foobar]).where(User.foobar=='name1foo').execute().fetchall(), [('name1foo',)])
+ users.update().values({User.foobar: User.foobar + 'foo'}).execute()
+ eq_(sa.select([User.foobar]).where(User.foobar == 'name1foo').
+ execute().fetchall(), [('name1foo',)])
def test_utils(self):
users = self.tables.users
@@ -63,12 +65,12 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
class Foo(object):
x = "something"
+
@property
def y(self):
return "something else"
-
- m = mapper(Foo, users, properties={"addresses":relationship(Address)})
+ m = mapper(Foo, users, properties={"addresses": relationship(Address)})
mapper(Address, addresses)
a1 = aliased(Foo)
@@ -100,14 +102,13 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
class Foo(object):
x = "something"
+
@property
def y(self):
return "something else"
m = mapper(Foo, users)
a1 = aliased(Foo)
- f = Foo()
-
for arg, key, ret in [
(m, "x", Foo.x),
(Foo, "x", Foo.x),
@@ -122,7 +123,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def boom():
raise Exception("it broke")
mapper(User, users, properties={
- 'addresses':relationship(boom)
+ 'addresses': relationship(boom)
})
# test that QueryableAttribute.__str__() doesn't
@@ -137,12 +138,11 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""
Address, addresses, User = (self.classes.Address,
- self.tables.addresses,
- self.classes.User)
-
+ self.tables.addresses,
+ self.classes.User)
mapper(Address, addresses, properties={
- 'user':relationship(User)
+ 'user': relationship(User)
})
try:
@@ -156,8 +156,8 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"initialize - can't proceed with "
"initialization of other mappers. "
"Original exception was: Class "
- "'test.orm._fixtures.User' is not mapped$"
- , configure_mappers)
+ "'test.orm._fixtures.User' is not mapped$",
+ configure_mappers)
def test_column_prefix(self):
users, User = self.tables.users, self.classes.User
@@ -169,7 +169,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
s = create_session()
u = s.query(User).get(7)
eq_(u._name, 'jack')
- eq_(u._id,7)
+ eq_(u._id, 7)
u2 = s.query(User).filter_by(user_name='jack').one()
assert u is u2
@@ -190,16 +190,16 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
still triggers a check against all mappers."""
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users)
sa.orm.configure_mappers()
assert sa.orm.mapperlib.Mapper._new_mappers is False
m = mapper(Address, addresses, properties={
- 'user': relationship(User, backref="addresses")})
+ 'user': relationship(User, backref="addresses")})
assert m.configured is False
assert sa.orm.mapperlib.Mapper._new_mappers is True
@@ -232,13 +232,13 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_column_not_present(self):
users, addresses, User = (self.tables.users,
- self.tables.addresses,
- self.classes.User)
+ self.tables.addresses,
+ self.classes.User)
assert_raises_message(sa.exc.ArgumentError,
"not represented in the mapper's table",
- mapper, User, users, properties={'foo'
- : addresses.c.user_id})
+ mapper, User, users,
+ properties={'foo': addresses.c.user_id})
def test_constructor_exc(self):
"""TypeError is raised for illegal constructor args,
@@ -246,10 +246,11 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
users, addresses = self.tables.users, self.tables.addresses
-
class Foo(object):
+
def __init__(self):
pass
+
class Bar(object):
pass
@@ -266,13 +267,15 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""
class Foo(object):
+
def __init__(self, id):
self.id = id
m = MetaData()
foo_t = Table('foo', m,
- Column('id', String, primary_key=True)
- )
+ Column('id', String, primary_key=True)
+ )
m = mapper(Foo, foo_t)
+
class DontCompareMeToString(int):
if util.py2k:
def __lt__(self, other):
@@ -292,24 +295,23 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
[states[4], states[3], states[0], states[1], states[2]]
)
-
def test_props(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
- m = mapper(User, users, properties = {
- 'addresses' : relationship(mapper(Address, addresses))
+ m = mapper(User, users, properties={
+ 'addresses': relationship(mapper(Address, addresses))
})
assert User.addresses.property is m.get_property('addresses')
def test_unicode_relationship_backref_names(self):
# test [ticket:2901]
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(Address, addresses)
mapper(User, users, properties={
@@ -322,56 +324,62 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_configure_on_prop_1(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
- mapper(User, users, properties = {
- 'addresses' : relationship(mapper(Address, addresses))
+ mapper(User, users, properties={
+ 'addresses': relationship(mapper(Address, addresses))
})
- User.addresses.any(Address.email_address=='foo@bar.com')
+ User.addresses.any(Address.email_address == 'foo@bar.com')
def test_configure_on_prop_2(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
- mapper(User, users, properties = {
- 'addresses' : relationship(mapper(Address, addresses))
+ mapper(User, users, properties={
+ 'addresses': relationship(mapper(Address, addresses))
})
- eq_(str(User.id == 3), str(users.c.id==3))
+ eq_(str(User.id == 3), str(users.c.id == 3))
def test_configure_on_prop_3(self):
users, addresses, User = (self.tables.users,
- self.tables.addresses,
- self.classes.User)
+ self.tables.addresses,
+ self.classes.User)
+
+ class Foo(User):
+ pass
- class Foo(User):pass
mapper(User, users)
mapper(Foo, addresses, inherits=User, properties={
- 'address_id': addresses.c.id
- })
+ 'address_id': addresses.c.id
+ })
assert getattr(Foo().__class__, 'name').impl is not None
def test_deferred_subclass_attribute_instrument(self):
users, addresses, User = (self.tables.users,
- self.tables.addresses,
- self.classes.User)
+ self.tables.addresses,
+ self.classes.User)
+
+ class Foo(User):
+ pass
- class Foo(User):pass
mapper(User, users)
configure_mappers()
mapper(Foo, addresses, inherits=User, properties={
- 'address_id': addresses.c.id
- })
+ 'address_id': addresses.c.id
+ })
assert getattr(Foo().__class__, 'name').impl is not None
def test_check_descriptor_as_method(self):
User, users = self.classes.User, self.tables.users
m = mapper(User, users)
+
class MyClass(User):
+
def foo(self):
pass
m._is_userland_descriptor(MyClass.foo)
@@ -379,7 +387,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_configure_on_get_props_1(self):
User, users = self.classes.User, self.tables.users
- m =mapper(User, users)
+ m = mapper(User, users)
assert not m.configured
assert list(m.iterate_properties)
assert m.configured
@@ -387,29 +395,30 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_configure_on_get_props_2(self):
User, users = self.classes.User, self.tables.users
- m= mapper(User, users)
+ m = mapper(User, users)
assert not m.configured
assert m.get_property('name')
assert m.configured
def test_configure_on_get_props_3(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
- m= mapper(User, users)
+ m = mapper(User, users)
assert not m.configured
configure_mappers()
m2 = mapper(Address, addresses, properties={
- 'user':relationship(User, backref='addresses')
- })
+ 'user': relationship(User, backref='addresses')
+ })
assert m.get_property('addresses')
def test_info(self):
users = self.tables.users
Address = self.classes.Address
+
class MyComposite(object):
pass
for constructor, args in [
@@ -434,17 +443,17 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
# create specific tables here as we don't want
# users.c.id.info to be pre-initialized
users = Table('u', m, Column('id', Integer, primary_key=True),
- Column('name', String))
+ Column('name', String))
addresses = Table('a', m, Column('id', Integer, primary_key=True),
- Column('name', String),
- Column('user_id', Integer, ForeignKey('u.id')))
+ Column('name', String),
+ Column('user_id', Integer, ForeignKey('u.id')))
Address = self.classes.Address
User = self.classes.User
mapper(User, users, properties={
- "name_lower": column_property(func.lower(users.c.name)),
- "addresses": relationship(Address)
- })
+ "name_lower": column_property(func.lower(users.c.name)),
+ "addresses": relationship(Address)
+ })
mapper(Address, addresses)
# attr.info goes down to the original Column object
@@ -460,18 +469,19 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
# same for relationships
is_(User.addresses.info, User.addresses.property.info)
-
def test_add_property(self):
users, addresses, Address = (self.tables.users,
- self.tables.addresses,
- self.classes.Address)
+ self.tables.addresses,
+ self.classes.Address)
assert_col = []
class User(fixtures.ComparableEntity):
+
def _get_name(self):
assert_col.append(('get', self._name))
return self._name
+
def _set_name(self, name):
assert_col.append(('set', name))
self._name = name
@@ -503,7 +513,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
m.add_property('addresses', relationship(Address))
m.add_property('uc_name', sa.orm.comparable_property(UCComparator))
m.add_property('uc_name2', sa.orm.comparable_property(
- UCComparator, User.uc_name2))
+ UCComparator, User.uc_name2))
sess = create_session(autocommit=False)
assert sess.query(User).get(7)
@@ -534,7 +544,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
User()
m2 = mapper(Address, addresses, properties={
- 'user':relationship(User, backref="addresses")
+ 'user': relationship(User, backref="addresses")
})
# configure mappers takes place when User is generated
User()
@@ -545,7 +555,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
users, User = self.tables.users, self.classes.User
m = mapper(User, users)
- m.add_property('_name',users.c.name)
+ m.add_property('_name', users.c.name)
m.add_property('name', synonym('_name'))
sess = create_session()
@@ -572,8 +582,8 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
addresses, Address = self.tables.addresses, self.classes.Address
m = mapper(User, users, properties={
- "addresses": relationship(Address)
- })
+ "addresses": relationship(Address)
+ })
mapper(Address, addresses)
assert_raises_message(
@@ -588,14 +598,15 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_add_column_prop_deannotate(self):
User, users = self.classes.User, self.tables.users
Address, addresses = self.classes.Address, self.tables.addresses
+
class SubUser(User):
pass
m = mapper(User, users)
m2 = mapper(SubUser, addresses, inherits=User, properties={
- 'address_id': addresses.c.id
- })
+ 'address_id': addresses.c.id
+ })
m3 = mapper(Address, addresses, properties={
- 'foo':relationship(m2)
+ 'foo': relationship(m2)
})
# add property using annotated User.name,
# needs to be deannotated
@@ -612,7 +623,8 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"addresses_1.email_address AS "
"addresses_1_email_address, "
"users_1.name || :name_1 AS anon_1 "
- "FROM addresses JOIN (users AS users_1 JOIN addresses AS addresses_1 ON users_1.id = "
+ "FROM addresses JOIN (users AS users_1 JOIN addresses "
+ "AS addresses_1 ON users_1.id = "
"addresses_1.user_id) ON "
"users_1.id = addresses.user_id"
)
@@ -638,20 +650,23 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
assert User.y.property.columns[0] is not expr2
assert User.y.property.columns[0].element.\
- _raw_columns[0] is users.c.name
+ _raw_columns[0] is users.c.name
assert User.y.property.columns[0].element.\
- _raw_columns[1] is users.c.id
+ _raw_columns[1] is users.c.id
def test_synonym_replaces_backref(self):
addresses, users, User = (self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.users,
+ self.classes.User)
assert_calls = []
+
class Address(object):
+
def _get_user(self):
assert_calls.append("get")
return self._user
+
def _set_user(self, user):
assert_calls.append("set")
self._user = user
@@ -659,20 +674,20 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
# synonym is created against nonexistent prop
mapper(Address, addresses, properties={
- 'user':synonym('_user')
+ 'user': synonym('_user')
})
sa.orm.configure_mappers()
# later, backref sets up the prop
mapper(User, users, properties={
- 'addresses':relationship(Address, backref='_user')
+ 'addresses': relationship(Address, backref='_user')
})
sess = create_session()
u1 = sess.query(User).get(7)
u2 = sess.query(User).get(8)
        # comparison ops need to work
- a1 = sess.query(Address).filter(Address.user==u1).one()
+ a1 = sess.query(Address).filter(Address.user == u1).one()
eq_(a1.id, 1)
a1.user = u2
assert a1.user is u2
@@ -680,16 +695,19 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_self_ref_synonym(self):
t = Table('nodes', MetaData(),
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('parent_id', Integer, ForeignKey('nodes.id')))
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('parent_id', Integer, ForeignKey('nodes.id')))
class Node(object):
pass
mapper(Node, t, properties={
- '_children':relationship(Node, backref=backref('_parent', remote_side=t.c.id)),
- 'children':synonym('_children'),
- 'parent':synonym('_parent')
+ '_children': relationship(
+ Node, backref=backref('_parent', remote_side=t.c.id)),
+ 'children': synonym('_children'),
+ 'parent': synonym('_parent')
})
n1 = Node()
@@ -702,13 +720,14 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_non_primary_identity_class(self):
User = self.classes.User
users, addresses = self.tables.users, self.tables.addresses
+
class AddressUser(User):
pass
m1 = mapper(User, users, polymorphic_identity='user')
m2 = mapper(AddressUser, addresses, inherits=User,
- polymorphic_identity='address', properties={
- 'address_id': addresses.c.id
- })
+ polymorphic_identity='address', properties={
+ 'address_id': addresses.c.id
+ })
m3 = mapper(AddressUser, addresses, non_primary=True)
assert m3._identity_class is m2._identity_class
eq_(
@@ -719,6 +738,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_reassign_polymorphic_identity_warns(self):
User = self.classes.User
users = self.tables.users
+
class MyUser(User):
pass
m1 = mapper(User, users, polymorphic_on=users.c.name,
@@ -730,17 +750,16 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
MyUser, users, inherits=User, polymorphic_identity='user'
)
-
def test_illegal_non_primary(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users)
mapper(Address, addresses)
mapper(User, users, non_primary=True, properties={
- 'addresses':relationship(Address)
+ 'addresses': relationship(Address)
})
assert_raises_message(
sa.exc.ArgumentError,
@@ -762,62 +781,90 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
class Base(object):
pass
+
class Sub(Base):
pass
mapper(Base, users)
assert_raises_message(sa.exc.InvalidRequestError,
- "Configure a primary mapper first",
- mapper, Sub, addresses, non_primary=True
- )
+ "Configure a primary mapper first",
+ mapper, Sub, addresses, non_primary=True
+ )
def test_prop_filters(self):
t = Table('person', MetaData(),
Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column('type', String(128)),
Column('name', String(128)),
Column('employee_number', Integer),
Column('boss_id', Integer, ForeignKey('person.id')),
Column('vendor_id', Integer))
- class Person(object): pass
- class Vendor(Person): pass
- class Employee(Person): pass
- class Manager(Employee): pass
- class Hoho(object): pass
- class Lala(object): pass
- class Fub(object):pass
- class Frob(object):pass
+ class Person(object):
+ pass
+
+ class Vendor(Person):
+ pass
+
+ class Employee(Person):
+ pass
+
+ class Manager(Employee):
+ pass
+
+ class Hoho(object):
+ pass
+
+ class Lala(object):
+ pass
+
+ class Fub(object):
+ pass
+
+ class Frob(object):
+ pass
+
class HasDef(object):
+
def name(self):
pass
- class Empty(object):pass
- empty = mapper(Empty, t, properties={'empty_id' : t.c.id},
- include_properties=[])
+ class Empty(object):
+ pass
+
+ mapper(
+ Empty, t, properties={'empty_id': t.c.id},
+ include_properties=[])
p_m = mapper(Person, t, polymorphic_on=t.c.type,
include_properties=('id', 'type', 'name'))
e_m = mapper(Employee, inherits=p_m,
- polymorphic_identity='employee', properties={'boss'
- : relationship(Manager, backref=backref('peon'),
- remote_side=t.c.id)},
+ polymorphic_identity='employee',
+ properties={
+ 'boss': relationship(
+ Manager, backref=backref('peon'),
+ remote_side=t.c.id)},
exclude_properties=('vendor_id', ))
- m_m = mapper(Manager, inherits=e_m, polymorphic_identity='manager',
- include_properties=('id', 'type'))
+ mapper(
+ Manager, inherits=e_m, polymorphic_identity='manager',
+ include_properties=('id', 'type'))
- v_m = mapper(Vendor, inherits=p_m, polymorphic_identity='vendor',
- exclude_properties=('boss_id', 'employee_number'))
- h_m = mapper(Hoho, t, include_properties=('id', 'type', 'name'))
- l_m = mapper(Lala, t, exclude_properties=('vendor_id', 'boss_id'),
- column_prefix="p_")
+ mapper(
+ Vendor, inherits=p_m, polymorphic_identity='vendor',
+ exclude_properties=('boss_id', 'employee_number'))
+ mapper(Hoho, t, include_properties=('id', 'type', 'name'))
+ mapper(
+ Lala, t, exclude_properties=('vendor_id', 'boss_id'),
+ column_prefix="p_")
- hd_m = mapper(HasDef, t, column_prefix="h_")
+ mapper(HasDef, t, column_prefix="h_")
- fb_m = mapper(Fub, t, include_properties=(t.c.id, t.c.type))
- frb_m = mapper(Frob, t, column_prefix='f_',
- exclude_properties=(t.c.boss_id,
- 'employee_number', t.c.vendor_id))
+ mapper(Fub, t, include_properties=(t.c.id, t.c.type))
+ mapper(
+ Frob, t, column_prefix='f_',
+ exclude_properties=(
+ t.c.boss_id,
+ 'employee_number', t.c.vendor_id))
configure_mappers()
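
The reformatted test_prop_filters hunk above is built around the include_properties, exclude_properties and column_prefix mapper arguments. A simplified classical-mapping sketch (without the inheritance used in the test), assuming person_table is a Table defined elsewhere:

    from sqlalchemy.orm import mapper

    class Employee(object):
        pass

    class Vendor(object):
        pass

    # map only the listed columns; others on the table are ignored
    mapper(Employee, person_table,
           include_properties=('id', 'type', 'name'))

    # map every column except these two
    mapper(Vendor, person_table,
           exclude_properties=('boss_id', 'employee_number'))
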
@@ -832,13 +879,13 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
eq_(have, want)
assert_props(HasDef, ['h_boss_id', 'h_employee_number', 'h_id',
- 'name', 'h_name', 'h_vendor_id', 'h_type'])
+ 'name', 'h_name', 'h_vendor_id', 'h_type'])
assert_props(Person, ['id', 'name', 'type'])
assert_instrumented(Person, ['id', 'name', 'type'])
assert_props(Employee, ['boss', 'boss_id', 'employee_number',
'id', 'name', 'type'])
- assert_instrumented(Employee,['boss', 'boss_id', 'employee_number',
- 'id', 'name', 'type'])
+ assert_instrumented(Employee, ['boss', 'boss_id', 'employee_number',
+ 'id', 'name', 'type'])
assert_props(Manager, ['boss', 'boss_id', 'employee_number', 'peon',
'id', 'name', 'type'])
@@ -851,7 +898,6 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
assert_props(Fub, ['id', 'type'])
assert_props(Frob, ['f_id', 'f_type', 'f_name', ])
-
# putting the discriminator column in exclude_properties,
# very weird. As of 0.7.4 this re-maps it.
class Foo(Person):
@@ -869,10 +915,13 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_prop_filters_defaults(self):
metadata = self.metadata
t = Table('t', metadata,
- Column('id', Integer(), primary_key=True, test_needs_autoincrement=True),
- Column('x', Integer(), nullable=False, server_default='0')
- )
+ Column(
+ 'id', Integer(), primary_key=True,
+ test_needs_autoincrement=True),
+ Column('x', Integer(), nullable=False, server_default='0')
+ )
t.create()
+
class A(object):
pass
mapper(A, t, include_properties=['id'])
@@ -882,6 +931,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_we_dont_call_bool(self):
class NoBoolAllowed(object):
+
def __bool__(self):
raise Exception("nope")
mapper(NoBoolAllowed, self.tables.users)
@@ -894,6 +944,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_we_dont_call_eq(self):
class NoEqAllowed(object):
+
def __eq__(self, other):
raise Exception("nope")
@@ -901,7 +952,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
Address = self.classes.Address
mapper(NoEqAllowed, users, properties={
- 'addresses':relationship(Address, backref='user')
+ 'addresses': relationship(Address, backref='user')
})
mapper(Address, addresses)
@@ -919,9 +970,8 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""Test implicit merging of two cols raises."""
addresses, users, User = (self.tables.addresses,
- self.tables.users,
- self.classes.User)
-
+ self.tables.users,
+ self.classes.User)
usersaddresses = sa.join(users, addresses,
users.c.id == addresses.c.user_id)
@@ -935,14 +985,13 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""Mapping to a join"""
User, addresses, users = (self.classes.User,
- self.tables.addresses,
- self.tables.users)
-
+ self.tables.addresses,
+ self.tables.users)
usersaddresses = sa.join(users, addresses, users.c.id
== addresses.c.user_id)
mapper(User, usersaddresses, primary_key=[users.c.id],
- properties={'add_id':addresses.c.id}
+ properties={'add_id': addresses.c.id}
)
l = create_session().query(User).order_by(users.c.id).all()
eq_(l, self.static.user_result[:3])
@@ -951,9 +1000,8 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""Mapping to a join"""
User, addresses, users = (self.classes.User,
- self.tables.addresses,
- self.tables.users)
-
+ self.tables.addresses,
+ self.tables.users)
usersaddresses = sa.join(users, addresses, users.c.id
== addresses.c.user_id)
@@ -965,13 +1013,13 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_mapping_to_join_no_pk(self):
email_bounces, addresses, Address = (self.tables.email_bounces,
- self.tables.addresses,
- self.classes.Address)
+ self.tables.addresses,
+ self.classes.Address)
m = mapper(Address,
- addresses.join(email_bounces),
- properties={'id':[addresses.c.id, email_bounces.c.id]}
- )
+ addresses.join(email_bounces),
+ properties={'id': [addresses.c.id, email_bounces.c.id]}
+ )
configure_mappers()
assert addresses in m._pks_by_table
assert email_bounces not in m._pks_by_table
@@ -988,10 +1036,8 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""Mapping to an outer join with a nullable composite primary key."""
users, addresses, User = (self.tables.users,
- self.tables.addresses,
- self.classes.User)
-
-
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users.outerjoin(addresses),
primary_key=[users.c.id, addresses.c.id],
@@ -1013,13 +1059,11 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""test the allow_partial_pks=False flag."""
users, addresses, User = (self.tables.users,
- self.tables.addresses,
- self.classes.User)
-
-
+ self.tables.addresses,
+ self.classes.User)
mapper(User, users.outerjoin(addresses),
- allow_partial_pks=False,
+ allow_partial_pks=False,
primary_key=[users.c.id, addresses.c.id],
properties=dict(
address_id=addresses.c.id))
@@ -1037,11 +1081,11 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_scalar_pk_arg(self):
users, Keyword, items, Item, User, keywords = (self.tables.users,
- self.classes.Keyword,
- self.tables.items,
- self.classes.Item,
- self.classes.User,
- self.tables.keywords)
+ self.classes.Keyword,
+ self.tables.items,
+ self.classes.Item,
+ self.classes.User,
+ self.tables.keywords)
m1 = mapper(Item, items, primary_key=[items.c.id])
m2 = mapper(Keyword, keywords, primary_key=keywords.c.id)
@@ -1051,18 +1095,17 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
assert m2.primary_key[0] is keywords.c.id
assert m3.primary_key[0] is users.c.id
-
def test_custom_join(self):
"""select_from totally replace the FROM parameters."""
- users, items, order_items, orders, Item, User, Order = (self.tables.users,
- self.tables.items,
- self.tables.order_items,
- self.tables.orders,
- self.classes.Item,
- self.classes.User,
- self.classes.Order)
-
+ users, items, order_items, orders, Item, User, Order = (
+ self.tables.users,
+ self.tables.items,
+ self.tables.order_items,
+ self.tables.orders,
+ self.classes.Item,
+ self.classes.User,
+ self.classes.Order)
mapper(Item, items)
@@ -1086,18 +1129,24 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
mapper(User, users, order_by=users.c.name.desc())
- assert "order by users.name desc" in str(create_session().query(User).statement).lower()
- assert "order by" not in str(create_session().query(User).order_by(None).statement).lower()
- assert "order by users.name asc" in str(create_session().query(User).order_by(User.name.asc()).statement).lower()
+ assert "order by users.name desc" in \
+ str(create_session().query(User).statement).lower()
+ assert "order by" not in \
+ str(create_session().query(User).order_by(None).statement).lower()
+ assert "order by users.name asc" in \
+ str(create_session().query(User).order_by(
+ User.name.asc()).statement).lower()
eq_(
create_session().query(User).all(),
- [User(id=7, name='jack'), User(id=9, name='fred'), User(id=8, name='ed'), User(id=10, name='chuck')]
+ [User(id=7, name='jack'), User(id=9, name='fred'),
+ User(id=8, name='ed'), User(id=10, name='chuck')]
)
eq_(
create_session().query(User).order_by(User.name).all(),
- [User(id=10, name='chuck'), User(id=8, name='ed'), User(id=9, name='fred'), User(id=7, name='jack')]
+ [User(id=10, name='chuck'), User(id=8, name='ed'),
+ User(id=9, name='fred'), User(id=7, name='jack')]
)
    # Raises an "expression evaluation not supported" error at prepare time
@@ -1106,9 +1155,8 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""Mapping to a SELECT statement that has functions in it."""
addresses, users, User = (self.tables.addresses,
- self.tables.users,
- self.classes.User)
-
+ self.tables.users,
+ self.classes.User)
s = sa.select([users,
(users.c.id * 2).label('concat'),
@@ -1129,29 +1177,29 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
User, users = self.classes.User, self.tables.users
-
mapper(User, users)
session = create_session()
q = session.query(User)
eq_(q.count(), 4)
- eq_(q.filter(User.id.in_([8,9])).count(), 2)
- eq_(q.filter(users.c.id.in_([8,9])).count(), 2)
+ eq_(q.filter(User.id.in_([8, 9])).count(), 2)
+ eq_(q.filter(users.c.id.in_([8, 9])).count(), 2)
eq_(session.query(User.id).count(), 4)
eq_(session.query(User.id).filter(User.id.in_((8, 9))).count(), 2)
def test_many_to_many_count(self):
- keywords, items, item_keywords, Keyword, Item = (self.tables.keywords,
- self.tables.items,
- self.tables.item_keywords,
- self.classes.Keyword,
- self.classes.Item)
+ keywords, items, item_keywords, Keyword, Item = (
+ self.tables.keywords,
+ self.tables.items,
+ self.tables.item_keywords,
+ self.classes.Keyword,
+ self.classes.Item)
mapper(Keyword, keywords)
mapper(Item, items, properties=dict(
- keywords = relationship(Keyword, item_keywords, lazy='select')))
+ keywords=relationship(Keyword, item_keywords, lazy='select')))
session = create_session()
q = (session.query(Item).
@@ -1164,9 +1212,9 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""Overriding a column raises an error."""
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
def go():
mapper(User, users,
@@ -1179,10 +1227,9 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""exclude_properties cancels the error."""
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
-
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(User, users,
exclude_properties=['name'],
@@ -1195,9 +1242,9 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
"""The column being named elsewhere also cancels the error,"""
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(User, users,
properties=dict(
@@ -1206,28 +1253,30 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_synonym(self):
users, addresses, Address = (self.tables.users,
- self.tables.addresses,
- self.classes.Address)
-
+ self.tables.addresses,
+ self.classes.Address)
assert_col = []
+
class extendedproperty(property):
attribute = 123
class User(object):
+
def _get_name(self):
assert_col.append(('get', self.name))
return self.name
+
def _set_name(self, name):
assert_col.append(('set', name))
self.name = name
uname = extendedproperty(_get_name, _set_name)
mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses), lazy='select'),
- uname = synonym('name'),
- adlist = synonym('addresses'),
- adname = synonym('addresses')
+ addresses=relationship(mapper(Address, addresses), lazy='select'),
+ uname=synonym('name'),
+ adlist=synonym('addresses'),
+ adname=synonym('addresses')
))
# ensure the synonym can get at the proxied comparators without
@@ -1251,7 +1300,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
row = sess.query(User.id, User.uname).first()
assert row.uname == row[1]
- u = sess.query(User).filter(User.uname=='jack').one()
+ u = sess.query(User).filter(User.uname == 'jack').one()
fixture = self.static.user_address_result[0].addresses
eq_(u.adlist, fixture)
@@ -1274,25 +1323,24 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
eq_(User.uname.attribute, 123)
def test_synonym_of_synonym(self):
- users, User = (self.tables.users,
- self.classes.User)
+ users, User = (self.tables.users,
+ self.classes.User)
mapper(User, users, properties={
- 'x':synonym('id'),
- 'y':synonym('x')
+ 'x': synonym('id'),
+ 'y': synonym('x')
})
s = Session()
- u = s.query(User).filter(User.y==8).one()
+ u = s.query(User).filter(User.y == 8).one()
eq_(u.y, 8)
-
def test_synonym_column_location(self):
users, User = self.tables.users, self.classes.User
def go():
mapper(User, users, properties={
- 'not_name':synonym('_name', map_column=True)})
+ 'not_name': synonym('_name', map_column=True)})
assert_raises_message(
sa.exc.ArgumentError,
@@ -1301,28 +1349,30 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
go)
def test_column_synonyms(self):
- """Synonyms which automatically instrument properties, set up aliased column, etc."""
+ """Synonyms which automatically instrument properties,
+ set up aliased column, etc."""
addresses, users, Address = (self.tables.addresses,
- self.tables.users,
- self.classes.Address)
-
-
+ self.tables.users,
+ self.classes.Address)
assert_col = []
+
class User(object):
+
def _get_name(self):
assert_col.append(('get', self._name))
return self._name
+
def _set_name(self, name):
assert_col.append(('set', name))
self._name = name
name = property(_get_name, _set_name)
mapper(Address, addresses)
- mapper(User, users, properties = {
- 'addresses':relationship(Address, lazy='select'),
- 'name':synonym('_name', map_column=True)
+ mapper(User, users, properties={
+ 'addresses': relationship(Address, lazy='select'),
+ 'name': synonym('_name', map_column=True)
})
# test compile
@@ -1369,6 +1419,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
return "method1"
from sqlalchemy.orm.properties import ColumnProperty
+
class UCComparator(ColumnProperty.Comparator):
__hash__ = None
@@ -1388,6 +1439,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def map_(with_explicit_property):
class User(object):
+
@extendedproperty
def uc_name(self):
if self.name is None:
@@ -1398,7 +1450,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
else:
args = (UCComparator,)
mapper(User, users, properties=dict(
- uc_name = sa.orm.comparable_property(*args)))
+ uc_name=sa.orm.comparable_property(*args)))
return User
for User in (map_(True), map_(False)):
@@ -1415,12 +1467,13 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
assert_raises_message(
AttributeError,
"Neither 'extendedproperty' object nor 'UCComparator' "
- "object associated with User.uc_name has an attribute 'nonexistent'",
+ "object associated with User.uc_name has an attribute "
+ "'nonexistent'",
getattr, User.uc_name, 'nonexistent')
# test compile
assert not isinstance(User.uc_name == 'jack', bool)
- u = q.filter(User.uc_name=='JACK').one()
+ u = q.filter(User.uc_name == 'JACK').one()
assert u.uc_name == "JACK"
assert u not in sess.dirty
@@ -1447,10 +1500,11 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
class MyComparator(sa.orm.properties.ColumnProperty.Comparator):
__hash__ = None
+
def __eq__(self, other):
# lower case comparison
return func.lower(self.__clause_element__()
- ) == func.lower(other)
+ ) == func.lower(other)
def intersects(self, other):
# non-standard comparator
@@ -1458,7 +1512,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
mapper(User, users, properties={
'name': sa.orm.column_property(users.c.name,
- comparator_factory=MyComparator)
+ comparator_factory=MyComparator)
})
assert_raises_message(
@@ -1470,39 +1524,41 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
eq_(
str((User.name == 'ed').compile(
- dialect=sa.engine.default.DefaultDialect())),
+ dialect=sa.engine.default.DefaultDialect())),
"lower(users.name) = lower(:lower_1)")
eq_(
str((User.name.intersects('ed')).compile(
- dialect=sa.engine.default.DefaultDialect())),
+ dialect=sa.engine.default.DefaultDialect())),
"users.name &= :name_1")
-
def test_reentrant_compile(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
class MyFakeProperty(sa.orm.properties.ColumnProperty):
+
def post_instrument_class(self, mapper):
super(MyFakeProperty, self).post_instrument_class(mapper)
configure_mappers()
m1 = mapper(User, users, properties={
- 'name':MyFakeProperty(users.c.name)
+ 'name': MyFakeProperty(users.c.name)
})
m2 = mapper(Address, addresses)
configure_mappers()
sa.orm.clear_mappers()
+
class MyFakeProperty(sa.orm.properties.ColumnProperty):
+
def post_instrument_class(self, mapper):
super(MyFakeProperty, self).post_instrument_class(mapper)
configure_mappers()
m1 = mapper(User, users, properties={
- 'name':MyFakeProperty(users.c.name)
+ 'name': MyFakeProperty(users.c.name)
})
m2 = mapper(Address, addresses)
configure_mappers()
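
The custom-comparator test above uses comparator_factory on a column_property to get case-insensitive equality. A hedged sketch of the same pattern, assuming a mapped User class and a users Table:

    from sqlalchemy import func
    from sqlalchemy.orm import column_property, mapper
    from sqlalchemy.orm.properties import ColumnProperty

    class CaseInsensitiveComparator(ColumnProperty.Comparator):
        __hash__ = None

        def __eq__(self, other):
            # compare both sides lower-cased at the SQL level
            return func.lower(self.__clause_element__()) == func.lower(other)

    mapper(User, users, properties={
        'name': column_property(
            users.c.name, comparator_factory=CaseInsensitiveComparator)
    })
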
@@ -1513,6 +1569,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
recon = []
class User(object):
+
@reconstructor
def reconstruct(self):
recon.append('go')
@@ -1528,19 +1585,23 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
users = self.tables.users
recon = []
+
class A(object):
+
@reconstructor
def reconstruct(self):
assert isinstance(self, A)
recon.append('A')
class B(A):
+
@reconstructor
def reconstruct(self):
assert isinstance(self, B)
recon.append('B')
class C(A):
+
@reconstructor
def reconstruct(self):
assert isinstance(self, C)
@@ -1566,7 +1627,9 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
users = self.tables.users
recon = []
+
class Base(object):
+
@reconstructor
def reconstruct(self):
recon.append('go')
@@ -1584,15 +1647,15 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
def test_unmapped_error(self):
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(Address, addresses)
sa.orm.clear_mappers()
mapper(User, users, properties={
- 'addresses':relationship(Address)
+ 'addresses': relationship(Address)
})
assert_raises_message(
@@ -1621,9 +1684,10 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
Address = self.classes.Address
mapper(User, users, properties={
- "addresses": relationship(Address,
- primaryjoin=lambda: users.c.id == addresses.wrong.user_id)
- })
+ "addresses": relationship(
+ Address,
+ primaryjoin=lambda: users.c.id == addresses.wrong.user_id)
+ })
mapper(Address, addresses)
assert_raises_message(
AttributeError,
@@ -1638,10 +1702,10 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
Address = self.classes.Address
mapper(User, users, properties={
- "addresses": relationship(Address,
- primaryjoin=lambda: users.c.id ==
- addresses.__dict__['wrong'].user_id)
- })
+ "addresses": relationship(Address,
+ primaryjoin=lambda: users.c.id ==
+ addresses.__dict__['wrong'].user_id)
+ })
mapper(Address, addresses)
assert_raises_message(
KeyError,
@@ -1654,6 +1718,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
class Base(object):
pass
+
class Sub(Base):
pass
@@ -1671,7 +1736,7 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
# using it with an ORM operation, raises
assert_raises(sa.orm.exc.UnmappedClassError,
- create_session().add, Sub())
+ create_session().add, Sub())
def test_unmapped_subclass_error_premap(self):
users = self.tables.users
@@ -1697,13 +1762,14 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
# using it with an ORM operation, raises
assert_raises(sa.orm.exc.UnmappedClassError,
- create_session().add, Sub())
+ create_session().add, Sub())
def test_oldstyle_mixin(self):
users = self.tables.users
class OldStyle:
pass
+
class NewStyle(object):
pass
@@ -1717,22 +1783,26 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
mapper(B, users)
+
class DocumentTest(fixtures.TestBase):
def test_doc_propagate(self):
metadata = MetaData()
t1 = Table('t1', metadata,
- Column('col1', Integer, primary_key=True, doc="primary key column"),
- Column('col2', String, doc="data col"),
- Column('col3', String, doc="data col 2"),
- Column('col4', String, doc="data col 3"),
- Column('col5', String),
- )
+ Column('col1', Integer, primary_key=True,
+ doc="primary key column"),
+ Column('col2', String, doc="data col"),
+ Column('col3', String, doc="data col 2"),
+ Column('col4', String, doc="data col 3"),
+ Column('col5', String),
+ )
t2 = Table('t2', metadata,
- Column('col1', Integer, primary_key=True, doc="primary key column"),
- Column('col2', String, doc="data col"),
- Column('col3', Integer, ForeignKey('t1.col1'), doc="foreign key to t1.col1")
- )
+ Column('col1', Integer, primary_key=True,
+ doc="primary key column"),
+ Column('col2', String, doc="data col"),
+ Column('col3', Integer, ForeignKey('t1.col1'),
+ doc="foreign key to t1.col1")
+ )
class Foo(object):
pass
@@ -1741,12 +1811,12 @@ class DocumentTest(fixtures.TestBase):
pass
mapper(Foo, t1, properties={
- 'bars':relationship(Bar,
- doc="bar relationship",
- backref=backref('foo',doc='foo relationship')
- ),
- 'foober':column_property(t1.c.col3, doc='alternate data col'),
- 'hoho':synonym("col4", doc="syn of col4")
+ 'bars': relationship(Bar,
+ doc="bar relationship",
+ backref=backref('foo', doc='foo relationship')
+ ),
+ 'foober': column_property(t1.c.col3, doc='alternate data col'),
+ 'hoho': synonym("col4", doc="syn of col4")
})
mapper(Bar, t2)
configure_mappers()
@@ -1759,7 +1829,9 @@ class DocumentTest(fixtures.TestBase):
eq_(Bar.col1.__doc__, "primary key column")
eq_(Bar.foo.__doc__, "foo relationship")
+
class ORMLoggingTest(_fixtures.FixtureTest):
+
def setup(self):
self.buf = logging.handlers.BufferingHandler(100)
for log in [
@@ -1787,18 +1859,19 @@ class ORMLoggingTest(_fixtures.FixtureTest):
for msg in self._current_messages():
assert msg.startswith('(User|%%(%d anon)s) ' % id(tb))
+
class OptionsTest(_fixtures.FixtureTest):
def test_synonym_options(self):
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses), lazy='select',
- order_by=addresses.c.id),
- adlist = synonym('addresses')))
+ addresses=relationship(mapper(Address, addresses), lazy='select',
+ order_by=addresses.c.id),
+ adlist=synonym('addresses')))
def go():
sess = create_session()
@@ -1814,13 +1887,13 @@ class OptionsTest(_fixtures.FixtureTest):
"""A lazy relationship can be upgraded to an eager relationship."""
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses),
- order_by=addresses.c.id)))
+ addresses=relationship(mapper(Address, addresses),
+ order_by=addresses.c.id)))
sess = create_session()
l = (sess.query(User).
@@ -1833,9 +1906,9 @@ class OptionsTest(_fixtures.FixtureTest):
def test_eager_options_with_limit(self):
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(User, users, properties=dict(
addresses=relationship(mapper(Address, addresses), lazy='select')))
@@ -1858,12 +1931,12 @@ class OptionsTest(_fixtures.FixtureTest):
def test_lazy_options_with_limit(self):
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses), lazy='joined')))
+ addresses=relationship(mapper(Address, addresses), lazy='joined')))
sess = create_session()
u = (sess.query(User).
@@ -1880,16 +1953,17 @@ class OptionsTest(_fixtures.FixtureTest):
if eager columns are not available"""
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses),
- lazy='joined', order_by=addresses.c.id)))
+ addresses=relationship(mapper(Address, addresses),
+ lazy='joined', order_by=addresses.c.id)))
sess = create_session()
# first test straight eager load, 1 statement
+
def go():
l = sess.query(User).order_by(User.id).all()
eq_(l, self.static.user_address_result)
@@ -1902,24 +1976,27 @@ class OptionsTest(_fixtures.FixtureTest):
# (previous users in session fell out of scope and were removed from
# session's identity map)
r = users.select().order_by(users.c.id).execute()
+
def go():
l = list(sess.query(User).instances(r))
eq_(l, self.static.user_address_result)
self.sql_count_(4, go)
def test_eager_degrade_deep(self):
- users, Keyword, items, order_items, orders, Item, User, Address, keywords, item_keywords, Order, addresses = (self.tables.users,
- self.classes.Keyword,
- self.tables.items,
- self.tables.order_items,
- self.tables.orders,
- self.classes.Item,
- self.classes.User,
- self.classes.Address,
- self.tables.keywords,
- self.tables.item_keywords,
- self.classes.Order,
- self.tables.addresses)
+ users, Keyword, items, order_items, orders, \
+ Item, User, Address, keywords, item_keywords, Order, addresses = (
+ self.tables.users,
+ self.classes.Keyword,
+ self.tables.items,
+ self.tables.order_items,
+ self.tables.orders,
+ self.classes.Item,
+ self.classes.User,
+ self.classes.Address,
+ self.tables.keywords,
+ self.tables.item_keywords,
+ self.classes.Order,
+ self.tables.addresses)
# test with a deeper set of eager loads. when we first load the three
# users, they will have no addresses or orders. the number of lazy
@@ -1931,18 +2008,18 @@ class OptionsTest(_fixtures.FixtureTest):
mapper(Item, items, properties=dict(
keywords=relationship(Keyword, secondary=item_keywords,
- lazy='joined',
- order_by=item_keywords.c.keyword_id)))
+ lazy='joined',
+ order_by=item_keywords.c.keyword_id)))
mapper(Order, orders, properties=dict(
items=relationship(Item, secondary=order_items, lazy='joined',
- order_by=order_items.c.item_id)))
+ order_by=order_items.c.item_id)))
mapper(User, users, properties=dict(
addresses=relationship(Address, lazy='joined',
- order_by=addresses.c.id),
+ order_by=addresses.c.id),
orders=relationship(Order, lazy='joined',
- order_by=orders.c.id)))
+ order_by=orders.c.id)))
sess = create_session()
@@ -1957,6 +2034,7 @@ class OptionsTest(_fixtures.FixtureTest):
# then select just from users. run it into instances.
# then assert the data, which will launch 6 more lazy loads
r = users.select().execute()
+
def go():
l = list(sess.query(User).instances(r))
eq_(l, self.static.user_all_result)
@@ -1966,12 +2044,12 @@ class OptionsTest(_fixtures.FixtureTest):
"""An eager relationship can be upgraded to a lazy relationship."""
Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses), lazy='joined')
+ addresses=relationship(mapper(Address, addresses), lazy='joined')
))
sess = create_session()
@@ -1984,19 +2062,20 @@ class OptionsTest(_fixtures.FixtureTest):
self.sql_count_(4, go)
def test_option_propagate(self):
- users, items, order_items, Order, Item, User, orders = (self.tables.users,
- self.tables.items,
- self.tables.order_items,
- self.classes.Order,
- self.classes.Item,
- self.classes.User,
- self.tables.orders)
+ users, items, order_items, Order, Item, User, orders = (
+ self.tables.users,
+ self.tables.items,
+ self.tables.order_items,
+ self.classes.Order,
+ self.classes.Item,
+ self.classes.User,
+ self.tables.orders)
mapper(User, users, properties=dict(
- orders = relationship(Order)
+ orders=relationship(Order)
))
mapper(Order, orders, properties=dict(
- items = relationship(Item, secondary=order_items)
+ items=relationship(Item, secondary=order_items)
))
mapper(Item, items)
@@ -2005,35 +2084,39 @@ class OptionsTest(_fixtures.FixtureTest):
oalias = aliased(Order)
opt1 = sa.orm.joinedload(User.orders, Order.items)
opt2 = sa.orm.contains_eager(User.orders, Order.items, alias=oalias)
- u1 = sess.query(User).join(oalias, User.orders).options(opt1, opt2).first()
+ u1 = sess.query(User).join(oalias, User.orders).\
+ options(opt1, opt2).first()
ustate = attributes.instance_state(u1)
assert opt1 in ustate.load_options
assert opt2 not in ustate.load_options
class DeepOptionsTest(_fixtures.FixtureTest):
+
@classmethod
def setup_mappers(cls):
- users, Keyword, items, order_items, Order, Item, User, keywords, item_keywords, orders = (cls.tables.users,
- cls.classes.Keyword,
- cls.tables.items,
- cls.tables.order_items,
- cls.classes.Order,
- cls.classes.Item,
- cls.classes.User,
- cls.tables.keywords,
- cls.tables.item_keywords,
- cls.tables.orders)
+ users, Keyword, items, order_items, Order, Item, User, \
+ keywords, item_keywords, orders = (
+ cls.tables.users,
+ cls.classes.Keyword,
+ cls.tables.items,
+ cls.tables.order_items,
+ cls.classes.Order,
+ cls.classes.Item,
+ cls.classes.User,
+ cls.tables.keywords,
+ cls.tables.item_keywords,
+ cls.tables.orders)
mapper(Keyword, keywords)
mapper(Item, items, properties=dict(
keywords=relationship(Keyword, item_keywords,
- order_by=item_keywords.c.item_id)))
+ order_by=item_keywords.c.item_id)))
mapper(Order, orders, properties=dict(
items=relationship(Item, order_items,
- order_by=items.c.id)))
+ order_by=items.c.id)))
mapper(User, users, order_by=users.c.id, properties=dict(
orders=relationship(Order, order_by=orders.c.id)))
@@ -2045,8 +2128,9 @@ class DeepOptionsTest(_fixtures.FixtureTest):
# joinedload nothing.
u = sess.query(User).all()
+
def go():
- x = u[0].orders[1].items[0].keywords[1]
+ u[0].orders[1].items[0].keywords[1]
self.assert_sql_count(testing.db, go, 3)
def test_deep_options_2(self):
@@ -2054,24 +2138,24 @@ class DeepOptionsTest(_fixtures.FixtureTest):
User = self.classes.User
-
sess = create_session()
l = (sess.query(User).
- options(sa.orm.joinedload_all('orders.items.keywords'))).all()
+ options(sa.orm.joinedload_all('orders.items.keywords'))).all()
+
def go():
- x = l[0].orders[1].items[0].keywords[1]
+ l[0].orders[1].items[0].keywords[1]
self.sql_count_(0, go)
sess = create_session()
l = (sess.query(User).
- options(sa.orm.subqueryload_all('orders.items.keywords'))).all()
+ options(sa.orm.subqueryload_all('orders.items.keywords'))).all()
+
def go():
- x = l[0].orders[1].items[0].keywords[1]
+ l[0].orders[1].items[0].keywords[1]
self.sql_count_(0, go)
-
def test_deep_options_3(self):
User = self.classes.User
@@ -2083,14 +2167,15 @@ class DeepOptionsTest(_fixtures.FixtureTest):
options(sa.orm.joinedload('orders.items')).
options(sa.orm.joinedload('orders.items.keywords')))
u = q2.all()
+
def go():
- x = u[0].orders[1].items[0].keywords[1]
+ u[0].orders[1].items[0].keywords[1]
self.sql_count_(0, go)
def test_deep_options_4(self):
Item, User, Order = (self.classes.Item,
- self.classes.User,
- self.classes.Order)
+ self.classes.User,
+ self.classes.Order)
sess = create_session()
@@ -2103,25 +2188,31 @@ class DeepOptionsTest(_fixtures.FixtureTest):
# joinedload "keywords" on items. it will lazy load "orders", then
# lazy load the "items" on the order, but on "items" it will eager
# load the "keywords"
- q3 = sess.query(User).options(sa.orm.joinedload('orders.items.keywords'))
+ q3 = sess.query(User).options(
+ sa.orm.joinedload('orders.items.keywords'))
u = q3.all()
+
def go():
- x = u[0].orders[1].items[0].keywords[1]
+ u[0].orders[1].items[0].keywords[1]
self.sql_count_(2, go)
sess = create_session()
q3 = sess.query(User).options(
- sa.orm.joinedload(User.orders, Order.items, Item.keywords))
+ sa.orm.joinedload(User.orders, Order.items, Item.keywords))
u = q3.all()
+
def go():
- x = u[0].orders[1].items[0].keywords[1]
+ u[0].orders[1].items[0].keywords[1]
self.sql_count_(2, go)
+
class ComparatorFactoryTest(_fixtures.FixtureTest, AssertsCompiledSQL):
+
def test_kwarg_accepted(self):
users, Address = self.tables.users, self.classes.Address
class DummyComposite(object):
+
def __init__(self, x, y):
pass
@@ -2151,41 +2242,56 @@ class ComparatorFactoryTest(_fixtures.FixtureTest, AssertsCompiledSQL):
class MyFactory(ColumnProperty.Comparator):
__hash__ = None
+
def __eq__(self, other):
- return func.foobar(self.__clause_element__()) == func.foobar(other)
- mapper(User, users, properties={'name':column_property(users.c.name, comparator_factory=MyFactory)})
- self.assert_compile(User.name == 'ed', "foobar(users.name) = foobar(:foobar_1)", dialect=default.DefaultDialect())
- self.assert_compile(aliased(User).name == 'ed', "foobar(users_1.name) = foobar(:foobar_1)", dialect=default.DefaultDialect())
+ return func.foobar(self.__clause_element__()) == \
+ func.foobar(other)
+ mapper(
+ User, users,
+ properties={
+ 'name': column_property(
+ users.c.name, comparator_factory=MyFactory)})
+ self.assert_compile(
+ User.name == 'ed',
+ "foobar(users.name) = foobar(:foobar_1)",
+ dialect=default.DefaultDialect()
+ )
+ self.assert_compile(
+ aliased(User).name == 'ed',
+ "foobar(users_1.name) = foobar(:foobar_1)",
+ dialect=default.DefaultDialect())
def test_synonym(self):
users, User = self.tables.users, self.classes.User
from sqlalchemy.orm.properties import ColumnProperty
+
class MyFactory(ColumnProperty.Comparator):
__hash__ = None
+
def __eq__(self, other):
return func.foobar(self.__clause_element__()) ==\
- func.foobar(other)
+ func.foobar(other)
mapper(User, users, properties={
- 'name':synonym('_name', map_column=True,
- comparator_factory=MyFactory)
- })
+ 'name': synonym('_name', map_column=True,
+ comparator_factory=MyFactory)
+ })
self.assert_compile(
- User.name == 'ed',
- "foobar(users.name) = foobar(:foobar_1)",
- dialect=default.DefaultDialect())
+ User.name == 'ed',
+ "foobar(users.name) = foobar(:foobar_1)",
+ dialect=default.DefaultDialect())
self.assert_compile(
- aliased(User).name == 'ed',
- "foobar(users_1.name) = foobar(:foobar_1)",
- dialect=default.DefaultDialect())
+ aliased(User).name == 'ed',
+ "foobar(users_1.name) = foobar(:foobar_1)",
+ dialect=default.DefaultDialect())
def test_relationship(self):
users, Address, addresses, User = (self.tables.users,
- self.classes.Address,
- self.tables.addresses,
- self.classes.User)
+ self.classes.Address,
+ self.tables.addresses,
+ self.classes.User)
from sqlalchemy.orm.properties import RelationshipProperty
@@ -2194,46 +2300,50 @@ class ComparatorFactoryTest(_fixtures.FixtureTest, AssertsCompiledSQL):
# primaryjoin/secondaryjoin
class MyFactory(RelationshipProperty.Comparator):
__hash__ = None
+
def __eq__(self, other):
return func.foobar(self._source_selectable().c.user_id) == \
func.foobar(other.id)
class MyFactory2(RelationshipProperty.Comparator):
__hash__ = None
+
def __eq__(self, other):
return func.foobar(self._source_selectable().c.id) == \
func.foobar(other.user_id)
mapper(User, users)
mapper(Address, addresses, properties={
- 'user': relationship(User, comparator_factory=MyFactory,
+ 'user': relationship(
+ User, comparator_factory=MyFactory,
backref=backref("addresses", comparator_factory=MyFactory2)
)
- }
+ }
)
# these are kind of nonsensical tests.
self.assert_compile(Address.user == User(id=5),
- "foobar(addresses.user_id) = foobar(:foobar_1)",
- dialect=default.DefaultDialect())
+ "foobar(addresses.user_id) = foobar(:foobar_1)",
+ dialect=default.DefaultDialect())
self.assert_compile(User.addresses == Address(id=5, user_id=7),
- "foobar(users.id) = foobar(:foobar_1)",
- dialect=default.DefaultDialect())
+ "foobar(users.id) = foobar(:foobar_1)",
+ dialect=default.DefaultDialect())
self.assert_compile(
- aliased(Address).user == User(id=5),
- "foobar(addresses_1.user_id) = foobar(:foobar_1)",
- dialect=default.DefaultDialect())
+ aliased(Address).user == User(id=5),
+ "foobar(addresses_1.user_id) = foobar(:foobar_1)",
+ dialect=default.DefaultDialect())
self.assert_compile(
- aliased(User).addresses == Address(id=5, user_id=7),
- "foobar(users_1.id) = foobar(:foobar_1)",
- dialect=default.DefaultDialect())
-
+ aliased(User).addresses == Address(id=5, user_id=7),
+ "foobar(users_1.id) = foobar(:foobar_1)",
+ dialect=default.DefaultDialect())
class SecondaryOptionsTest(fixtures.MappedTest):
- """test that the contains_eager() option doesn't bleed into a secondary load."""
+
+ """test that the contains_eager() option doesn't bleed
+ into a secondary load."""
run_inserts = 'once'
@@ -2242,80 +2352,84 @@ class SecondaryOptionsTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table("base", metadata,
- Column('id', Integer, primary_key=True),
- Column('type', String(50), nullable=False)
- )
+ Column('id', Integer, primary_key=True),
+ Column('type', String(50), nullable=False)
+ )
Table("child1", metadata,
- Column('id', Integer, ForeignKey('base.id'), primary_key=True),
- Column('child2id', Integer, ForeignKey('child2.id'), nullable=False)
- )
+ Column('id', Integer, ForeignKey('base.id'), primary_key=True),
+ Column(
+ 'child2id', Integer, ForeignKey('child2.id'), nullable=False)
+ )
Table("child2", metadata,
- Column('id', Integer, ForeignKey('base.id'), primary_key=True),
- )
+ Column('id', Integer, ForeignKey('base.id'), primary_key=True),
+ )
Table('related', metadata,
- Column('id', Integer, ForeignKey('base.id'), primary_key=True),
- )
+ Column('id', Integer, ForeignKey('base.id'), primary_key=True),
+ )
@classmethod
def setup_mappers(cls):
child1, child2, base, related = (cls.tables.child1,
- cls.tables.child2,
- cls.tables.base,
- cls.tables.related)
+ cls.tables.child2,
+ cls.tables.base,
+ cls.tables.related)
class Base(cls.Comparable):
pass
+
class Child1(Base):
pass
+
class Child2(Base):
pass
+
class Related(cls.Comparable):
pass
mapper(Base, base, polymorphic_on=base.c.type, properties={
- 'related':relationship(Related, uselist=False)
+ 'related': relationship(Related, uselist=False)
})
mapper(Child1, child1, inherits=Base,
- polymorphic_identity='child1',
- properties={
- 'child2':relationship(Child2,
- primaryjoin=child1.c.child2id==base.c.id,
- foreign_keys=child1.c.child2id)
- })
+ polymorphic_identity='child1',
+ properties={
+ 'child2': relationship(Child2,
+ primaryjoin=child1.c.child2id == base.c.id,
+ foreign_keys=child1.c.child2id)
+ })
mapper(Child2, child2, inherits=Base, polymorphic_identity='child2')
mapper(Related, related)
@classmethod
def insert_data(cls):
child1, child2, base, related = (cls.tables.child1,
- cls.tables.child2,
- cls.tables.base,
- cls.tables.related)
+ cls.tables.child2,
+ cls.tables.base,
+ cls.tables.related)
base.insert().execute([
- {'id':1, 'type':'child1'},
- {'id':2, 'type':'child1'},
- {'id':3, 'type':'child1'},
- {'id':4, 'type':'child2'},
- {'id':5, 'type':'child2'},
- {'id':6, 'type':'child2'},
+ {'id': 1, 'type': 'child1'},
+ {'id': 2, 'type': 'child1'},
+ {'id': 3, 'type': 'child1'},
+ {'id': 4, 'type': 'child2'},
+ {'id': 5, 'type': 'child2'},
+ {'id': 6, 'type': 'child2'},
])
child2.insert().execute([
- {'id':4},
- {'id':5},
- {'id':6},
+ {'id': 4},
+ {'id': 5},
+ {'id': 6},
])
child1.insert().execute([
- {'id':1, 'child2id':4},
- {'id':2, 'child2id':5},
- {'id':3, 'child2id':6},
+ {'id': 1, 'child2id': 4},
+ {'id': 2, 'child2id': 5},
+ {'id': 3, 'child2id': 6},
])
related.insert().execute([
- {'id':1},
- {'id':2},
- {'id':3},
- {'id':4},
- {'id':5},
- {'id':6},
+ {'id': 1},
+ {'id': 2},
+ {'id': 3},
+ {'id': 4},
+ {'id': 5},
+ {'id': 6},
])
def test_contains_eager(self):
@@ -2324,9 +2438,9 @@ class SecondaryOptionsTest(fixtures.MappedTest):
sess = create_session()
child1s = sess.query(Child1).\
- join(Child1.related).\
- options(sa.orm.contains_eager(Child1.related)).\
- order_by(Child1.id)
+ join(Child1.related).\
+ options(sa.orm.contains_eager(Child1.related)).\
+ order_by(Child1.id)
def go():
eq_(
@@ -2345,10 +2459,11 @@ class SecondaryOptionsTest(fixtures.MappedTest):
testing.db,
lambda: c1.child2,
CompiledSQL(
- "SELECT child2.id AS child2_id, base.id AS base_id, base.type AS base_type "
+ "SELECT child2.id AS child2_id, base.id AS base_id, "
+ "base.type AS base_type "
"FROM base JOIN child2 ON base.id = child2.id "
"WHERE base.id = :param_1",
- {'param_1':4}
+ {'param_1': 4}
)
)
@@ -2357,12 +2472,15 @@ class SecondaryOptionsTest(fixtures.MappedTest):
sess = create_session()
- child1s = sess.query(Child1).join(Child1.related).options(sa.orm.joinedload(Child1.related)).order_by(Child1.id)
+ child1s = sess.query(Child1).join(Child1.related).options(
+ sa.orm.joinedload(Child1.related)).order_by(Child1.id)
def go():
eq_(
child1s.all(),
- [Child1(id=1, related=Related(id=1)), Child1(id=2, related=Related(id=2)), Child1(id=3, related=Related(id=3))]
+ [Child1(id=1, related=Related(id=1)),
+ Child1(id=2, related=Related(id=2)),
+ Child1(id=3, related=Related(id=3))]
)
self.assert_sql_count(testing.db, go, 1)
@@ -2372,30 +2490,32 @@ class SecondaryOptionsTest(fixtures.MappedTest):
testing.db,
lambda: c1.child2,
CompiledSQL(
- "SELECT child2.id AS child2_id, base.id AS base_id, base.type AS base_type "
- "FROM base JOIN child2 ON base.id = child2.id WHERE base.id = :param_1",
-
-# joinedload- this shouldn't happen
-# "SELECT base.id AS base_id, child2.id AS child2_id, base.type AS base_type, "
-# "related_1.id AS related_1_id FROM base JOIN child2 ON base.id = child2.id "
-# "LEFT OUTER JOIN related AS related_1 ON base.id = related_1.id WHERE base.id = :param_1",
- {'param_1':4}
+ "SELECT child2.id AS child2_id, base.id AS base_id, "
+ "base.type AS base_type "
+ "FROM base JOIN child2 ON base.id = child2.id "
+ "WHERE base.id = :param_1",
+
+ {'param_1': 4}
)
)
def test_joinedload_on_same(self):
Child1, Child2, Related = (self.classes.Child1,
- self.classes.Child2,
- self.classes.Related)
+ self.classes.Child2,
+ self.classes.Related)
sess = create_session()
- child1s = sess.query(Child1).join(Child1.related).options(sa.orm.joinedload(Child1.child2, Child2.related)).order_by(Child1.id)
+ child1s = sess.query(Child1).join(Child1.related).options(
+ sa.orm.joinedload(Child1.child2, Child2.related)
+ ).order_by(Child1.id)
def go():
eq_(
child1s.all(),
- [Child1(id=1, related=Related(id=1)), Child1(id=2, related=Related(id=2)), Child1(id=3, related=Related(id=3))]
+ [Child1(id=1, related=Related(id=1)),
+ Child1(id=2, related=Related(id=2)),
+ Child1(id=3, related=Related(id=3))]
)
self.assert_sql_count(testing.db, go, 4)
@@ -2406,32 +2526,43 @@ class SecondaryOptionsTest(fixtures.MappedTest):
testing.db,
lambda: c1.child2,
CompiledSQL(
- "SELECT child2.id AS child2_id, base.id AS base_id, base.type AS base_type, "
- "related_1.id AS related_1_id FROM base JOIN child2 ON base.id = child2.id "
- "LEFT OUTER JOIN related AS related_1 ON base.id = related_1.id WHERE base.id = :param_1",
- {'param_1':4}
+ "SELECT child2.id AS child2_id, base.id AS base_id, "
+ "base.type AS base_type, "
+ "related_1.id AS related_1_id FROM base JOIN child2 "
+ "ON base.id = child2.id "
+ "LEFT OUTER JOIN related AS related_1 "
+ "ON base.id = related_1.id WHERE base.id = :param_1",
+ {'param_1': 4}
)
)
class DeferredPopulationTest(fixtures.MappedTest):
+
@classmethod
def define_tables(cls, metadata):
Table("thing", metadata,
- Column("id", Integer, primary_key=True, test_needs_autoincrement=True),
- Column("name", String(20)))
+ Column(
+ "id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("name", String(20)))
Table("human", metadata,
- Column("id", Integer, primary_key=True, test_needs_autoincrement=True),
- Column("thing_id", Integer, ForeignKey("thing.id")),
- Column("name", String(20)))
+ Column(
+ "id", Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column("thing_id", Integer, ForeignKey("thing.id")),
+ Column("name", String(20)))
@classmethod
def setup_mappers(cls):
thing, human = cls.tables.thing, cls.tables.human
- class Human(cls.Basic): pass
- class Thing(cls.Basic): pass
+ class Human(cls.Basic):
+ pass
+
+ class Thing(cls.Basic):
+ pass
mapper(Human, human, properties={"thing": relationship(Thing)})
mapper(Thing, thing, properties={"name": deferred(thing.c.name)})
@@ -2462,7 +2593,7 @@ class DeferredPopulationTest(fixtures.MappedTest):
Thing = self.classes.Thing
session = create_session()
- result = session.query(Thing).first()
+ result = session.query(Thing).first() # noqa
session.expunge_all()
thing = session.query(Thing).options(sa.orm.undefer("name")).first()
self._test(thing)
@@ -2471,7 +2602,7 @@ class DeferredPopulationTest(fixtures.MappedTest):
Thing = self.classes.Thing
session = create_session()
- result = session.query(Thing).first()
+ result = session.query(Thing).first() # noqa
thing = session.query(Thing).options(sa.orm.undefer("name")).first()
self._test(thing)
@@ -2479,7 +2610,8 @@ class DeferredPopulationTest(fixtures.MappedTest):
Thing, Human = self.classes.Thing, self.classes.Human
session = create_session()
- human = session.query(Human).options(sa.orm.joinedload("thing")).first()
+ human = session.query(Human).options( # noqa
+ sa.orm.joinedload("thing")).first()
session.expunge_all()
thing = session.query(Thing).options(sa.orm.undefer("name")).first()
self._test(thing)
@@ -2488,7 +2620,8 @@ class DeferredPopulationTest(fixtures.MappedTest):
Thing, Human = self.classes.Thing, self.classes.Human
session = create_session()
- human = session.query(Human).options(sa.orm.joinedload("thing")).first()
+ human = session.query(Human).options( # noqa
+ sa.orm.joinedload("thing")).first()
thing = session.query(Thing).options(sa.orm.undefer("name")).first()
self._test(thing)
@@ -2496,7 +2629,8 @@ class DeferredPopulationTest(fixtures.MappedTest):
Thing, Human = self.classes.Thing, self.classes.Human
session = create_session()
- result = session.query(Human).add_entity(Thing).join("thing").first()
+ result = session.query(Human).add_entity( # noqa
+ Thing).join("thing").first()
session.expunge_all()
thing = session.query(Thing).options(sa.orm.undefer("name")).first()
self._test(thing)
@@ -2505,88 +2639,119 @@ class DeferredPopulationTest(fixtures.MappedTest):
Thing, Human = self.classes.Thing, self.classes.Human
session = create_session()
- result = session.query(Human).add_entity(Thing).join("thing").first()
+ result = session.query(Human).add_entity( # noqa
+ Thing).join("thing").first()
thing = session.query(Thing).options(sa.orm.undefer("name")).first()
self._test(thing)
-
-
class NoLoadTest(_fixtures.FixtureTest):
run_inserts = 'once'
run_deletes = None
- def test_basic(self):
- """A basic one-to-many lazy load"""
+ def test_o2m_noload(self):
- Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ Address, addresses, users, User = (
+ self.classes.Address,
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
m = mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses), lazy='noload')
+ addresses=relationship(mapper(Address, addresses), lazy='noload')
))
q = create_session().query(m)
l = [None]
+
def go():
x = q.filter(User.id == 7).all()
x[0].addresses
l[0] = x
self.assert_sql_count(testing.db, go, 1)
- self.assert_result(l[0], User,
- {'id' : 7, 'addresses' : (Address, [])},
- )
+ self.assert_result(
+ l[0], User,
+ {'id': 7, 'addresses': (Address, [])},
+ )
- def test_options(self):
- Address, addresses, users, User = (self.classes.Address,
- self.tables.addresses,
- self.tables.users,
- self.classes.User)
+ def test_upgrade_o2m_noload_lazyload_option(self):
+ Address, addresses, users, User = (
+ self.classes.Address,
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
m = mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses), lazy='noload')
+ addresses=relationship(mapper(Address, addresses), lazy='noload')
))
q = create_session().query(m).options(sa.orm.lazyload('addresses'))
l = [None]
+
def go():
x = q.filter(User.id == 7).all()
x[0].addresses
l[0] = x
self.sql_count_(2, go)
- self.assert_result(l[0], User,
- {'id' : 7, 'addresses' : (Address, [{'id' : 1}])},
- )
-
+ self.assert_result(
+ l[0], User,
+ {'id': 7, 'addresses': (Address, [{'id': 1}])},
+ )
+ def test_m2o_noload_option(self):
+ Address, addresses, users, User = (
+ self.classes.Address,
+ self.tables.addresses,
+ self.tables.users,
+ self.classes.User)
+ mapper(Address, addresses, properties={
+ 'user': relationship(User)
+ })
+ mapper(User, users)
+ s = Session()
+ a1 = s.query(Address).filter_by(id=1).options(
+ sa.orm.noload('user')).first()
+ def go():
+ eq_(a1.user, None)
+ self.sql_count_(0, go)
class RequirementsTest(fixtures.MappedTest):
+
"""Tests the contract for user classes."""
@classmethod
def define_tables(cls, metadata):
Table('ht1', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('value', String(10)))
Table('ht2', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('ht1_id', Integer, ForeignKey('ht1.id')),
Column('value', String(10)))
Table('ht3', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('value', String(10)))
Table('ht4', metadata,
- Column('ht1_id', Integer, ForeignKey('ht1.id'), primary_key=True),
- Column('ht3_id', Integer, ForeignKey('ht3.id'), primary_key=True))
+ Column('ht1_id', Integer, ForeignKey('ht1.id'),
+ primary_key=True),
+ Column('ht3_id', Integer, ForeignKey('ht3.id'),
+ primary_key=True))
Table('ht5', metadata,
- Column('ht1_id', Integer, ForeignKey('ht1.id'), primary_key=True))
+ Column('ht1_id', Integer, ForeignKey('ht1.id'),
+ primary_key=True))
Table('ht6', metadata,
- Column('ht1a_id', Integer, ForeignKey('ht1.id'), primary_key=True),
- Column('ht1b_id', Integer, ForeignKey('ht1.id'), primary_key=True),
+ Column('ht1a_id', Integer, ForeignKey('ht1.id'),
+ primary_key=True),
+ Column('ht1b_id', Integer, ForeignKey('ht1.id'),
+ primary_key=True),
Column('value', String(10)))
if util.py2k:
@@ -2604,16 +2769,21 @@ class RequirementsTest(fixtures.MappedTest):
pass
# TODO: is weakref support detectable without an instance?
- #self.assertRaises(sa.exc.ArgumentError, mapper, NoWeakrefSupport, t2)
+ # self.assertRaises(
+ # sa.exc.ArgumentError, mapper, NoWeakrefSupport, t2)
class _ValueBase(object):
+
def __init__(self, value='abc', id=None):
self.id = id
self.value = value
+
def __bool__(self):
return False
+
def __hash__(self):
return hash(self.value)
+
def __eq__(self, other):
if isinstance(other, type(self)):
return self.value == other.value
@@ -2630,19 +2800,21 @@ class RequirementsTest(fixtures.MappedTest):
"""
ht6, ht5, ht4, ht3, ht2, ht1 = (self.tables.ht6,
- self.tables.ht5,
- self.tables.ht4,
- self.tables.ht3,
- self.tables.ht2,
- self.tables.ht1)
-
+ self.tables.ht5,
+ self.tables.ht4,
+ self.tables.ht3,
+ self.tables.ht2,
+ self.tables.ht1)
class H1(self._ValueBase):
pass
+
class H2(self._ValueBase):
pass
+
class H3(self._ValueBase):
pass
+
class H6(self._ValueBase):
pass
@@ -2651,10 +2823,10 @@ class RequirementsTest(fixtures.MappedTest):
'h3s': relationship(H3, secondary=ht4, backref='h1s'),
'h1s': relationship(H1, secondary=ht5, backref='parent_h1'),
't6a': relationship(H6, backref='h1a',
- primaryjoin=ht1.c.id==ht6.c.ht1a_id),
+ primaryjoin=ht1.c.id == ht6.c.ht1a_id),
't6b': relationship(H6, backref='h1b',
- primaryjoin=ht1.c.id==ht6.c.ht1b_id),
- })
+ primaryjoin=ht1.c.id == ht6.c.ht1b_id),
+ })
mapper(H2, ht2)
mapper(H3, ht3)
mapper(H6, ht6)
@@ -2709,18 +2881,19 @@ class RequirementsTest(fixtures.MappedTest):
sa.orm.joinedload_all('h3s.h1s')).all()
eq_(len(h1s), 5)
-
def test_composite_results(self):
ht2, ht1 = (self.tables.ht2,
- self.tables.ht1)
-
+ self.tables.ht1)
class H1(self._ValueBase):
+
def __init__(self, value, id, h2s):
self.value = value
self.id = id
self.h2s = h2s
+
class H2(self._ValueBase):
+
def __init__(self, value, id):
self.value = value
self.id = id
@@ -2745,8 +2918,8 @@ class RequirementsTest(fixtures.MappedTest):
s.commit()
eq_(
[(h1.value, h1.id, h2.value, h2.id)
- for h1, h2 in
- s.query(H1, H2).join(H1.h2s).order_by(H1.id, H2.id)],
+ for h1, h2 in
+ s.query(H1, H2).join(H1.h2s).order_by(H1.id, H2.id)],
[
('abc', 1, 'abc', 1),
('abc', 1, 'def', 2),
@@ -2761,6 +2934,7 @@ class RequirementsTest(fixtures.MappedTest):
ht1 = self.tables.ht1
class H1(object):
+
def __len__(self):
return len(self.get_value())
@@ -2769,6 +2943,7 @@ class RequirementsTest(fixtures.MappedTest):
return self.value
class H2(object):
+
def __bool__(self):
return bool(self.get_value())
@@ -2781,19 +2956,21 @@ class RequirementsTest(fixtures.MappedTest):
h1 = H1()
h1.value = "Asdf"
- h1.value = "asdf asdf" # ding
+ h1.value = "asdf asdf" # ding
h2 = H2()
h2.value = "Asdf"
- h2.value = "asdf asdf" # ding
+ h2.value = "asdf asdf" # ding
+
class IsUserlandTest(fixtures.MappedTest):
+
@classmethod
def define_tables(cls, metadata):
Table('foo', metadata,
- Column('id', Integer, primary_key=True),
- Column('someprop', Integer)
- )
+ Column('id', Integer, primary_key=True),
+ Column('someprop', Integer)
+ )
def _test(self, value, instancelevel=None):
class Foo(object):
@@ -2842,17 +3019,20 @@ class IsUserlandTest(fixtures.MappedTest):
return "hi"
self._test(property(somefunc), "hi")
+
class MagicNamesTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('cartographers', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('name', String(50)),
Column('alias', String(50)),
Column('quip', String(100)))
Table('maps', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column('cart_id', Integer,
ForeignKey('cartographers.id')),
Column('state', String(2)),
@@ -2868,9 +3048,9 @@ class MagicNamesTest(fixtures.MappedTest):
def test_mappish(self):
maps, Cartographer, cartographers, Map = (self.tables.maps,
- self.classes.Cartographer,
- self.tables.cartographers,
- self.classes.Map)
+ self.classes.Cartographer,
+ self.tables.cartographers,
+ self.classes.Map)
mapper(Cartographer, cartographers, properties=dict(
query=cartographers.c.quip))
@@ -2879,7 +3059,7 @@ class MagicNamesTest(fixtures.MappedTest):
c = Cartographer(name='Lenny', alias='The Dude',
query='Where be dragons?')
- m = Map(state='AK', mapper=c)
+ Map(state='AK', mapper=c)
sess = create_session()
sess.add(c)
@@ -2889,16 +3069,18 @@ class MagicNamesTest(fixtures.MappedTest):
for C, M in ((Cartographer, Map),
(sa.orm.aliased(Cartographer), sa.orm.aliased(Map))):
c1 = (sess.query(C).
- filter(C.alias=='The Dude').
- filter(C.query=='Where be dragons?')).one()
- m1 = sess.query(M).filter(M.mapper==c1).one()
+ filter(C.alias == 'The Dude').
+ filter(C.query == 'Where be dragons?')).one()
+ sess.query(M).filter(M.mapper == c1).one()
def test_direct_stateish(self):
for reserved in (sa.orm.instrumentation.ClassManager.STATE_ATTR,
sa.orm.instrumentation.ClassManager.MANAGER_ATTR):
t = Table('t', sa.MetaData(),
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
Column(reserved, Integer))
+
class T(object):
pass
assert_raises_message(
@@ -2920,6 +3102,4 @@ class MagicNamesTest(fixtures.MappedTest):
('requested attribute name conflicts with '
'instrumentation attribute of the same name'),
mapper, M, maps, properties={
- reserved: maps.c.state})
-
-
+ reserved: maps.c.state})
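The test_mapper.py hunks above are mostly mechanical flake8 cleanups (spacing around keyword arguments, wrapped long lines, "# noqa" on intentionally unused results), but NoLoadTest also gains test_m2o_noload_option, which covers the noload() loader option on a many-to-one. A minimal self-contained sketch of that pattern, using an assumed declarative mapping rather than the test fixtures, could look like:

    import sqlalchemy as sa
    from sqlalchemy.orm import Session, relationship, noload
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'users'
        id = sa.Column(sa.Integer, primary_key=True)

    class Address(Base):
        __tablename__ = 'addresses'
        id = sa.Column(sa.Integer, primary_key=True)
        user_id = sa.Column(sa.ForeignKey('users.id'))
        user = relationship(User)

    engine = sa.create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = Session(engine)
    session.add(Address(id=1, user=User(id=7)))
    session.commit()
    session.expunge_all()

    # noload('user') suppresses the lazy load entirely; the attribute
    # comes back as None and no SQL is emitted for it.
    a1 = session.query(Address).options(noload('user')).filter_by(id=1).first()
    print(a1.user)  # None
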
diff --git a/test/orm/test_options.py b/test/orm/test_options.py
index 1c1a797a6..e1e26c62c 100644
--- a/test/orm/test_options.py
+++ b/test/orm/test_options.py
@@ -2,7 +2,7 @@ from sqlalchemy import inspect
from sqlalchemy.orm import attributes, mapper, relationship, backref, \
configure_mappers, create_session, synonym, Session, class_mapper, \
aliased, column_property, joinedload_all, joinedload, Query,\
- util as orm_util, Load
+ util as orm_util, Load, defer
import sqlalchemy as sa
from sqlalchemy import testing
from sqlalchemy.testing.assertions import eq_, assert_raises, assert_raises_message
@@ -46,8 +46,18 @@ class PathTest(object):
set([self._make_path(p) for p in paths])
)
+
class LoadTest(PathTest, QueryTest):
+ def test_str(self):
+ User = self.classes.User
+ l = Load(User)
+ l.strategy = (('deferred', False), ('instrument', True))
+ eq_(
+ str(l),
+ "Load(strategy=(('deferred', False), ('instrument', True)))"
+ )
+
def test_gen_path_attr_entity(self):
User = self.classes.User
Address = self.classes.Address
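The new LoadTest.test_str pins down a readable str() form for Load objects. A rough standalone equivalent, with an assumed trivial declarative mapping in place of the fixture classes:

    import sqlalchemy as sa
    from sqlalchemy.orm import Load
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'users'
        id = sa.Column(sa.Integer, primary_key=True)
        name = sa.Column(sa.String(50))

    opt = Load(User)
    # the test assigns the strategy tuple directly, then checks str()
    opt.strategy = (('deferred', False), ('instrument', True))
    print(opt)  # Load(strategy=(('deferred', False), ('instrument', True)))
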
diff --git a/test/orm/test_query.py b/test/orm/test_query.py
index 55af023b1..4ae0b010a 100644
--- a/test/orm/test_query.py
+++ b/test/orm/test_query.py
@@ -776,6 +776,42 @@ class InvalidGenerationsTest(QueryTest, AssertsCompiledSQL):
meth, q, *arg, **kw
)
+ def test_illegal_coercions(self):
+ User = self.classes.User
+
+ assert_raises_message(
+ sa_exc.ArgumentError,
+ "Object .*User.* is not legal as a SQL literal value",
+ distinct, User
+ )
+
+ ua = aliased(User)
+ assert_raises_message(
+ sa_exc.ArgumentError,
+ "Object .*User.* is not legal as a SQL literal value",
+ distinct, ua
+ )
+
+ s = Session()
+ assert_raises_message(
+ sa_exc.ArgumentError,
+ "Object .*User.* is not legal as a SQL literal value",
+ lambda: s.query(User).filter(User.name == User)
+ )
+
+ u1 = User()
+ assert_raises_message(
+ sa_exc.ArgumentError,
+ "Object .*User.* is not legal as a SQL literal value",
+ distinct, u1
+ )
+
+ assert_raises_message(
+ sa_exc.ArgumentError,
+ "Object .*User.* is not legal as a SQL literal value",
+ lambda: s.query(User).filter(User.name == u1)
+ )
+
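test_illegal_coercions asserts the 1.1 behavior in which a mapped class or instance used where a SQL literal is expected raises ArgumentError rather than being silently coerced into a bind value. A hedged standalone illustration of the same check, assuming a minimal declarative User mapping:

    import sqlalchemy as sa
    from sqlalchemy import distinct, exc
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'users'
        id = sa.Column(sa.Integer, primary_key=True)
        name = sa.Column(sa.String(50))

    try:
        distinct(User)  # a mapped class is not a SQL literal value
    except exc.ArgumentError as err:
        print(err)  # "Object ... is not legal as a SQL literal value"
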
class OperatorTest(QueryTest, AssertsCompiledSQL):
"""test sql.Comparator implementation for MapperProperties"""
@@ -1960,13 +1996,6 @@ class FilterTest(QueryTest, AssertsCompiledSQL):
sess.query(User). \
filter(User.addresses.any(email_address='fred@fred.com')).all()
- # test that any() doesn't overcorrelate
- assert [User(id=7), User(id=8)] == \
- sess.query(User).join("addresses"). \
- filter(
- ~User.addresses.any(
- Address.email_address == 'fred@fred.com')).all()
-
# test that the contents are not adapted by the aliased join
assert [User(id=7), User(id=8)] == \
sess.query(User).join("addresses", aliased=True). \
@@ -1978,6 +2007,18 @@ class FilterTest(QueryTest, AssertsCompiledSQL):
sess.query(User).outerjoin("addresses", aliased=True). \
filter(~User.addresses.any()).all()
+ def test_any_doesnt_overcorrelate(self):
+ User, Address = self.classes.User, self.classes.Address
+
+ sess = create_session()
+
+ # test that any() doesn't overcorrelate
+ assert [User(id=7), User(id=8)] == \
+ sess.query(User).join("addresses"). \
+ filter(
+ ~User.addresses.any(
+ Address.email_address == 'fred@fred.com')).all()
+
def test_has(self):
Dingaling, User, Address = (
self.classes.Dingaling, self.classes.User, self.classes.Address)
@@ -2190,6 +2231,42 @@ class FilterTest(QueryTest, AssertsCompiledSQL):
)
+class HasMapperEntitiesTest(QueryTest):
+ def test_entity(self):
+ User = self.classes.User
+ s = Session()
+
+ q = s.query(User)
+
+ assert q._has_mapper_entities
+
+ def test_cols(self):
+ User = self.classes.User
+ s = Session()
+
+ q = s.query(User.id)
+
+ assert not q._has_mapper_entities
+
+ def test_cols_set_entities(self):
+ User = self.classes.User
+ s = Session()
+
+ q = s.query(User.id)
+
+ q._set_entities(User)
+ assert q._has_mapper_entities
+
+ def test_entity_set_entities(self):
+ User = self.classes.User
+ s = Session()
+
+ q = s.query(User)
+
+ q._set_entities(User.id)
+ assert not q._has_mapper_entities
+
+
class SetOpsTest(QueryTest, AssertsCompiledSQL):
__dialect__ = 'default'
@@ -3598,6 +3675,60 @@ class ImmediateTest(_fixtures.FixtureTest):
sess.query(User).join(User.addresses).filter(User.id.in_([8, 9])).
order_by(User.id).one)
+ def test_one_or_none(self):
+ User, Address = self.classes.User, self.classes.Address
+
+ sess = create_session()
+
+ eq_(sess.query(User).filter(User.id == 99).one_or_none(), None)
+
+ eq_(sess.query(User).filter(User.id == 7).one_or_none().id, 7)
+
+ assert_raises_message(
+ sa.orm.exc.MultipleResultsFound,
+ "Multiple rows were found for one_or_none\(\)",
+ sess.query(User).one_or_none)
+
+ eq_(sess.query(User.id, User.name).filter(User.id == 99).one_or_none(), None)
+
+ eq_(sess.query(User.id, User.name).filter(User.id == 7).one_or_none(),
+ (7, 'jack'))
+
+ assert_raises(
+ sa.orm.exc.MultipleResultsFound,
+ sess.query(User.id, User.name).one_or_none)
+
+ eq_(
+ (sess.query(User, Address).join(User.addresses).
+ filter(Address.id == 99)).one_or_none(), None)
+
+ eq_((sess.query(User, Address).
+ join(User.addresses).
+ filter(Address.id == 4)).one_or_none(),
+ (User(id=8), Address(id=4)))
+
+ assert_raises(
+ sa.orm.exc.MultipleResultsFound,
+ sess.query(User, Address).join(User.addresses).one_or_none)
+
+ # this result returns multiple rows, the first
+ # two rows being the same. but uniquing is
+ # not applied for a column based result.
+ assert_raises(
+ sa.orm.exc.MultipleResultsFound,
+ sess.query(User.id).join(User.addresses).
+ filter(User.id.in_([8, 9])).order_by(User.id).one_or_none)
+
+ # test that a join which ultimately returns
+ # multiple identities across many rows still
+ # raises, even though the first two rows are of
+ # the same identity and unique filtering
+ # is applied ([ticket:1688])
+ assert_raises(
+ sa.orm.exc.MultipleResultsFound,
+ sess.query(User).join(User.addresses).filter(User.id.in_([8, 9])).
+ order_by(User.id).one_or_none)
+
@testing.future
def test_getslice(self):
assert False
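The one_or_none() block added above exercises the Query method introduced in 1.1: zero rows return None, exactly one row returns it, and more than one row still raises MultipleResultsFound, just as one() does. A compact usage sketch against an assumed in-memory mapping:

    import sqlalchemy as sa
    from sqlalchemy.orm import Session
    from sqlalchemy.orm.exc import MultipleResultsFound
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'users'
        id = sa.Column(sa.Integer, primary_key=True)
        name = sa.Column(sa.String(50))

    engine = sa.create_engine('sqlite://')
    Base.metadata.create_all(engine)
    s = Session(engine)
    s.add_all([User(id=7, name='jack'), User(id=8, name='ed')])
    s.commit()

    print(s.query(User).filter_by(id=99).one_or_none())    # None: no rows
    print(s.query(User).filter_by(id=7).one_or_none().id)  # 7: exactly one row
    try:
        s.query(User).one_or_none()
    except MultipleResultsFound as err:
        print(err)  # more than one row still raises, same as one()
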
diff --git a/test/orm/test_session.py b/test/orm/test_session.py
index 58551d763..f6ddcb566 100644
--- a/test/orm/test_session.py
+++ b/test/orm/test_session.py
@@ -493,8 +493,10 @@ class SessionStateTest(_fixtures.FixtureTest):
'is already attached to session',
s2.delete, user)
u2 = s2.query(User).get(user.id)
- assert_raises_message(sa.exc.InvalidRequestError,
- 'another instance with key', s.delete, u2)
+ s2.expunge(u2)
+ assert_raises_message(
+ sa.exc.InvalidRequestError,
+ 'another instance .* is already present', s.delete, u2)
s.expire(user)
s.expunge(user)
assert user not in s
@@ -543,8 +545,14 @@ class SessionStateTest(_fixtures.FixtureTest):
s.expunge(u2)
s.identity_map.add(sa.orm.attributes.instance_state(u1))
- assert_raises(AssertionError, s.identity_map.add,
- sa.orm.attributes.instance_state(u2))
+ assert_raises_message(
+ sa.exc.InvalidRequestError,
+ "Can't attach instance <User.*?>; another instance "
+ "with key .*? is already "
+ "present in this session.",
+ s.identity_map.add,
+ sa.orm.attributes.instance_state(u2)
+ )
def test_pickled_update(self):
users, User = self.tables.users, pickleable.User
@@ -581,7 +589,13 @@ class SessionStateTest(_fixtures.FixtureTest):
assert u2 is not None and u2 is not u1
assert u2 in sess
- assert_raises(AssertionError, lambda: sess.add(u1))
+ assert_raises_message(
+ sa.exc.InvalidRequestError,
+ "Can't attach instance <User.*?>; another instance "
+ "with key .*? is already "
+ "present in this session.",
+ sess.add, u1
+ )
sess.expunge(u2)
assert u2 not in sess
@@ -1124,11 +1138,56 @@ class WeakIdentityMapTest(_fixtures.FixtureTest):
class StrongIdentityMapTest(_fixtures.FixtureTest):
run_inserts = None
+ def _strong_ident_fixture(self):
+ sess = create_session(weak_identity_map=False)
+ return sess, sess.prune
+
+ def _event_fixture(self):
+ session = create_session()
+
+ @event.listens_for(session, "pending_to_persistent")
+ @event.listens_for(session, "deleted_to_persistent")
+ @event.listens_for(session, "detached_to_persistent")
+ @event.listens_for(session, "loaded_as_persistent")
+ def strong_ref_object(sess, instance):
+ if 'refs' not in sess.info:
+ sess.info['refs'] = refs = set()
+ else:
+ refs = sess.info['refs']
+
+ refs.add(instance)
+
+ @event.listens_for(session, "persistent_to_detached")
+ @event.listens_for(session, "persistent_to_deleted")
+ @event.listens_for(session, "persistent_to_transient")
+ def deref_object(sess, instance):
+ sess.info['refs'].discard(instance)
+
+ def prune():
+ if 'refs' not in session.info:
+ return 0
+
+ sess_size = len(session.identity_map)
+ session.info['refs'].clear()
+ gc_collect()
+ session.info['refs'] = set(
+ s.obj() for s in session.identity_map.all_states())
+ return sess_size - len(session.identity_map)
+
+ return session, prune
+
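_event_fixture rebuilds the old strong-identity-map/prune behavior on top of the object lifecycle events added to SessionEvents in 1.1 (pending_to_persistent, persistent_to_detached and friends). Stripped to its core, the listener registration looks roughly like the following sketch (mapping boilerplate assumed):

    from sqlalchemy import event
    from sqlalchemy.orm import Session

    session = Session()

    @event.listens_for(session, "pending_to_persistent")
    def strong_ref(sess, instance):
        # fires after a flush once a pending object acquires an identity;
        # the fixture keeps a strong reference so the object cannot be gc'ed
        sess.info.setdefault('refs', set()).add(instance)

    @event.listens_for(session, "persistent_to_detached")
    def deref(sess, instance):
        # mirror event: drop the reference once the object leaves the session
        sess.info.get('refs', set()).discard(instance)
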
@testing.uses_deprecated()
- def test_strong_ref(self):
+ def test_strong_ref_imap(self):
+ self._test_strong_ref(self._strong_ident_fixture)
+
+ def test_strong_ref_events(self):
+ self._test_strong_ref(self._event_fixture)
+
+ def _test_strong_ref(self, fixture):
+ s, prune = fixture()
+
users, User = self.tables.users, self.classes.User
- s = create_session(weak_identity_map=False)
mapper(User, users)
# save user
@@ -1148,12 +1207,19 @@ class StrongIdentityMapTest(_fixtures.FixtureTest):
eq_(users.select().execute().fetchall(), [(user.id, 'u2')])
@testing.uses_deprecated()
+ def test_prune_imap(self):
+ self._test_prune(self._strong_ident_fixture)
+
+ def test_prune_events(self):
+ self._test_prune(self._event_fixture)
+
@testing.fails_if(lambda: pypy, "pypy has a real GC")
@testing.fails_on('+zxjdbc', 'http://www.sqlalchemy.org/trac/ticket/1473')
- def test_prune(self):
+ def _test_prune(self, fixture):
+ s, prune = fixture()
+
users, User = self.tables.users, self.classes.User
- s = create_session(weak_identity_map=False)
mapper(User, users)
for o in [User(name='u%s' % x) for x in range(10)]:
@@ -1161,43 +1227,44 @@ class StrongIdentityMapTest(_fixtures.FixtureTest):
# o is still live after this loop...
self.assert_(len(s.identity_map) == 0)
- self.assert_(s.prune() == 0)
+ eq_(prune(), 0)
s.flush()
gc_collect()
- self.assert_(s.prune() == 9)
+ eq_(prune(), 9)
+ # o is still in local scope here, so still present
self.assert_(len(s.identity_map) == 1)
id = o.id
del o
- self.assert_(s.prune() == 1)
+ eq_(prune(), 1)
self.assert_(len(s.identity_map) == 0)
u = s.query(User).get(id)
- self.assert_(s.prune() == 0)
+ eq_(prune(), 0)
self.assert_(len(s.identity_map) == 1)
u.name = 'squiznart'
del u
- self.assert_(s.prune() == 0)
+ eq_(prune(), 0)
self.assert_(len(s.identity_map) == 1)
s.flush()
- self.assert_(s.prune() == 1)
+ eq_(prune(), 1)
self.assert_(len(s.identity_map) == 0)
s.add(User(name='x'))
- self.assert_(s.prune() == 0)
+ eq_(prune(), 0)
self.assert_(len(s.identity_map) == 0)
s.flush()
self.assert_(len(s.identity_map) == 1)
- self.assert_(s.prune() == 1)
+ eq_(prune(), 1)
self.assert_(len(s.identity_map) == 0)
u = s.query(User).get(id)
s.delete(u)
del u
- self.assert_(s.prune() == 0)
+ eq_(prune(), 0)
self.assert_(len(s.identity_map) == 1)
s.flush()
- self.assert_(s.prune() == 0)
+ eq_(prune(), 0)
self.assert_(len(s.identity_map) == 0)
diff --git a/test/orm/test_transaction.py b/test/orm/test_transaction.py
index 91846a67e..73c6b977a 100644
--- a/test/orm/test_transaction.py
+++ b/test/orm/test_transaction.py
@@ -895,7 +895,13 @@ class AutoExpireTest(_LocalFixture):
assert u1_state.obj() is None
s.rollback()
- assert u1_state in s.identity_map.all_states()
+ # new in 1.1, not in identity map if the object was
+ # gc'ed and we restore snapshot; we've changed update_impl
+ # to just skip this object
+ assert u1_state not in s.identity_map.all_states()
+
+ # in any version, the state is replaced by the query
+ # because the identity map would switch it
u1 = s.query(User).filter_by(name='ed').one()
assert u1_state not in s.identity_map.all_states()
assert s.scalar(users.count()) == 1
diff --git a/test/orm/test_unitofworkv2.py b/test/orm/test_unitofworkv2.py
index 9e9f400be..09240dfdb 100644
--- a/test/orm/test_unitofworkv2.py
+++ b/test/orm/test_unitofworkv2.py
@@ -1954,3 +1954,215 @@ class TypeWoBoolTest(fixtures.MappedTest, testing.AssertsExecutionResults):
eq_(
s.query(Thing.value).scalar().text, "foo"
)
+
+
+class NullEvaluatingTest(fixtures.MappedTest, testing.AssertsExecutionResults):
+ @classmethod
+ def define_tables(cls, metadata):
+ from sqlalchemy import TypeDecorator
+
+ class EvalsNull(TypeDecorator):
+ impl = String(50)
+
+ should_evaluate_none = True
+
+ def process_bind_param(self, value, dialect):
+ if value is None:
+ value = 'nothing'
+ return value
+
+ Table(
+ 'test', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('evals_null_no_default', EvalsNull()),
+ Column('evals_null_default', EvalsNull(), default='default_val'),
+ Column('no_eval_null_no_default', String(50)),
+ Column('no_eval_null_default', String(50), default='default_val'),
+ Column(
+ 'builtin_evals_null_no_default', String(50).evaluates_none()),
+ Column(
+ 'builtin_evals_null_default',
+ String(50).evaluates_none(), default='default_val'),
+ )
+
+ @classmethod
+ def setup_classes(cls):
+ class Thing(cls.Basic):
+ pass
+
+ @classmethod
+ def setup_mappers(cls):
+ Thing = cls.classes.Thing
+
+ mapper(Thing, cls.tables.test)
+
+ def _assert_col(self, name, value):
+ Thing = self.classes.Thing
+ s = Session()
+
+ col = getattr(Thing, name)
+ obj = s.query(col).filter(col == value).one()
+ eq_(obj[0], value)
+
+ def _test_insert(self, attr, expected):
+ Thing = self.classes.Thing
+
+ s = Session()
+ t1 = Thing(**{attr: None})
+ s.add(t1)
+ s.commit()
+
+ self._assert_col(attr, expected)
+
+ def _test_bulk_insert(self, attr, expected):
+ Thing = self.classes.Thing
+
+ s = Session()
+ s.bulk_insert_mappings(
+ Thing, [{attr: None}]
+ )
+ s.commit()
+
+ self._assert_col(attr, expected)
+
+ def _test_insert_novalue(self, attr, expected):
+ Thing = self.classes.Thing
+
+ s = Session()
+ t1 = Thing()
+ s.add(t1)
+ s.commit()
+
+ self._assert_col(attr, expected)
+
+ def _test_bulk_insert_novalue(self, attr, expected):
+ Thing = self.classes.Thing
+
+ s = Session()
+ s.bulk_insert_mappings(
+ Thing, [{}]
+ )
+ s.commit()
+
+ self._assert_col(attr, expected)
+
+ def test_evalnull_nodefault_insert(self):
+ self._test_insert(
+ "evals_null_no_default", 'nothing'
+ )
+
+ def test_evalnull_nodefault_bulk_insert(self):
+ self._test_bulk_insert(
+ "evals_null_no_default", 'nothing'
+ )
+
+ def test_evalnull_nodefault_insert_novalue(self):
+ self._test_insert_novalue(
+ "evals_null_no_default", None
+ )
+
+ def test_evalnull_nodefault_bulk_insert_novalue(self):
+ self._test_bulk_insert_novalue(
+ "evals_null_no_default", None
+ )
+
+ def test_evalnull_default_insert(self):
+ self._test_insert(
+ "evals_null_default", 'nothing'
+ )
+
+ def test_evalnull_default_bulk_insert(self):
+ self._test_bulk_insert(
+ "evals_null_default", 'nothing'
+ )
+
+ def test_evalnull_default_insert_novalue(self):
+ self._test_insert_novalue(
+ "evals_null_default", 'default_val'
+ )
+
+ def test_evalnull_default_bulk_insert_novalue(self):
+ self._test_bulk_insert_novalue(
+ "evals_null_default", 'default_val'
+ )
+
+ def test_no_evalnull_nodefault_insert(self):
+ self._test_insert(
+ "no_eval_null_no_default", None
+ )
+
+ def test_no_evalnull_nodefault_bulk_insert(self):
+ self._test_bulk_insert(
+ "no_eval_null_no_default", None
+ )
+
+ def test_no_evalnull_nodefault_insert_novalue(self):
+ self._test_insert_novalue(
+ "no_eval_null_no_default", None
+ )
+
+ def test_no_evalnull_nodefault_bulk_insert_novalue(self):
+ self._test_bulk_insert_novalue(
+ "no_eval_null_no_default", None
+ )
+
+ def test_no_evalnull_default_insert(self):
+ self._test_insert(
+ "no_eval_null_default", 'default_val'
+ )
+
+ def test_no_evalnull_default_bulk_insert(self):
+ self._test_bulk_insert(
+ "no_eval_null_default", 'default_val'
+ )
+
+ def test_no_evalnull_default_insert_novalue(self):
+ self._test_insert_novalue(
+ "no_eval_null_default", 'default_val'
+ )
+
+ def test_no_evalnull_default_bulk_insert_novalue(self):
+ self._test_bulk_insert_novalue(
+ "no_eval_null_default", 'default_val'
+ )
+
+ def test_builtin_evalnull_nodefault_insert(self):
+ self._test_insert(
+ "builtin_evals_null_no_default", None
+ )
+
+ def test_builtin_evalnull_nodefault_bulk_insert(self):
+ self._test_bulk_insert(
+ "builtin_evals_null_no_default", None
+ )
+
+ def test_builtin_evalnull_nodefault_insert_novalue(self):
+ self._test_insert_novalue(
+ "builtin_evals_null_no_default", None
+ )
+
+ def test_builtin_evalnull_nodefault_bulk_insert_novalue(self):
+ self._test_bulk_insert_novalue(
+ "builtin_evals_null_no_default", None
+ )
+
+ def test_builtin_evalnull_default_insert(self):
+ self._test_insert(
+ "builtin_evals_null_default", None
+ )
+
+ def test_builtin_evalnull_default_bulk_insert(self):
+ self._test_bulk_insert(
+ "builtin_evals_null_default", None
+ )
+
+ def test_builtin_evalnull_default_insert_novalue(self):
+ self._test_insert_novalue(
+ "builtin_evals_null_default", 'default_val'
+ )
+
+ def test_builtin_evalnull_default_bulk_insert_novalue(self):
+ self._test_bulk_insert_novalue(
+ "builtin_evals_null_default", 'default_val'
+ )
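NullEvaluatingTest walks the new 1.1 "evaluate None" matrix: a type marked with should_evaluate_none, or obtained via evaluates_none(), has an explicit Python None passed through to its bind processor and persisted, instead of being skipped so that a column default can fire. A minimal sketch of the built-in variant, under an assumed declarative setup:

    import sqlalchemy as sa
    from sqlalchemy.orm import Session
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Thing(Base):
        __tablename__ = 'thing'
        id = sa.Column(sa.Integer, primary_key=True)
        # without evaluates_none(), assigning None would let the column
        # default win on INSERT; with it, the None is persisted as NULL
        value = sa.Column(sa.String(50).evaluates_none(), default='default_val')

    engine = sa.create_engine('sqlite://')
    Base.metadata.create_all(engine)
    s = Session(engine)
    s.add(Thing(value=None))
    s.commit()
    print(s.query(Thing.value).scalar())  # None, not 'default_val'
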
diff --git a/test/profiles.txt b/test/profiles.txt
index 691d1a54d..f6b682be1 100644
--- a/test/profiles.txt
+++ b/test/profiles.txt
@@ -38,7 +38,7 @@ test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_sqlite_pysqlite_noc
test.aaa_profiling.test_compiler.CompileTest.test_select 2.6_sqlite_pysqlite_nocextensions 157
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_cextensions 153
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_nocextensions 153
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_cextensions 153
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_cextensions 157
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_nocextensions 153
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_cextensions 153
test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_nocextensions 153
@@ -60,7 +60,7 @@ test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_sqlite_pysqlite_noc
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.6_sqlite_pysqlite_nocextensions 190
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqldb_cextensions 188
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqldb_nocextensions 188
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_cextensions 188
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_cextensions 190
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_nocextensions 188
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_cextensions 188
test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_nocextensions 188
@@ -104,7 +104,7 @@ test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_sqlite_pysqlite_noc
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.6_sqlite_pysqlite_nocextensions 146
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_cextensions 146
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_nocextensions 146
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_cextensions 146
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_cextensions 147
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_nocextensions 146
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_cextensions 146
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_nocextensions 146
@@ -117,7 +117,7 @@ test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_sqlite_
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_mysql_pymysql_cextensions 146
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_mysql_pymysql_nocextensions 146
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_postgresql_psycopg2_cextensions 146
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_postgresql_psycopg2_nocextensions 146
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_postgresql_psycopg2_nocextensions 147
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_sqlite_pysqlite_cextensions 146
test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_sqlite_pysqlite_nocextensions 146
@@ -126,7 +126,7 @@ test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_sqlite_
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.6_sqlite_pysqlite_nocextensions 4262
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_mysql_mysqldb_cextensions 4262
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_mysql_mysqldb_nocextensions 4262
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_postgresql_psycopg2_cextensions 4262
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_postgresql_psycopg2_cextensions 4257
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_postgresql_psycopg2_nocextensions 4262
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_sqlite_pysqlite_cextensions 4262
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_sqlite_pysqlite_nocextensions 4262
@@ -139,7 +139,7 @@ test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_sqlite_
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_mysql_pymysql_cextensions 4263
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_mysql_pymysql_nocextensions 4263
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_postgresql_psycopg2_cextensions 4263
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_postgresql_psycopg2_nocextensions 4263
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_postgresql_psycopg2_nocextensions 4258
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_sqlite_pysqlite_cextensions 4263
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_sqlite_pysqlite_nocextensions 4263
@@ -170,7 +170,7 @@ test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.6_sqlite_pysqlite_nocextensions 26358
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_mysql_mysqldb_cextensions 16194
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_mysql_mysqldb_nocextensions 25197
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_postgresql_psycopg2_cextensions 28177
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_postgresql_psycopg2_cextensions 29184
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_postgresql_psycopg2_nocextensions 37180
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_sqlite_pysqlite_cextensions 16329
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_sqlite_pysqlite_nocextensions 25332
@@ -183,7 +183,7 @@ test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_sqlite_pysqlite_n
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_mysql_pymysql_cextensions 83733
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_mysql_pymysql_nocextensions 92736
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_postgresql_psycopg2_cextensions 18221
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_postgresql_psycopg2_nocextensions 27224
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_postgresql_psycopg2_nocextensions 27201
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_sqlite_pysqlite_cextensions 18393
test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_sqlite_pysqlite_nocextensions 27396
@@ -192,7 +192,7 @@ test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_sqlite_pysqlite_n
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.6_sqlite_pysqlite_nocextensions 26282
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_mysql_mysqldb_cextensions 22212
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_mysql_mysqldb_nocextensions 25215
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_postgresql_psycopg2_cextensions 22183
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_postgresql_psycopg2_cextensions 23196
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_postgresql_psycopg2_nocextensions 25186
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_sqlite_pysqlite_cextensions 22269
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_sqlite_pysqlite_nocextensions 25272
@@ -205,7 +205,7 @@ test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_sqlite_pys
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_mysql_pymysql_cextensions 47353
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_mysql_pymysql_nocextensions 50356
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_postgresql_psycopg2_cextensions 24215
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_postgresql_psycopg2_nocextensions 27218
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_postgresql_psycopg2_nocextensions 27220
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_sqlite_pysqlite_cextensions 24321
test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_sqlite_pysqlite_nocextensions 27324
@@ -236,7 +236,7 @@ test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.6_sqlite_pysqlite_nocextensions 161101
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_mysql_mysqldb_cextensions 127101
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_mysql_mysqldb_nocextensions 128851
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_postgresql_psycopg2_cextensions 120101
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_postgresql_psycopg2_cextensions 123351
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_postgresql_psycopg2_nocextensions 121851
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_sqlite_pysqlite_cextensions 156351
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_sqlite_pysqlite_nocextensions 158054
@@ -249,7 +249,7 @@ test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_mysql_pymysql_cextensions 187056
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_mysql_pymysql_nocextensions 188855
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_postgresql_psycopg2_cextensions 128556
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_postgresql_psycopg2_nocextensions 130306
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_postgresql_psycopg2_nocextensions 130356
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_sqlite_pysqlite_cextensions 168806
test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_sqlite_pysqlite_nocextensions 170556
@@ -258,7 +258,7 @@ test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.6_sqlite_pysqlite_nocextensions 21505
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_mysql_mysqldb_cextensions 19393
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_mysql_mysqldb_nocextensions 19597
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_postgresql_psycopg2_cextensions 18881
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_postgresql_psycopg2_cextensions 19024
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_postgresql_psycopg2_nocextensions 19085
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_cextensions 21186
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_nocextensions 21437
@@ -271,7 +271,7 @@ test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_mysql_pymysql_cextensions 23716
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_mysql_pymysql_nocextensions 23871
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_postgresql_psycopg2_cextensions 19552
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_postgresql_psycopg2_nocextensions 19744
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_postgresql_psycopg2_nocextensions 19727
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_sqlite_pysqlite_cextensions 22051
test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_sqlite_pysqlite_nocextensions 22255
@@ -280,7 +280,7 @@ test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.
test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.6_sqlite_pysqlite_nocextensions 1520
test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_mysql_mysqldb_cextensions 1400
test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_mysql_mysqldb_nocextensions 1415
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_postgresql_psycopg2_cextensions 1319
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_postgresql_psycopg2_cextensions 1309
test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_postgresql_psycopg2_nocextensions 1334
test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_cextensions 1527
test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_nocextensions 1542
@@ -293,7 +293,7 @@ test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_sqlite_pysqlite_nocext
test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_mysql_pymysql_cextensions 2038
test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_mysql_pymysql_nocextensions 2053
test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_postgresql_psycopg2_cextensions 1335
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_postgresql_psycopg2_nocextensions 1350
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_postgresql_psycopg2_nocextensions 1354
test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_sqlite_pysqlite_cextensions 1577
test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_sqlite_pysqlite_nocextensions 1592
@@ -302,7 +302,7 @@ test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_sqlite_pysqlite_nocext
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.6_sqlite_pysqlite_nocextensions 89,19
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_mysql_mysqldb_cextensions 93,19
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_mysql_mysqldb_nocextensions 93,19
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_postgresql_psycopg2_cextensions 93,19
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_postgresql_psycopg2_cextensions 91,19
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_postgresql_psycopg2_nocextensions 93,19
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_sqlite_pysqlite_cextensions 93,19
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_sqlite_pysqlite_nocextensions 93,19
@@ -315,7 +315,7 @@ test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_sqlite_pysqlite_noc
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_mysql_pymysql_cextensions 92,20
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_mysql_pymysql_nocextensions 92,20
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_postgresql_psycopg2_cextensions 92,20
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_postgresql_psycopg2_nocextensions 92,20
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_postgresql_psycopg2_nocextensions 94,20
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_sqlite_pysqlite_cextensions 92,20
test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_sqlite_pysqlite_nocextensions 92,20
@@ -324,7 +324,7 @@ test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_sqlite_pysqlite_noc
test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.6_sqlite_pysqlite_nocextensions 8064
test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_mysql_mysqldb_cextensions 6220
test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_mysql_mysqldb_nocextensions 6750
-test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_postgresql_psycopg2_cextensions 6790
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_postgresql_psycopg2_cextensions 6798
test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_postgresql_psycopg2_nocextensions 7320
test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_sqlite_pysqlite_cextensions 7564
test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_sqlite_pysqlite_nocextensions 8094
@@ -337,7 +337,7 @@ test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.3_sqlite_pysqlite_nocext
test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_mysql_pymysql_cextensions 13744
test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_mysql_pymysql_nocextensions 14274
test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_postgresql_psycopg2_cextensions 6234
-test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_postgresql_psycopg2_nocextensions 6674
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_postgresql_psycopg2_nocextensions 6702
test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_sqlite_pysqlite_cextensions 7846
test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_sqlite_pysqlite_nocextensions 8376
@@ -346,7 +346,7 @@ test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_sqlite_pysqlite_nocext
test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.6_sqlite_pysqlite_nocextensions 1156
test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_mysql_mysqldb_cextensions 1145
test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_mysql_mysqldb_nocextensions 1148
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_postgresql_psycopg2_cextensions 1160
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_postgresql_psycopg2_cextensions 1139
test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_postgresql_psycopg2_nocextensions 1161
test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_sqlite_pysqlite_cextensions 1151
test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_sqlite_pysqlite_nocextensions 1145
@@ -359,7 +359,7 @@ test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.3_sqlite_pysqlite_noc
test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_mysql_pymysql_cextensions 1254
test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_mysql_pymysql_nocextensions 1280
test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_postgresql_psycopg2_cextensions 1247
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_postgresql_psycopg2_nocextensions 1262
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_postgresql_psycopg2_nocextensions 1263
test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_sqlite_pysqlite_cextensions 1238
test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_sqlite_pysqlite_nocextensions 1272
@@ -368,7 +368,7 @@ test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_sqlite_pysqlite_noc
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.6_sqlite_pysqlite_nocextensions 97
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_mysql_mysqldb_cextensions 95
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_mysql_mysqldb_nocextensions 95
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_postgresql_psycopg2_cextensions 95
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_postgresql_psycopg2_cextensions 96
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_postgresql_psycopg2_nocextensions 95
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_sqlite_pysqlite_cextensions 95
test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_sqlite_pysqlite_nocextensions 95
@@ -500,7 +500,7 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.6_sqlite_pysqlite_nocextensions 15439
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_cextensions 488
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_nocextensions 15488
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_cextensions 20477
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_cextensions 20497
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_nocextensions 35477
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_cextensions 419
test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_nocextensions 15419
@@ -522,7 +522,7 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_sqlite_pysqlite_
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.6_sqlite_pysqlite_nocextensions 15439
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_cextensions 488
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_nocextensions 45488
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_cextensions 20477
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_cextensions 20497
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_nocextensions 35477
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_cextensions 419
test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_nocextensions 15419
@@ -541,18 +541,18 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_sqlite_pysqlite
# TEST: test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_cextensions 5811,295,3577,11462,1134,1973,2434
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_cextensions 5823,295,3721,11938,1146,2017,2481
test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_nocextensions 5833,295,3681,12720,1241,1980,2655
test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_cextensions 5591,277,3569,11458,1134,1924,2489
test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_nocextensions 5613,277,3665,12630,1228,1931,2681
test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_cextensions 5619,277,3705,11902,1144,1966,2532
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_nocextensions 5624,277,3801,13074,1238,1970,2724
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_nocextensions 5625,277,3809,13110,1240,1975,2733
# TEST: test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_cextensions 6256,402,6599,17140,1146,2569
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_cextensions 6437,410,6761,17665,1159,2627
test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_nocextensions 6341,407,6703,18167,1244,2598
test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_cextensions 6228,393,6747,17582,1148,2623
test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_nocextensions 6318,398,6851,18609,1234,2652
test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_cextensions 6257,393,6891,18056,1159,2671
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_nocextensions 6341,398,6995,19083,1245,2700
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_nocextensions 6418,401,7005,19115,1247,2706
diff --git a/test/requirements.py b/test/requirements.py
index db4daca20..fa69a62f1 100644
--- a/test/requirements.py
+++ b/test/requirements.py
@@ -293,7 +293,6 @@ class DefaultRequirements(SuiteRequirements):
named 'test_schema'."""
return skip_if([
- "sqlite",
"firebird"
], "no schema support")
@@ -362,6 +361,32 @@ class DefaultRequirements(SuiteRequirements):
], 'no support for EXCEPT')
@property
+ def parens_in_union_contained_select_w_limit_offset(self):
+ """Target database must support parenthesized SELECT in UNION
+ when LIMIT/OFFSET is specifically present.
+
+    E.g. (SELECT ... LIMIT ..) UNION (SELECT .. OFFSET ..)
+
+ This is known to fail on SQLite.
+
+ """
+ return fails_if('sqlite')
+
+ @property
+ def parens_in_union_contained_select_wo_limit_offset(self):
+ """Target database must support parenthesized SELECT in UNION
+ when OFFSET/LIMIT is specifically not present.
+
+    E.g. (SELECT ...) UNION (SELECT ..)
+
+ This is known to fail on SQLite. It also fails on Oracle
+ because without LIMIT/OFFSET, there is currently no step that
+ creates an additional subquery.
+
+ """
+ return fails_if(['sqlite', 'oracle'])
+
+ @property
def offset(self):
"""Target database must support some method of adding OFFSET or
equivalent to a result set."""
@@ -758,7 +783,7 @@ class DefaultRequirements(SuiteRequirements):
@property
def postgresql_jsonb(self):
- return skip_if(
+ return only_on("postgresql >= 9.4") + skip_if(
lambda config:
config.db.dialect.driver == "pg8000" and
config.db.dialect._dbapi_version <= (1, 10, 1)
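As a point of reference (not part of the patch), a small sketch of the compiler behavior the two new parens_in_union requirements gate: under the 1.1 changes, a SELECT that carries ORDER BY inside a union() is rendered in parentheses, which SQLite rejects.

    from sqlalchemy import column, select, union

    s = select([column('foo'), column('bar')])
    print(union(s.order_by('foo'), s.order_by('bar')))
    # (SELECT foo, bar ORDER BY foo) UNION (SELECT foo, bar ORDER BY bar)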
diff --git a/test/sql/test_compiler.py b/test/sql/test_compiler.py
index 04e3171a9..c957b2f8a 100644
--- a/test/sql/test_compiler.py
+++ b/test/sql/test_compiler.py
@@ -1643,14 +1643,12 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
s = select([column('foo'), column('bar')])
- # ORDER BY's even though not supported by
- # all DB's, are rendered if requested
self.assert_compile(
union(
s.order_by("foo"),
s.order_by("bar")),
- "SELECT foo, bar ORDER BY foo UNION SELECT foo, bar ORDER BY bar")
- # self_group() is honored
+ "(SELECT foo, bar ORDER BY foo) UNION "
+ "(SELECT foo, bar ORDER BY bar)")
self.assert_compile(
union(s.order_by("foo").self_group(),
s.order_by("bar").limit(10).self_group()),
@@ -1759,6 +1757,67 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
"SELECT foo, bar FROM bat)"
)
+ # tests for [ticket:2528]
+ # sqlite hates all of these.
+ self.assert_compile(
+ union(
+ s.limit(1),
+ s.offset(2)
+ ),
+ "(SELECT foo, bar FROM bat LIMIT :param_1) "
+ "UNION (SELECT foo, bar FROM bat LIMIT -1 OFFSET :param_2)"
+ )
+
+ self.assert_compile(
+ union(
+ s.order_by(column('bar')),
+ s.offset(2)
+ ),
+ "(SELECT foo, bar FROM bat ORDER BY bar) "
+ "UNION (SELECT foo, bar FROM bat LIMIT -1 OFFSET :param_1)"
+ )
+
+ self.assert_compile(
+ union(
+ s.limit(1).alias('a'),
+ s.limit(2).alias('b')
+ ),
+ "(SELECT foo, bar FROM bat LIMIT :param_1) "
+ "UNION (SELECT foo, bar FROM bat LIMIT :param_2)"
+ )
+
+ self.assert_compile(
+ union(
+ s.limit(1).self_group(),
+ s.limit(2).self_group()
+ ),
+ "(SELECT foo, bar FROM bat LIMIT :param_1) "
+ "UNION (SELECT foo, bar FROM bat LIMIT :param_2)"
+ )
+
+ self.assert_compile(
+ union(s.limit(1), s.limit(2).offset(3)).alias().select(),
+ "SELECT anon_1.foo, anon_1.bar FROM "
+ "((SELECT foo, bar FROM bat LIMIT :param_1) "
+ "UNION (SELECT foo, bar FROM bat LIMIT :param_2 OFFSET :param_3)) "
+ "AS anon_1"
+ )
+
+ # this version works for SQLite
+ self.assert_compile(
+ union(
+ s.limit(1).alias().select(),
+ s.offset(2).alias().select(),
+ ),
+ "SELECT anon_1.foo, anon_1.bar "
+ "FROM (SELECT foo, bar FROM bat"
+ " LIMIT :param_1) AS anon_1 "
+ "UNION SELECT anon_2.foo, anon_2.bar "
+ "FROM (SELECT foo, bar "
+ "FROM bat"
+ " LIMIT -1 OFFSET :param_2) AS anon_2"
+ )
+
def test_binds(self):
for (
stmt,
@@ -2040,6 +2099,8 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
'Incorrect number of expected results')
eq_(str(cast(tbl.c.v1, Numeric).compile(dialect=dialect)),
'CAST(casttest.v1 AS %s)' % expected_results[0])
+ eq_(str(tbl.c.v1.cast(Numeric).compile(dialect=dialect)),
+ 'CAST(casttest.v1 AS %s)' % expected_results[0])
eq_(str(cast(tbl.c.v1, Numeric(12, 9)).compile(dialect=dialect)),
'CAST(casttest.v1 AS %s)' % expected_results[1])
eq_(str(cast(tbl.c.ts, Date).compile(dialect=dialect)),
@@ -3423,13 +3484,15 @@ class ResultMapTest(fixtures.TestBase):
tc = type_coerce(t.c.a, String)
stmt = select([t.c.a, l1, tc])
comp = stmt.compile()
- tc_anon_label = comp._create_result_map()['a_1'][1][0]
+ tc_anon_label = comp._create_result_map()['anon_1'][1][0]
eq_(
comp._create_result_map(),
{
'a': ('a', (t.c.a, 'a', 'a'), t.c.a.type),
'bar': ('bar', (l1, 'bar'), l1.type),
- 'a_1': ('%%(%d a)s' % id(tc), (tc_anon_label, 'a_1'), tc.type),
+ 'anon_1': (
+ '%%(%d anon)s' % id(tc),
+ (tc_anon_label, 'anon_1', tc), tc.type),
},
)
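A short sketch, assuming the [ticket:2528] behavior asserted above, of the new parenthesization when LIMIT/OFFSET is present, plus the alias().select() spelling that stays portable to SQLite:

    from sqlalchemy import column, select, union

    s = select([column('foo'), column('bar')])
    print(union(s.limit(1), s.offset(2)))
    # (SELECT foo, bar LIMIT :param_1)
    #   UNION (SELECT foo, bar LIMIT -1 OFFSET :param_2)

    # each side wrapped in a subquery compiles without outer parentheses,
    # which SQLite accepts
    print(union(s.limit(1).alias().select(), s.offset(2).alias().select()))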
diff --git a/test/sql/test_defaults.py b/test/sql/test_defaults.py
index 7f4d5d30a..673085cf7 100644
--- a/test/sql/test_defaults.py
+++ b/test/sql/test_defaults.py
@@ -123,6 +123,14 @@ class DefaultTest(fixtures.TestBase):
def gen_default(cls, ctx):
return "hi"
+ class MyType(TypeDecorator):
+ impl = String(50)
+
+ def process_bind_param(self, value, dialect):
+ if value is not None:
+ value = "BIND" + value
+ return value
+
# select "count(1)" returns different results on different DBs also
# correct for "current_date" compatible as column default, value
# differences
@@ -211,7 +219,10 @@ class DefaultTest(fixtures.TestBase):
server_default='ddl'),
# python method w/ context
- Column('col10', String(20), default=MyClass.gen_default)
+ Column('col10', String(20), default=MyClass.gen_default),
+
+ # fixed default w/ type that has bound processor
+ Column('col11', MyType(), default='foo')
)
t.create()
@@ -391,7 +402,7 @@ class DefaultTest(fixtures.TestBase):
today = datetime.date.today()
eq_(l.fetchall(), [
(x, 'imthedefault', f, ts, ts, ctexec, True, False,
- 12, today, 'py', 'hi')
+ 12, today, 'py', 'hi', 'BINDfoo')
for x in range(51, 54)])
t.insert().execute(col9=None)
@@ -401,7 +412,7 @@ class DefaultTest(fixtures.TestBase):
eq_(t.select(t.c.col1 == 54).execute().fetchall(),
[(54, 'imthedefault', f, ts, ts, ctexec, True, False,
- 12, today, None, 'hi')])
+ 12, today, None, 'hi', 'BINDfoo')])
def test_insertmany(self):
t.insert().execute({}, {}, {})
@@ -411,11 +422,11 @@ class DefaultTest(fixtures.TestBase):
today = datetime.date.today()
eq_(l.fetchall(),
[(51, 'imthedefault', f, ts, ts, ctexec, True, False,
- 12, today, 'py', 'hi'),
+ 12, today, 'py', 'hi', 'BINDfoo'),
(52, 'imthedefault', f, ts, ts, ctexec, True, False,
- 12, today, 'py', 'hi'),
+ 12, today, 'py', 'hi', 'BINDfoo'),
(53, 'imthedefault', f, ts, ts, ctexec, True, False,
- 12, today, 'py', 'hi')])
+ 12, today, 'py', 'hi', 'BINDfoo')])
@testing.requires.multivalues_inserts
def test_insert_multivalues(self):
@@ -427,11 +438,11 @@ class DefaultTest(fixtures.TestBase):
today = datetime.date.today()
eq_(l.fetchall(),
[(51, 'imthedefault', f, ts, ts, ctexec, True, False,
- 12, today, 'py', 'hi'),
+ 12, today, 'py', 'hi', 'BINDfoo'),
(52, 'imthedefault', f, ts, ts, ctexec, True, False,
- 12, today, 'py', 'hi'),
+ 12, today, 'py', 'hi', 'BINDfoo'),
(53, 'imthedefault', f, ts, ts, ctexec, True, False,
- 12, today, 'py', 'hi')])
+ 12, today, 'py', 'hi', 'BINDfoo')])
def test_no_embed_in_sql(self):
"""Using a DefaultGenerator, Sequence, DefaultClause
@@ -498,11 +509,11 @@ class DefaultTest(fixtures.TestBase):
today = datetime.date.today()
eq_(l.fetchall(),
[(51, 'im the update', f2, ts, ts, ctexec, False, False,
- 13, today, 'py', 'hi'),
+ 13, today, 'py', 'hi', 'BINDfoo'),
(52, 'im the update', f2, ts, ts, ctexec, True, False,
- 13, today, 'py', 'hi'),
+ 13, today, 'py', 'hi', 'BINDfoo'),
(53, 'im the update', f2, ts, ts, ctexec, True, False,
- 13, today, 'py', 'hi')])
+ 13, today, 'py', 'hi', 'BINDfoo')])
@testing.fails_on('firebird', 'Data type unknown')
def test_update(self):
@@ -514,7 +525,7 @@ class DefaultTest(fixtures.TestBase):
l = l.first()
eq_(l,
(pk, 'im the update', f2, None, None, ctexec, True, False,
- 13, datetime.date.today(), 'py', 'hi'))
+ 13, datetime.date.today(), 'py', 'hi', 'BINDfoo'))
eq_(11, f2)
@testing.fails_on('firebird', 'Data type unknown')
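A standalone sketch (not part of the patch) of what the new col11 assertions check: a scalar Python-side column default is passed through the type's bind processor, so the default 'foo' arrives in the database as 'BINDfoo'. Table and column names here are illustrative.

    from sqlalchemy import Column, Integer, MetaData, String, Table, create_engine, select
    from sqlalchemy.types import TypeDecorator

    class MyType(TypeDecorator):
        impl = String(50)

        def process_bind_param(self, value, dialect):
            # prefix every bound value, including the column default
            if value is not None:
                value = "BIND" + value
            return value

    metadata = MetaData()
    t = Table('t', metadata,
              Column('id', Integer, primary_key=True),
              Column('col11', MyType(), default='foo'))

    engine = create_engine('sqlite://')
    metadata.create_all(engine)
    with engine.connect() as conn:
        conn.execute(t.insert())
        print(conn.execute(select([t.c.col11])).scalar())   # 'BINDfoo'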
diff --git a/test/sql/test_functions.py b/test/sql/test_functions.py
index ec8d9b5c0..51cfcb919 100644
--- a/test/sql/test_functions.py
+++ b/test/sql/test_functions.py
@@ -1,20 +1,20 @@
-from sqlalchemy.testing import eq_
+from sqlalchemy.testing import eq_, is_
import datetime
from sqlalchemy import func, select, Integer, literal, DateTime, Table, \
Column, Sequence, MetaData, extract, Date, String, bindparam, \
- literal_column
+ literal_column, Array, Numeric
from sqlalchemy.sql import table, column
from sqlalchemy import sql, util
from sqlalchemy.sql.compiler import BIND_TEMPLATES
from sqlalchemy.testing.engines import all_dialects
from sqlalchemy import types as sqltypes
from sqlalchemy.sql import functions
-from sqlalchemy.sql.functions import GenericFunction
+from sqlalchemy.sql.functions import GenericFunction, FunctionElement
import decimal
from sqlalchemy import testing
from sqlalchemy.testing import fixtures, AssertsCompiledSQL, engines
from sqlalchemy.dialects import sqlite, postgresql, mysql, oracle
-
+from sqlalchemy.testing import assert_raises_message
table1 = table('mytable',
column('myid', Integer),
@@ -52,7 +52,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
self.assert_compile(
fake_func('foo'),
"fake_func(%s)" %
- bindtemplate % {'name': 'param_1', 'position': 1},
+ bindtemplate % {'name': 'fake_func_1', 'position': 1},
dialect=dialect)
def test_use_labels(self):
@@ -89,7 +89,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
def test_generic_annotation(self):
fn = func.coalesce('x', 'y')._annotate({"foo": "bar"})
self.assert_compile(
- fn, "coalesce(:param_1, :param_2)"
+ fn, "coalesce(:coalesce_1, :coalesce_2)"
)
def test_custom_default_namespace(self):
@@ -140,7 +140,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
self.assert_compile(
func.my_func(1, 2),
- "my_func(:param_1, :param_2, :param_3)"
+ "my_func(:my_func_1, :my_func_2, :my_func_3)"
)
def test_custom_registered_identifier(self):
@@ -178,7 +178,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
self.assert_compile(
myfunc(1, 2, 3),
- "myfunc(:param_1, :param_2, :param_3)"
+ "myfunc(:myfunc_1, :myfunc_2, :myfunc_3)"
)
def test_namespacing_conflicts(self):
@@ -188,7 +188,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
assert isinstance(func.count().type, sqltypes.Integer)
self.assert_compile(func.count(), 'count(*)')
- self.assert_compile(func.count(1), 'count(:param_1)')
+ self.assert_compile(func.count(1), 'count(:count_1)')
c = column('abc')
self.assert_compile(func.count(c), 'count(abc)')
@@ -378,7 +378,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
def test_funcfilter_empty(self):
self.assert_compile(
func.count(1).filter(),
- "count(:param_1)"
+ "count(:count_1)"
)
def test_funcfilter_criterion(self):
@@ -386,7 +386,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
func.count(1).filter(
table1.c.name != None
),
- "count(:param_1) FILTER (WHERE mytable.name IS NOT NULL)"
+ "count(:count_1) FILTER (WHERE mytable.name IS NOT NULL)"
)
def test_funcfilter_compound_criterion(self):
@@ -395,7 +395,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
table1.c.name == None,
table1.c.myid > 0
),
- "count(:param_1) FILTER (WHERE mytable.name IS NULL AND "
+ "count(:count_1) FILTER (WHERE mytable.name IS NULL AND "
"mytable.myid > :myid_1)"
)
@@ -404,7 +404,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
select([func.count(1).filter(
table1.c.description != None
).label('foo')]),
- "SELECT count(:param_1) FILTER (WHERE mytable.description "
+ "SELECT count(:count_1) FILTER (WHERE mytable.description "
"IS NOT NULL) AS foo FROM mytable"
)
@@ -429,7 +429,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
table1.c.name == 'name'
)
]),
- "SELECT count(:param_1) FILTER (WHERE mytable.name = :name_1) "
+ "SELECT count(:count_1) FILTER (WHERE mytable.name = :name_1) "
"AS anon_1 FROM mytable"
)
@@ -443,7 +443,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
table1.c.description == 'description'
)
]),
- "SELECT count(:param_1) FILTER (WHERE "
+ "SELECT count(:count_1) FILTER (WHERE "
"mytable.name = :name_1 AND mytable.description = :description_1) "
"AS anon_1 FROM mytable"
)
@@ -477,6 +477,121 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
"AS anon_1 FROM mytable"
)
+ def test_funcfilter_within_group(self):
+ stmt = select([
+ table1.c.myid,
+ func.percentile_cont(0.5).within_group(
+ table1.c.name
+ )
+ ])
+ self.assert_compile(
+ stmt,
+ "SELECT mytable.myid, percentile_cont(:percentile_cont_1) "
+ "WITHIN GROUP (ORDER BY mytable.name) "
+ "AS anon_1 "
+ "FROM mytable",
+ {'percentile_cont_1': 0.5}
+ )
+
+ def test_funcfilter_within_group_multi(self):
+ stmt = select([
+ table1.c.myid,
+ func.percentile_cont(0.5).within_group(
+ table1.c.name, table1.c.description
+ )
+ ])
+ self.assert_compile(
+ stmt,
+ "SELECT mytable.myid, percentile_cont(:percentile_cont_1) "
+ "WITHIN GROUP (ORDER BY mytable.name, mytable.description) "
+ "AS anon_1 "
+ "FROM mytable",
+ {'percentile_cont_1': 0.5}
+ )
+
+ def test_funcfilter_within_group_desc(self):
+ stmt = select([
+ table1.c.myid,
+ func.percentile_cont(0.5).within_group(
+ table1.c.name.desc()
+ )
+ ])
+ self.assert_compile(
+ stmt,
+ "SELECT mytable.myid, percentile_cont(:percentile_cont_1) "
+ "WITHIN GROUP (ORDER BY mytable.name DESC) "
+ "AS anon_1 "
+ "FROM mytable",
+ {'percentile_cont_1': 0.5}
+ )
+
+ def test_funcfilter_within_group_w_over(self):
+ stmt = select([
+ table1.c.myid,
+ func.percentile_cont(0.5).within_group(
+ table1.c.name.desc()
+ ).over(partition_by=table1.c.description)
+ ])
+ self.assert_compile(
+ stmt,
+ "SELECT mytable.myid, percentile_cont(:percentile_cont_1) "
+ "WITHIN GROUP (ORDER BY mytable.name DESC) "
+ "OVER (PARTITION BY mytable.description) AS anon_1 "
+ "FROM mytable",
+ {'percentile_cont_1': 0.5}
+ )
+
+ def test_incorrect_none_type(self):
+ class MissingType(FunctionElement):
+ name = 'mt'
+ type = None
+
+ assert_raises_message(
+ TypeError,
+ "Object None associated with '.type' attribute is "
+ "not a TypeEngine class or object",
+ MissingType().compile
+ )
+
+
+class ReturnTypeTest(fixtures.TestBase):
+
+ def test_array_agg(self):
+ expr = func.array_agg(column('data', Integer))
+ is_(expr.type._type_affinity, Array)
+ is_(expr.type.item_type._type_affinity, Integer)
+
+ def test_mode(self):
+ expr = func.mode(0.5).within_group(
+ column('data', Integer).desc())
+ is_(expr.type._type_affinity, Integer)
+
+ def test_percentile_cont(self):
+ expr = func.percentile_cont(0.5).within_group(column('data', Integer))
+ is_(expr.type._type_affinity, Integer)
+
+ def test_percentile_cont_array(self):
+ expr = func.percentile_cont(0.5, 0.7).within_group(
+ column('data', Integer))
+ is_(expr.type._type_affinity, Array)
+ is_(expr.type.item_type._type_affinity, Integer)
+
+ def test_percentile_cont_array_desc(self):
+ expr = func.percentile_cont(0.5, 0.7).within_group(
+ column('data', Integer).desc())
+ is_(expr.type._type_affinity, Array)
+ is_(expr.type.item_type._type_affinity, Integer)
+
+ def test_cume_dist(self):
+ expr = func.cume_dist(0.5).within_group(
+ column('data', Integer).desc())
+ is_(expr.type._type_affinity, Numeric)
+
+ def test_percent_rank(self):
+ expr = func.percent_rank(0.5).within_group(
+ column('data', Integer))
+ is_(expr.type._type_affinity, Numeric)
+
class ExecuteTest(fixtures.TestBase):
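For orientation, a brief sketch (assuming the 1.1 within_group() API exercised above) of the set-aggregate construct and the function-derived bind parameter names the revised tests expect:

    from sqlalchemy import column, func, select

    stmt = select([func.percentile_cont(0.5).within_group(column('name').desc())])
    print(stmt)
    # SELECT percentile_cont(:percentile_cont_1)
    #   WITHIN GROUP (ORDER BY name DESC) AS anon_1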
diff --git a/test/sql/test_insert.py b/test/sql/test_insert.py
index 3c533d75f..f66f0b391 100644
--- a/test/sql/test_insert.py
+++ b/test/sql/test_insert.py
@@ -5,7 +5,7 @@ from sqlalchemy import Column, Integer, MetaData, String, Table,\
from sqlalchemy.dialects import mysql, postgresql
from sqlalchemy.engine import default
from sqlalchemy.testing import AssertsCompiledSQL,\
- assert_raises_message, fixtures
+ assert_raises_message, fixtures, eq_
from sqlalchemy.sql import crud
@@ -694,8 +694,21 @@ class MultirowTest(_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL):
'foo_2': None # evaluated later
}
+ stmt = table.insert().values(values)
+
+ eq_(
+ dict([
+ (k, v.type._type_affinity)
+ for (k, v) in
+ stmt.compile(dialect=postgresql.dialect()).binds.items()]),
+ {
+ 'foo': Integer, 'data_2': String, 'id_0': Integer,
+ 'id_2': Integer, 'foo_1': Integer, 'data_1': String,
+ 'id_1': Integer, 'foo_2': Integer, 'data_0': String}
+ )
+
self.assert_compile(
- table.insert().values(values),
+ stmt,
'INSERT INTO sometable (id, data, foo) VALUES '
'(%(id_0)s, %(data_0)s, %(foo)s), '
'(%(id_1)s, %(data_1)s, %(foo_1)s), '
@@ -728,8 +741,20 @@ class MultirowTest(_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL):
'foo_2': None, # evaluated later
}
+ stmt = table.insert().values(values)
+ eq_(
+ dict([
+ (k, v.type._type_affinity)
+ for (k, v) in
+ stmt.compile(dialect=postgresql.dialect()).binds.items()]),
+ {
+ 'foo': Integer, 'data_2': String, 'id_0': Integer,
+ 'id_2': Integer, 'foo_1': Integer, 'data_1': String,
+ 'id_1': Integer, 'foo_2': Integer, 'data_0': String}
+ )
+
self.assert_compile(
- table.insert().values(values),
+ stmt,
"INSERT INTO sometable (id, data, foo) VALUES "
"(%(id_0)s, %(data_0)s, %(foo)s), "
"(%(id_1)s, %(data_1)s, %(foo_1)s), "
diff --git a/test/sql/test_insert_exec.py b/test/sql/test_insert_exec.py
new file mode 100644
index 000000000..c49947425
--- /dev/null
+++ b/test/sql/test_insert_exec.py
@@ -0,0 +1,445 @@
+from sqlalchemy.testing import eq_, assert_raises_message, is_
+from sqlalchemy import testing
+from sqlalchemy.testing import fixtures, engines
+from sqlalchemy import (
+ exc, sql, String, Integer, MetaData, and_, ForeignKey,
+ VARCHAR, INT, Sequence, func)
+from sqlalchemy.testing.schema import Table, Column
+
+
+class InsertExecTest(fixtures.TablesTest):
+ __backend__ = True
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'users', metadata,
+ Column(
+ 'user_id', INT, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('user_name', VARCHAR(20)),
+ test_needs_acid=True
+ )
+
+ @testing.requires.multivalues_inserts
+ def test_multivalues_insert(self):
+ users = self.tables.users
+ users.insert(
+ values=[
+ {'user_id': 7, 'user_name': 'jack'},
+ {'user_id': 8, 'user_name': 'ed'}]).execute()
+ rows = users.select().order_by(users.c.user_id).execute().fetchall()
+ eq_(rows[0], (7, 'jack'))
+ eq_(rows[1], (8, 'ed'))
+ users.insert(values=[(9, 'jack'), (10, 'ed')]).execute()
+ rows = users.select().order_by(users.c.user_id).execute().fetchall()
+ eq_(rows[2], (9, 'jack'))
+ eq_(rows[3], (10, 'ed'))
+
+ def test_insert_heterogeneous_params(self):
+ """test that executemany parameters are asserted to match the
+ parameter set of the first."""
+ users = self.tables.users
+
+ assert_raises_message(
+ exc.StatementError,
+ r"\(sqlalchemy.exc.InvalidRequestError\) A value is required for "
+ "bind parameter 'user_name', in "
+ "parameter group 2 "
+ r"\[SQL: u?'INSERT INTO users",
+ users.insert().execute,
+ {'user_id': 7, 'user_name': 'jack'},
+ {'user_id': 8, 'user_name': 'ed'},
+ {'user_id': 9}
+ )
+
+ # this succeeds however. We aren't yet doing
+ # a length check on all subsequent parameters.
+ users.insert().execute(
+ {'user_id': 7},
+ {'user_id': 8, 'user_name': 'ed'},
+ {'user_id': 9}
+ )
+
+ def _test_lastrow_accessor(self, table_, values, assertvalues):
+        """Tests the inserted_primary_key and lastrow_has_defaults() functions."""
+
+ def insert_values(engine, table_, values):
+ """
+ Inserts a row into a table, returns the full list of values
+ INSERTed including defaults that fired off on the DB side and
+ detects rows that had defaults and post-fetches.
+ """
+
+ # verify implicit_returning is working
+ if engine.dialect.implicit_returning:
+ ins = table_.insert()
+ comp = ins.compile(engine, column_keys=list(values))
+ if not set(values).issuperset(
+ c.key for c in table_.primary_key):
+ is_(bool(comp.returning), True)
+
+ result = engine.execute(table_.insert(), **values)
+ ret = values.copy()
+
+ for col, id in zip(
+ table_.primary_key, result.inserted_primary_key):
+ ret[col.key] = id
+
+ if result.lastrow_has_defaults():
+ criterion = and_(
+ *[
+ col == id for col, id in
+ zip(table_.primary_key, result.inserted_primary_key)])
+ row = engine.execute(table_.select(criterion)).first()
+ for c in table_.c:
+ ret[c.key] = row[c]
+ return ret
+
+ if testing.against('firebird', 'postgresql', 'oracle', 'mssql'):
+ assert testing.db.dialect.implicit_returning
+
+ if testing.db.dialect.implicit_returning:
+ test_engines = [
+ engines.testing_engine(options={'implicit_returning': False}),
+ engines.testing_engine(options={'implicit_returning': True}),
+ ]
+ else:
+ test_engines = [testing.db]
+
+ for engine in test_engines:
+ try:
+ table_.create(bind=engine, checkfirst=True)
+ i = insert_values(engine, table_, values)
+ eq_(i, assertvalues)
+ finally:
+ table_.drop(bind=engine)
+
+ @testing.skip_if('sqlite')
+ def test_lastrow_accessor_one(self):
+ metadata = MetaData()
+ self._test_lastrow_accessor(
+ Table(
+ "t1", metadata,
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('foo', String(30), primary_key=True)),
+ {'foo': 'hi'},
+ {'id': 1, 'foo': 'hi'}
+ )
+
+ @testing.skip_if('sqlite')
+ def test_lastrow_accessor_two(self):
+ metadata = MetaData()
+ self._test_lastrow_accessor(
+ Table(
+ "t2", metadata,
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('foo', String(30), primary_key=True),
+ Column('bar', String(30), server_default='hi')
+ ),
+ {'foo': 'hi'},
+ {'id': 1, 'foo': 'hi', 'bar': 'hi'}
+ )
+
+ def test_lastrow_accessor_three(self):
+ metadata = MetaData()
+ self._test_lastrow_accessor(
+ Table(
+ "t3", metadata,
+ Column("id", String(40), primary_key=True),
+ Column('foo', String(30), primary_key=True),
+ Column("bar", String(30))
+ ),
+ {'id': 'hi', 'foo': 'thisisfoo', 'bar': "thisisbar"},
+ {'id': 'hi', 'foo': 'thisisfoo', 'bar': "thisisbar"}
+ )
+
+ def test_lastrow_accessor_four(self):
+ metadata = MetaData()
+ self._test_lastrow_accessor(
+ Table(
+ "t4", metadata,
+ Column(
+ 'id', Integer,
+ Sequence('t4_id_seq', optional=True),
+ primary_key=True),
+ Column('foo', String(30), primary_key=True),
+ Column('bar', String(30), server_default='hi')
+ ),
+ {'foo': 'hi', 'id': 1},
+ {'id': 1, 'foo': 'hi', 'bar': 'hi'}
+ )
+
+ def test_lastrow_accessor_five(self):
+ metadata = MetaData()
+ self._test_lastrow_accessor(
+ Table(
+ "t5", metadata,
+ Column('id', String(10), primary_key=True),
+ Column('bar', String(30), server_default='hi')
+ ),
+ {'id': 'id1'},
+ {'id': 'id1', 'bar': 'hi'},
+ )
+
+ @testing.skip_if('sqlite')
+ def test_lastrow_accessor_six(self):
+ metadata = MetaData()
+ self._test_lastrow_accessor(
+ Table(
+ "t6", metadata,
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('bar', Integer, primary_key=True)
+ ),
+ {'bar': 0},
+ {'id': 1, 'bar': 0},
+ )
+
+ # TODO: why not in the sqlite suite?
+ @testing.only_on('sqlite+pysqlite')
+ @testing.provide_metadata
+ def test_lastrowid_zero(self):
+ from sqlalchemy.dialects import sqlite
+ eng = engines.testing_engine()
+
+ class ExcCtx(sqlite.base.SQLiteExecutionContext):
+
+ def get_lastrowid(self):
+ return 0
+ eng.dialect.execution_ctx_cls = ExcCtx
+ t = Table(
+ 't', self.metadata, Column('x', Integer, primary_key=True),
+ Column('y', Integer))
+ t.create(eng)
+ r = eng.execute(t.insert().values(y=5))
+ eq_(r.inserted_primary_key, [0])
+
+ @testing.fails_on(
+        'sqlite', "sqlite autoincrement doesn't work with composite pks")
+ @testing.provide_metadata
+ def test_misordered_lastrow(self):
+ metadata = self.metadata
+
+ related = Table(
+ 'related', metadata,
+ Column('id', Integer, primary_key=True),
+ mysql_engine='MyISAM'
+ )
+ t6 = Table(
+ "t6", metadata,
+ Column(
+ 'manual_id', Integer, ForeignKey('related.id'),
+ primary_key=True),
+ Column(
+ 'auto_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ mysql_engine='MyISAM'
+ )
+
+ metadata.create_all()
+ r = related.insert().values(id=12).execute()
+ id_ = r.inserted_primary_key[0]
+ eq_(id_, 12)
+
+ r = t6.insert().values(manual_id=id_).execute()
+ eq_(r.inserted_primary_key, [12, 1])
+
+ def test_implicit_id_insert_select_columns(self):
+ users = self.tables.users
+ stmt = users.insert().from_select(
+ (users.c.user_id, users.c.user_name),
+ users.select().where(users.c.user_id == 20))
+
+ testing.db.execute(stmt)
+
+ def test_implicit_id_insert_select_keys(self):
+ users = self.tables.users
+ stmt = users.insert().from_select(
+ ["user_id", "user_name"],
+ users.select().where(users.c.user_id == 20))
+
+ testing.db.execute(stmt)
+
+ @testing.requires.empty_inserts
+ @testing.requires.returning
+ def test_no_inserted_pk_on_returning(self):
+ users = self.tables.users
+ result = testing.db.execute(users.insert().returning(
+ users.c.user_id, users.c.user_name))
+ assert_raises_message(
+ exc.InvalidRequestError,
+ r"Can't call inserted_primary_key when returning\(\) is used.",
+ getattr, result, 'inserted_primary_key'
+ )
+
+
+class TableInsertTest(fixtures.TablesTest):
+
+ """test for consistent insert behavior across dialects
+ regarding the inline=True flag, lower-case 't' tables.
+
+ """
+ run_create_tables = 'each'
+ __backend__ = True
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'foo', metadata,
+ Column('id', Integer, Sequence('t_id_seq'), primary_key=True),
+ Column('data', String(50)),
+ Column('x', Integer)
+ )
+
+ def _fixture(self, types=True):
+ if types:
+ t = sql.table(
+ 'foo', sql.column('id', Integer),
+ sql.column('data', String),
+ sql.column('x', Integer))
+ else:
+ t = sql.table(
+ 'foo', sql.column('id'), sql.column('data'), sql.column('x'))
+ return t
+
+ def _test(self, stmt, row, returning=None, inserted_primary_key=False):
+ r = testing.db.execute(stmt)
+
+ if returning:
+ returned = r.first()
+ eq_(returned, returning)
+ elif inserted_primary_key is not False:
+ eq_(r.inserted_primary_key, inserted_primary_key)
+
+ eq_(testing.db.execute(self.tables.foo.select()).first(), row)
+
+ def _test_multi(self, stmt, rows, data):
+ testing.db.execute(stmt, rows)
+ eq_(
+ testing.db.execute(
+ self.tables.foo.select().
+ order_by(self.tables.foo.c.id)).fetchall(),
+ data)
+
+ @testing.requires.sequences
+    def test_explicit_sequence(self):
+ t = self._fixture()
+ self._test(
+ t.insert().values(
+ id=func.next_value(Sequence('t_id_seq')), data='data', x=5),
+ (1, 'data', 5)
+ )
+
+ def test_uppercase(self):
+ t = self.tables.foo
+ self._test(
+ t.insert().values(id=1, data='data', x=5),
+ (1, 'data', 5),
+ inserted_primary_key=[1]
+ )
+
+ def test_uppercase_inline(self):
+ t = self.tables.foo
+ self._test(
+ t.insert(inline=True).values(id=1, data='data', x=5),
+ (1, 'data', 5),
+ inserted_primary_key=[1]
+ )
+
+ @testing.crashes(
+ "mssql+pyodbc",
+ "Pyodbc + SQL Server + Py3K, some decimal handling issue")
+ def test_uppercase_inline_implicit(self):
+ t = self.tables.foo
+ self._test(
+ t.insert(inline=True).values(data='data', x=5),
+ (1, 'data', 5),
+ inserted_primary_key=[None]
+ )
+
+ def test_uppercase_implicit(self):
+ t = self.tables.foo
+ self._test(
+ t.insert().values(data='data', x=5),
+ (1, 'data', 5),
+ inserted_primary_key=[1]
+ )
+
+ def test_uppercase_direct_params(self):
+ t = self.tables.foo
+ self._test(
+ t.insert().values(id=1, data='data', x=5),
+ (1, 'data', 5),
+ inserted_primary_key=[1]
+ )
+
+ @testing.requires.returning
+ def test_uppercase_direct_params_returning(self):
+ t = self.tables.foo
+ self._test(
+ t.insert().values(id=1, data='data', x=5).returning(t.c.id, t.c.x),
+ (1, 'data', 5),
+ returning=(1, 5)
+ )
+
+ @testing.fails_on(
+ 'mssql', "lowercase table doesn't support identity insert disable")
+ def test_direct_params(self):
+ t = self._fixture()
+ self._test(
+ t.insert().values(id=1, data='data', x=5),
+ (1, 'data', 5),
+ inserted_primary_key=[]
+ )
+
+ @testing.fails_on(
+ 'mssql', "lowercase table doesn't support identity insert disable")
+ @testing.requires.returning
+ def test_direct_params_returning(self):
+ t = self._fixture()
+ self._test(
+ t.insert().values(id=1, data='data', x=5).returning(t.c.id, t.c.x),
+ (1, 'data', 5),
+ returning=(1, 5)
+ )
+
+ @testing.requires.emulated_lastrowid
+ def test_implicit_pk(self):
+ t = self._fixture()
+ self._test(
+ t.insert().values(data='data', x=5),
+ (1, 'data', 5),
+ inserted_primary_key=[]
+ )
+
+ @testing.requires.emulated_lastrowid
+ def test_implicit_pk_multi_rows(self):
+ t = self._fixture()
+ self._test_multi(
+ t.insert(),
+ [
+ {'data': 'd1', 'x': 5},
+ {'data': 'd2', 'x': 6},
+ {'data': 'd3', 'x': 7},
+ ],
+ [
+ (1, 'd1', 5),
+ (2, 'd2', 6),
+ (3, 'd3', 7)
+ ],
+ )
+
+ @testing.requires.emulated_lastrowid
+ def test_implicit_pk_inline(self):
+ t = self._fixture()
+ self._test(
+ t.insert(inline=True).values(data='data', x=5),
+ (1, 'data', 5),
+ inserted_primary_key=[]
+ )
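A minimal sketch (not part of the patch) of the core accessor the new test_insert_exec.py module is organized around: ResultProxy.inserted_primary_key after a single-row INSERT.

    from sqlalchemy import Column, Integer, MetaData, String, Table, create_engine

    metadata = MetaData()
    users = Table('users', metadata,
                  Column('user_id', Integer, primary_key=True),
                  Column('user_name', String(20)))

    engine = create_engine('sqlite://')
    metadata.create_all(engine)
    with engine.connect() as conn:
        result = conn.execute(users.insert().values(user_name='jack'))
        print(result.inserted_primary_key)   # [1] on SQLite via lastrowid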
diff --git a/test/sql/test_operators.py b/test/sql/test_operators.py
index 65d1e3716..03c0f89be 100644
--- a/test/sql/test_operators.py
+++ b/test/sql/test_operators.py
@@ -1,7 +1,8 @@
from sqlalchemy.testing import fixtures, eq_, is_, is_not_
from sqlalchemy import testing
from sqlalchemy.testing import assert_raises_message
-from sqlalchemy.sql import column, desc, asc, literal, collate, null, true, false
+from sqlalchemy.sql import column, desc, asc, literal, collate, null, \
+ true, false, any_, all_
from sqlalchemy.sql.expression import BinaryExpression, \
ClauseList, Grouping, \
UnaryExpression, select, union, func, tuple_
@@ -12,8 +13,9 @@ from sqlalchemy import exc
from sqlalchemy.engine import default
from sqlalchemy.sql.elements import _literal_as_text
from sqlalchemy.schema import Column, Table, MetaData
+from sqlalchemy.sql import compiler
from sqlalchemy.types import TypeEngine, TypeDecorator, UserDefinedType, \
- Boolean, NullType, MatchType
+ Boolean, NullType, MatchType, Indexable, Concatenable, Array
from sqlalchemy.dialects import mysql, firebird, postgresql, oracle, \
sqlite, mssql
from sqlalchemy import util
@@ -21,7 +23,6 @@ import datetime
import collections
from sqlalchemy import text, literal_column
from sqlalchemy import and_, not_, between, or_
-from sqlalchemy.sql import true, false, null
class LoopOperate(operators.ColumnOperators):
@@ -210,6 +211,60 @@ class DefaultColumnComparatorTest(fixtures.TestBase):
def test_concat(self):
self._do_operate_test(operators.concat_op)
+ def test_default_adapt(self):
+ class TypeOne(TypeEngine):
+ pass
+
+ class TypeTwo(TypeEngine):
+ pass
+
+ expr = column('x', TypeOne()) - column('y', TypeTwo())
+ is_(
+ expr.type._type_affinity, TypeOne
+ )
+
+ def test_concatenable_adapt(self):
+ class TypeOne(Concatenable, TypeEngine):
+ pass
+
+ class TypeTwo(Concatenable, TypeEngine):
+ pass
+
+ class TypeThree(TypeEngine):
+ pass
+
+ expr = column('x', TypeOne()) - column('y', TypeTwo())
+ is_(
+ expr.type._type_affinity, TypeOne
+ )
+ is_(
+ expr.operator, operator.sub
+ )
+
+ expr = column('x', TypeOne()) + column('y', TypeTwo())
+ is_(
+ expr.type._type_affinity, TypeOne
+ )
+ is_(
+ expr.operator, operators.concat_op
+ )
+
+ expr = column('x', TypeOne()) - column('y', TypeThree())
+ is_(
+ expr.type._type_affinity, TypeOne
+ )
+ is_(
+ expr.operator, operator.sub
+ )
+
+ expr = column('x', TypeOne()) + column('y', TypeThree())
+ is_(
+ expr.type._type_affinity, TypeOne
+ )
+ is_(
+ expr.operator, operator.add
+ )
+
class CustomUnaryOperatorTest(fixtures.TestBase, testing.AssertsCompiledSQL):
__dialect__ = 'default'
@@ -577,6 +632,200 @@ class ExtensionOperatorTest(fixtures.TestBase, testing.AssertsCompiledSQL):
)
+class IndexableTest(fixtures.TestBase, testing.AssertsCompiledSQL):
+ def setUp(self):
+ class MyTypeCompiler(compiler.GenericTypeCompiler):
+ def visit_mytype(self, type, **kw):
+ return "MYTYPE"
+
+ def visit_myothertype(self, type, **kw):
+ return "MYOTHERTYPE"
+
+ class MyCompiler(compiler.SQLCompiler):
+ def visit_slice(self, element, **kw):
+ return "%s:%s" % (
+ self.process(element.start, **kw),
+ self.process(element.stop, **kw),
+ )
+
+ def visit_getitem_binary(self, binary, operator, **kw):
+ return "%s[%s]" % (
+ self.process(binary.left, **kw),
+ self.process(binary.right, **kw)
+ )
+
+ class MyDialect(default.DefaultDialect):
+ statement_compiler = MyCompiler
+ type_compiler = MyTypeCompiler
+
+ class MyType(Indexable, TypeEngine):
+ __visit_name__ = 'mytype'
+
+ def __init__(self, zero_indexes=False, dimensions=1):
+ if zero_indexes:
+ self.zero_indexes = zero_indexes
+ self.dimensions = dimensions
+
+ class Comparator(Indexable.Comparator):
+ def _setup_getitem(self, index):
+ if isinstance(index, slice):
+ return_type = self.type
+ elif self.type.dimensions is None or \
+ self.type.dimensions == 1:
+ return_type = Integer()
+ else:
+ adapt_kw = {'dimensions': self.type.dimensions - 1}
+                        # this also tests the behavior of adapt(): keyword
+                        # arguments passed to it override the constructor
+                        # keywords, which required a small change to
+                        # util.constructor_copy().
+ return_type = self.type.adapt(
+ self.type.__class__, **adapt_kw)
+
+ return operators.getitem, index, return_type
+ comparator_factory = Comparator
+
+ self.MyType = MyType
+ self.__dialect__ = MyDialect()
+
+ def test_setup_getitem_w_dims(self):
+ """test the behavior of the _setup_getitem() method given a simple
+ 'dimensions' scheme - this is identical to postgresql.ARRAY."""
+
+ col = Column('x', self.MyType(dimensions=3))
+
+ is_(
+ col[5].type._type_affinity, self.MyType
+ )
+ eq_(
+ col[5].type.dimensions, 2
+ )
+ is_(
+ col[5][6].type._type_affinity, self.MyType
+ )
+ eq_(
+ col[5][6].type.dimensions, 1
+ )
+ is_(
+ col[5][6][7].type._type_affinity, Integer
+ )
+
+ def test_getindex_literal(self):
+
+ col = Column('x', self.MyType())
+
+ self.assert_compile(
+ col[5],
+ "x[:x_1]",
+ checkparams={'x_1': 5}
+ )
+
+ def test_getindex_sqlexpr(self):
+
+ col = Column('x', self.MyType())
+ col2 = Column('y', Integer())
+
+ self.assert_compile(
+ col[col2],
+ "x[y]",
+ checkparams={}
+ )
+
+ self.assert_compile(
+ col[col2 + 8],
+ "x[(y + :y_1)]",
+ checkparams={'y_1': 8}
+ )
+
+ def test_getslice_literal(self):
+
+ col = Column('x', self.MyType())
+
+ self.assert_compile(
+ col[5:6],
+ "x[:x_1::x_2]",
+ checkparams={'x_1': 5, 'x_2': 6}
+ )
+
+ def test_getslice_sqlexpr(self):
+
+ col = Column('x', self.MyType())
+ col2 = Column('y', Integer())
+
+ self.assert_compile(
+ col[col2:col2 + 5],
+ "x[y:y + :y_1]",
+ checkparams={'y_1': 5}
+ )
+
+ def test_getindex_literal_zeroind(self):
+
+ col = Column('x', self.MyType(zero_indexes=True))
+
+ self.assert_compile(
+ col[5],
+ "x[:x_1]",
+ checkparams={'x_1': 6}
+ )
+
+ def test_getindex_sqlexpr_zeroind(self):
+
+ col = Column('x', self.MyType(zero_indexes=True))
+ col2 = Column('y', Integer())
+
+ self.assert_compile(
+ col[col2],
+ "x[(y + :y_1)]",
+ checkparams={'y_1': 1}
+ )
+
+ self.assert_compile(
+ col[col2 + 8],
+ "x[(y + :y_1 + :param_1)]",
+ checkparams={'y_1': 8, 'param_1': 1}
+ )
+
+ def test_getslice_literal_zeroind(self):
+
+ col = Column('x', self.MyType(zero_indexes=True))
+
+ self.assert_compile(
+ col[5:6],
+ "x[:x_1::x_2]",
+ checkparams={'x_1': 6, 'x_2': 7}
+ )
+
+ def test_getslice_sqlexpr_zeroind(self):
+
+ col = Column('x', self.MyType(zero_indexes=True))
+ col2 = Column('y', Integer())
+
+ self.assert_compile(
+ col[col2:col2 + 5],
+ "x[y + :y_1:y + :y_2 + :param_1]",
+ checkparams={'y_1': 1, 'y_2': 5, 'param_1': 1}
+ )
+
+ def test_override_operators(self):
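+        # a type's Comparator can replace the getitem operator entirely
+        # via _adapt_expression(); here index access renders as "->"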
+ special_index_op = operators.custom_op('->')
+
+ class MyOtherType(Indexable, TypeEngine):
+ __visit_name__ = 'myothertype'
+
+ class Comparator(TypeEngine.Comparator):
+
+ def _adapt_expression(self, op, other_comparator):
+ return special_index_op, MyOtherType()
+
+ comparator_factory = Comparator
+
+ col = Column('x', MyOtherType())
+ self.assert_compile(
+ col[5],
+ "x -> :x_1",
+ checkparams={'x_1': 5}
+ )
+
+
class BooleanEvalTest(fixtures.TestBase, testing.AssertsCompiledSQL):
"""test standalone booleans being wrapped in an AsBoolean, as well
@@ -825,6 +1074,64 @@ class ConjunctionTest(fixtures.TestBase, testing.AssertsCompiledSQL):
"SELECT false AS anon_1, false AS anon_2"
)
+ def test_is_true_literal(self):
+ c = column('x', Boolean)
+ self.assert_compile(
+ c.is_(True),
+ "x IS true"
+ )
+
+ def test_is_false_literal(self):
+ c = column('x', Boolean)
+ self.assert_compile(
+ c.is_(False),
+ "x IS false"
+ )
+
+ def test_and_false_literal_leading(self):
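+        # plain Python booleans are folded into the expression at compile
+        # time, so a leading False short-circuits the AND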
+ self.assert_compile(
+ and_(False, True),
+ "false"
+ )
+
+ self.assert_compile(
+ and_(False, False),
+ "false"
+ )
+
+ def test_and_true_literal_leading(self):
+ self.assert_compile(
+ and_(True, True),
+ "true"
+ )
+
+ self.assert_compile(
+ and_(True, False),
+ "false"
+ )
+
+ def test_or_false_literal_leading(self):
+ self.assert_compile(
+ or_(False, True),
+ "true"
+ )
+
+ self.assert_compile(
+ or_(False, False),
+ "false"
+ )
+
+ def test_or_true_literal_leading(self):
+ self.assert_compile(
+ or_(True, True),
+ "true"
+ )
+
+ self.assert_compile(
+ or_(True, False),
+ "true"
+ )
+
class OperatorPrecedenceTest(fixtures.TestBase, testing.AssertsCompiledSQL):
__dialect__ = 'default'
@@ -1956,3 +2263,154 @@ class TupleTypingTest(fixtures.TestBase):
eq_(len(expr.right.clauses), 2)
for elem in expr.right.clauses:
self._assert_types(elem)
+
+
+class AnyAllTest(fixtures.TestBase, testing.AssertsCompiledSQL):
+ __dialect__ = 'default'
+
+ def _fixture(self):
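+        # a table with an ARRAY column and a plain Integer column, used
+        # to exercise ANY / ALL against array values and subqueries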
+ m = MetaData()
+
+ t = Table(
+ 'tab1', m,
+ Column('arrval', Array(Integer)),
+ Column('data', Integer)
+ )
+ return t
+
+ def test_any_array(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ 5 == any_(t.c.arrval),
+ ":param_1 = ANY (tab1.arrval)",
+ checkparams={"param_1": 5}
+ )
+
+ def test_all_array(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ 5 == all_(t.c.arrval),
+ ":param_1 = ALL (tab1.arrval)",
+ checkparams={"param_1": 5}
+ )
+
+ def test_any_comparator_array(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ 5 > any_(t.c.arrval),
+ ":param_1 > ANY (tab1.arrval)",
+ checkparams={"param_1": 5}
+ )
+
+ def test_all_comparator_array(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ 5 > all_(t.c.arrval),
+ ":param_1 > ALL (tab1.arrval)",
+ checkparams={"param_1": 5}
+ )
+
+ def test_any_comparator_array_wexpr(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ t.c.data > any_(t.c.arrval),
+ "tab1.data > ANY (tab1.arrval)",
+ checkparams={}
+ )
+
+ def test_all_comparator_array_wexpr(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ t.c.data > all_(t.c.arrval),
+ "tab1.data > ALL (tab1.arrval)",
+ checkparams={}
+ )
+
+ def test_illegal_ops(self):
+ t = self._fixture()
+
+ assert_raises_message(
+ exc.ArgumentError,
+ "Only comparison operators may be used with ANY/ALL",
+ lambda: 5 + all_(t.c.arrval)
+ )
+
+ # TODO:
+ # this is invalid but doesn't raise an error,
+ # as the left-hand side just does its thing. Types
+ # would need to reject their right-hand side.
+ self.assert_compile(
+ t.c.data + all_(t.c.arrval),
+ "tab1.data + ALL (tab1.arrval)"
+ )
+
+ def test_any_array_comparator_accessor(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ t.c.arrval.any(5, operator.gt),
+ ":param_1 > ANY (tab1.arrval)",
+ checkparams={"param_1": 5}
+ )
+
+ def test_all_array_comparator_accessor(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ t.c.arrval.all(5, operator.gt),
+ ":param_1 > ALL (tab1.arrval)",
+ checkparams={"param_1": 5}
+ )
+
+ def test_any_array_expression(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ 5 == any_(t.c.arrval[5:6] + postgresql.array([3, 4])),
+ "%(param_1)s = ANY (tab1.arrval[%(arrval_1)s:%(arrval_2)s] || "
+ "ARRAY[%(param_2)s, %(param_3)s])",
+ checkparams={
+ 'arrval_2': 6, 'param_1': 5, 'param_3': 4,
+ 'arrval_1': 5, 'param_2': 3},
+ dialect='postgresql'
+ )
+
+ def test_all_array_expression(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ 5 == all_(t.c.arrval[5:6] + postgresql.array([3, 4])),
+ "%(param_1)s = ALL (tab1.arrval[%(arrval_1)s:%(arrval_2)s] || "
+ "ARRAY[%(param_2)s, %(param_3)s])",
+ checkparams={
+ 'arrval_2': 6, 'param_1': 5, 'param_3': 4,
+ 'arrval_1': 5, 'param_2': 3},
+ dialect='postgresql'
+ )
+
+ def test_any_subq(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ 5 == any_(select([t.c.data]).where(t.c.data < 10)),
+ ":param_1 = ANY (SELECT tab1.data "
+ "FROM tab1 WHERE tab1.data < :data_1)",
+ checkparams={'data_1': 10, 'param_1': 5}
+ )
+
+ def test_all_subq(self):
+ t = self._fixture()
+
+ self.assert_compile(
+ 5 == all_(select([t.c.data]).where(t.c.data < 10)),
+ ":param_1 = ALL (SELECT tab1.data "
+ "FROM tab1 WHERE tab1.data < :data_1)",
+ checkparams={'data_1': 10, 'param_1': 5}
+ )
+
diff --git a/test/sql/test_query.py b/test/sql/test_query.py
index 98f375018..aca933fc9 100644
--- a/test/sql/test_query.py
+++ b/test/sql/test_query.py
@@ -1,13 +1,13 @@
-from sqlalchemy.testing import eq_, assert_raises_message, assert_raises, is_
+from sqlalchemy.testing import eq_, assert_raises_message, assert_raises, \
+ is_, in_, not_in_
from sqlalchemy import testing
from sqlalchemy.testing import fixtures, engines
-from sqlalchemy import util
from sqlalchemy import (
exc, sql, func, select, String, Integer, MetaData, and_, ForeignKey,
- union, intersect, except_, union_all, VARCHAR, INT, CHAR, text, Sequence,
- bindparam, literal, not_, type_coerce, literal_column, desc, asc,
- TypeDecorator, or_, cast, table, column)
-from sqlalchemy.engine import default, result as _result
+ union, intersect, except_, union_all, VARCHAR, INT, text,
+ bindparam, literal, not_, literal_column, desc, asc,
+ TypeDecorator, or_, cast)
+from sqlalchemy.engine import default
from sqlalchemy.testing.schema import Table, Column
# ongoing - these are old tests. those which are of general use
@@ -61,260 +61,6 @@ class QueryTest(fixtures.TestBase):
def teardown_class(cls):
metadata.drop_all()
- @testing.requires.multivalues_inserts
- def test_multivalues_insert(self):
- users.insert(
- values=[
- {'user_id': 7, 'user_name': 'jack'},
- {'user_id': 8, 'user_name': 'ed'}]).execute()
- rows = users.select().order_by(users.c.user_id).execute().fetchall()
- self.assert_(rows[0] == (7, 'jack'))
- self.assert_(rows[1] == (8, 'ed'))
- users.insert(values=[(9, 'jack'), (10, 'ed')]).execute()
- rows = users.select().order_by(users.c.user_id).execute().fetchall()
- self.assert_(rows[2] == (9, 'jack'))
- self.assert_(rows[3] == (10, 'ed'))
-
- def test_insert_heterogeneous_params(self):
- """test that executemany parameters are asserted to match the
- parameter set of the first."""
-
- assert_raises_message(
- exc.StatementError,
- r"\(sqlalchemy.exc.InvalidRequestError\) A value is required for "
- "bind parameter 'user_name', in "
- "parameter group 2 "
- r"\[SQL: u?'INSERT INTO query_users",
- users.insert().execute,
- {'user_id': 7, 'user_name': 'jack'},
- {'user_id': 8, 'user_name': 'ed'},
- {'user_id': 9}
- )
-
- # this succeeds however. We aren't yet doing
- # a length check on all subsequent parameters.
- users.insert().execute(
- {'user_id': 7},
- {'user_id': 8, 'user_name': 'ed'},
- {'user_id': 9}
- )
-
- def test_lastrow_accessor(self):
- """Tests the inserted_primary_key and lastrow_has_id() functions."""
-
- def insert_values(engine, table, values):
- """
- Inserts a row into a table, returns the full list of values
- INSERTed including defaults that fired off on the DB side and
- detects rows that had defaults and post-fetches.
- """
-
- # verify implicit_returning is working
- if engine.dialect.implicit_returning:
- ins = table.insert()
- comp = ins.compile(engine, column_keys=list(values))
- if not set(values).issuperset(
- c.key for c in table.primary_key):
- assert comp.returning
-
- result = engine.execute(table.insert(), **values)
- ret = values.copy()
-
- for col, id in zip(table.primary_key, result.inserted_primary_key):
- ret[col.key] = id
-
- if result.lastrow_has_defaults():
- criterion = and_(
- *[
- col == id for col, id in
- zip(table.primary_key, result.inserted_primary_key)])
- row = engine.execute(table.select(criterion)).first()
- for c in table.c:
- ret[c.key] = row[c]
- return ret
-
- if testing.against('firebird', 'postgresql', 'oracle', 'mssql'):
- assert testing.db.dialect.implicit_returning
-
- if testing.db.dialect.implicit_returning:
- test_engines = [
- engines.testing_engine(options={'implicit_returning': False}),
- engines.testing_engine(options={'implicit_returning': True}),
- ]
- else:
- test_engines = [testing.db]
-
- for engine in test_engines:
- metadata = MetaData()
- for supported, table, values, assertvalues in [
- (
- {'unsupported': ['sqlite']},
- Table(
- "t1", metadata,
- Column(
- 'id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('foo', String(30), primary_key=True)),
- {'foo': 'hi'},
- {'id': 1, 'foo': 'hi'}
- ),
- (
- {'unsupported': ['sqlite']},
- Table(
- "t2", metadata,
- Column(
- 'id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('foo', String(30), primary_key=True),
- Column('bar', String(30), server_default='hi')
- ),
- {'foo': 'hi'},
- {'id': 1, 'foo': 'hi', 'bar': 'hi'}
- ),
- (
- {'unsupported': []},
- Table(
- "t3", metadata,
- Column("id", String(40), primary_key=True),
- Column('foo', String(30), primary_key=True),
- Column("bar", String(30))
- ),
- {'id': 'hi', 'foo': 'thisisfoo', 'bar': "thisisbar"},
- {'id': 'hi', 'foo': 'thisisfoo', 'bar': "thisisbar"}
- ),
- (
- {'unsupported': []},
- Table(
- "t4", metadata,
- Column(
- 'id', Integer,
- Sequence('t4_id_seq', optional=True),
- primary_key=True),
- Column('foo', String(30), primary_key=True),
- Column('bar', String(30), server_default='hi')
- ),
- {'foo': 'hi', 'id': 1},
- {'id': 1, 'foo': 'hi', 'bar': 'hi'}
- ),
- (
- {'unsupported': []},
- Table(
- "t5", metadata,
- Column('id', String(10), primary_key=True),
- Column('bar', String(30), server_default='hi')
- ),
- {'id': 'id1'},
- {'id': 'id1', 'bar': 'hi'},
- ),
- (
- {'unsupported': ['sqlite']},
- Table(
- "t6", metadata,
- Column(
- 'id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('bar', Integer, primary_key=True)
- ),
- {'bar': 0},
- {'id': 1, 'bar': 0},
- ),
- ]:
- if testing.db.name in supported['unsupported']:
- continue
- try:
- table.create(bind=engine, checkfirst=True)
- i = insert_values(engine, table, values)
- assert i == assertvalues, "tablename: %s %r %r" % \
- (table.name, repr(i), repr(assertvalues))
- finally:
- table.drop(bind=engine)
-
- # TODO: why not in the sqlite suite?
- @testing.only_on('sqlite+pysqlite')
- @testing.provide_metadata
- def test_lastrowid_zero(self):
- from sqlalchemy.dialects import sqlite
- eng = engines.testing_engine()
-
- class ExcCtx(sqlite.base.SQLiteExecutionContext):
-
- def get_lastrowid(self):
- return 0
- eng.dialect.execution_ctx_cls = ExcCtx
- t = Table(
- 't', self.metadata, Column('x', Integer, primary_key=True),
- Column('y', Integer))
- t.create(eng)
- r = eng.execute(t.insert().values(y=5))
- eq_(r.inserted_primary_key, [0])
-
- @testing.fails_on(
- 'sqlite', "sqlite autoincremnt doesn't work with composite pks")
- def test_misordered_lastrow(self):
- related = Table(
- 'related', metadata,
- Column('id', Integer, primary_key=True),
- mysql_engine='MyISAM'
- )
- t6 = Table(
- "t6", metadata,
- Column(
- 'manual_id', Integer, ForeignKey('related.id'),
- primary_key=True),
- Column(
- 'auto_id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- mysql_engine='MyISAM'
- )
-
- metadata.create_all()
- r = related.insert().values(id=12).execute()
- id = r.inserted_primary_key[0]
- assert id == 12
-
- r = t6.insert().values(manual_id=id).execute()
- eq_(r.inserted_primary_key, [12, 1])
-
- def test_implicit_id_insert_select_columns(self):
- stmt = users.insert().from_select(
- (users.c.user_id, users.c.user_name),
- users.select().where(users.c.user_id == 20))
-
- testing.db.execute(stmt)
-
- def test_implicit_id_insert_select_keys(self):
- stmt = users.insert().from_select(
- ["user_id", "user_name"],
- users.select().where(users.c.user_id == 20))
-
- testing.db.execute(stmt)
-
- def test_row_iteration(self):
- users.insert().execute(
- {'user_id': 7, 'user_name': 'jack'},
- {'user_id': 8, 'user_name': 'ed'},
- {'user_id': 9, 'user_name': 'fred'},
- )
- r = users.select().execute()
- l = []
- for row in r:
- l.append(row)
- self.assert_(len(l) == 3)
-
- @testing.requires.subqueries
- def test_anonymous_rows(self):
- users.insert().execute(
- {'user_id': 7, 'user_name': 'jack'},
- {'user_id': 8, 'user_name': 'ed'},
- {'user_id': 9, 'user_name': 'fred'},
- )
-
- sel = select([users.c.user_id]).where(users.c.user_name == 'jack'). \
- as_scalar()
- for row in select([sel + 1, sel + 3], bind=users.bind).execute():
- assert row['anon_1'] == 8
- assert row['anon_2'] == 10
-
@testing.fails_on(
'firebird', "kinterbasdb doesn't send full type information")
def test_order_by_label(self):
@@ -364,154 +110,6 @@ class QueryTest(fixtures.TestBase):
[("test: ed",), ("test: fred",), ("test: jack",)]
)
- def test_row_comparison(self):
- users.insert().execute(user_id=7, user_name='jack')
- rp = users.select().execute().first()
-
- self.assert_(rp == rp)
- self.assert_(not(rp != rp))
-
- equal = (7, 'jack')
-
- self.assert_(rp == equal)
- self.assert_(equal == rp)
- self.assert_(not (rp != equal))
- self.assert_(not (equal != equal))
-
- def endless():
- while True:
- yield 1
- self.assert_(rp != endless())
- self.assert_(endless() != rp)
-
- # test that everything compares the same
- # as it would against a tuple
- import operator
- for compare in [False, 8, endless(), 'xyz', (7, 'jack')]:
- for op in [
- operator.eq, operator.ne, operator.gt,
- operator.lt, operator.ge, operator.le
- ]:
-
- try:
- control = op(equal, compare)
- except TypeError:
- # Py3K raises TypeError for some invalid comparisons
- assert_raises(TypeError, op, rp, compare)
- else:
- eq_(control, op(rp, compare))
-
- try:
- control = op(compare, equal)
- except TypeError:
- # Py3K raises TypeError for some invalid comparisons
- assert_raises(TypeError, op, compare, rp)
- else:
- eq_(control, op(compare, rp))
-
- @testing.provide_metadata
- def test_column_label_overlap_fallback(self):
- content = Table(
- 'content', self.metadata,
- Column('type', String(30)),
- )
- bar = Table(
- 'bar', self.metadata,
- Column('content_type', String(30))
- )
- self.metadata.create_all(testing.db)
- testing.db.execute(content.insert().values(type="t1"))
-
- row = testing.db.execute(content.select(use_labels=True)).first()
- assert content.c.type in row
- assert bar.c.content_type not in row
- assert sql.column('content_type') in row
-
- row = testing.db.execute(
- select([content.c.type.label("content_type")])).first()
- assert content.c.type in row
-
- assert bar.c.content_type not in row
-
- assert sql.column('content_type') in row
-
- row = testing.db.execute(select([func.now().label("content_type")])). \
- first()
- assert content.c.type not in row
-
- assert bar.c.content_type not in row
-
- assert sql.column('content_type') in row
-
- def test_pickled_rows(self):
- users.insert().execute(
- {'user_id': 7, 'user_name': 'jack'},
- {'user_id': 8, 'user_name': 'ed'},
- {'user_id': 9, 'user_name': 'fred'},
- )
-
- for pickle in False, True:
- for use_labels in False, True:
- result = users.select(use_labels=use_labels).order_by(
- users.c.user_id).execute().fetchall()
-
- if pickle:
- result = util.pickle.loads(util.pickle.dumps(result))
-
- eq_(
- result,
- [(7, "jack"), (8, "ed"), (9, "fred")]
- )
- if use_labels:
- eq_(result[0]['query_users_user_id'], 7)
- eq_(
- list(result[0].keys()),
- ["query_users_user_id", "query_users_user_name"])
- else:
- eq_(result[0]['user_id'], 7)
- eq_(list(result[0].keys()), ["user_id", "user_name"])
-
- eq_(result[0][0], 7)
- eq_(result[0][users.c.user_id], 7)
- eq_(result[0][users.c.user_name], 'jack')
-
- if not pickle or use_labels:
- assert_raises(
- exc.NoSuchColumnError,
- lambda: result[0][addresses.c.user_id])
- else:
- # test with a different table. name resolution is
- # causing 'user_id' to match when use_labels wasn't used.
- eq_(result[0][addresses.c.user_id], 7)
-
- assert_raises(
- exc.NoSuchColumnError, lambda: result[0]['fake key'])
- assert_raises(
- exc.NoSuchColumnError,
- lambda: result[0][addresses.c.address_id])
-
- def test_column_error_printing(self):
- row = testing.db.execute(select([1])).first()
-
- class unprintable(object):
-
- def __str__(self):
- raise ValueError("nope")
-
- msg = r"Could not locate column in row for column '%s'"
-
- for accessor, repl in [
- ("x", "x"),
- (Column("q", Integer), "q"),
- (Column("q", Integer) + 12, r"q \+ :q_1"),
- (unprintable(), "unprintable element.*"),
- ]:
- assert_raises_message(
- exc.NoSuchColumnError,
- msg % repl,
- lambda: row[accessor]
- )
-
@testing.requires.boolean_col_expressions
def test_or_and_as_columns(self):
true, false = literal(True), literal(False)
@@ -538,16 +136,6 @@ class QueryTest(fixtures.TestBase):
assert row.x == True # noqa
assert row.y == False # noqa
- def test_fetchmany(self):
- users.insert().execute(user_id=7, user_name='jack')
- users.insert().execute(user_id=8, user_name='ed')
- users.insert().execute(user_id=9, user_name='fred')
- r = users.select().execute()
- l = []
- for row in r.fetchmany(size=2):
- l.append(row)
- self.assert_(len(l) == 2, "fetchmany(size=2) got %s rows" % len(l))
-
def test_like_ops(self):
users.insert().execute(
{'user_id': 1, 'user_name': 'apples'},
@@ -816,521 +404,6 @@ class QueryTest(fixtures.TestBase):
use_labels=labels),
[(3, 'a'), (2, 'b'), (1, None)])
- def test_column_slices(self):
- users.insert().execute(user_id=1, user_name='john')
- users.insert().execute(user_id=2, user_name='jack')
- addresses.insert().execute(
- address_id=1, user_id=2, address='foo@bar.com')
-
- r = text(
- "select * from query_addresses", bind=testing.db).execute().first()
- self.assert_(r[0:1] == (1,))
- self.assert_(r[1:] == (2, 'foo@bar.com'))
- self.assert_(r[:-1] == (1, 2))
-
- def test_column_accessor_basic_compiled(self):
- users.insert().execute(
- dict(user_id=1, user_name='john'),
- dict(user_id=2, user_name='jack')
- )
-
- r = users.select(users.c.user_id == 2).execute().first()
- self.assert_(r.user_id == r['user_id'] == r[users.c.user_id] == 2)
- self.assert_(
- r.user_name == r['user_name'] == r[users.c.user_name] == 'jack')
-
- def test_column_accessor_basic_text(self):
- users.insert().execute(
- dict(user_id=1, user_name='john'),
- dict(user_id=2, user_name='jack')
- )
- r = testing.db.execute(
- text("select * from query_users where user_id=2")).first()
- self.assert_(r.user_id == r['user_id'] == r[users.c.user_id] == 2)
- self.assert_(
- r.user_name == r['user_name'] == r[users.c.user_name] == 'jack')
-
- def test_column_accessor_textual_select(self):
- users.insert().execute(
- dict(user_id=1, user_name='john'),
- dict(user_id=2, user_name='jack')
- )
- # this will create column() objects inside
- # the select(), these need to match on name anyway
- r = testing.db.execute(
- select([
- column('user_id'), column('user_name')
- ]).select_from(table('query_users')).
- where(text('user_id=2'))
- ).first()
- self.assert_(r.user_id == r['user_id'] == r[users.c.user_id] == 2)
- self.assert_(
- r.user_name == r['user_name'] == r[users.c.user_name] == 'jack')
-
- def test_column_accessor_dotted_union(self):
- users.insert().execute(
- dict(user_id=1, user_name='john'),
- )
-
- # test a little sqlite weirdness - with the UNION,
- # cols come back as "query_users.user_id" in cursor.description
- r = testing.db.execute(
- text(
- "select query_users.user_id, query_users.user_name "
- "from query_users "
- "UNION select query_users.user_id, "
- "query_users.user_name from query_users"
- )
- ).first()
- eq_(r['user_id'], 1)
- eq_(r['user_name'], "john")
- eq_(list(r.keys()), ["user_id", "user_name"])
-
- @testing.only_on("sqlite", "sqlite specific feature")
- def test_column_accessor_sqlite_raw(self):
- users.insert().execute(
- dict(user_id=1, user_name='john'),
- )
-
- r = text(
- "select query_users.user_id, query_users.user_name "
- "from query_users "
- "UNION select query_users.user_id, "
- "query_users.user_name from query_users",
- bind=testing.db).execution_options(sqlite_raw_colnames=True). \
- execute().first()
- assert 'user_id' not in r
- assert 'user_name' not in r
- eq_(r['query_users.user_id'], 1)
- eq_(r['query_users.user_name'], "john")
- eq_(list(r.keys()), ["query_users.user_id", "query_users.user_name"])
-
- @testing.only_on("sqlite", "sqlite specific feature")
- def test_column_accessor_sqlite_translated(self):
- users.insert().execute(
- dict(user_id=1, user_name='john'),
- )
-
- r = text(
- "select query_users.user_id, query_users.user_name "
- "from query_users "
- "UNION select query_users.user_id, "
- "query_users.user_name from query_users",
- bind=testing.db).execute().first()
- eq_(r['user_id'], 1)
- eq_(r['user_name'], "john")
- eq_(r['query_users.user_id'], 1)
- eq_(r['query_users.user_name'], "john")
- eq_(list(r.keys()), ["user_id", "user_name"])
-
- def test_column_accessor_labels_w_dots(self):
- users.insert().execute(
- dict(user_id=1, user_name='john'),
- )
- # test using literal tablename.colname
- r = text(
- 'select query_users.user_id AS "query_users.user_id", '
- 'query_users.user_name AS "query_users.user_name" '
- 'from query_users', bind=testing.db).\
- execution_options(sqlite_raw_colnames=True).execute().first()
- eq_(r['query_users.user_id'], 1)
- eq_(r['query_users.user_name'], "john")
- assert "user_name" not in r
- eq_(list(r.keys()), ["query_users.user_id", "query_users.user_name"])
-
- def test_column_accessor_unary(self):
- users.insert().execute(
- dict(user_id=1, user_name='john'),
- )
-
- # unary experssions
- r = select([users.c.user_name.distinct()]).order_by(
- users.c.user_name).execute().first()
- eq_(r[users.c.user_name], 'john')
- eq_(r.user_name, 'john')
-
- def test_column_accessor_err(self):
- r = testing.db.execute(select([1])).first()
- assert_raises_message(
- AttributeError,
- "Could not locate column in row for column 'foo'",
- getattr, r, "foo"
- )
- assert_raises_message(
- KeyError,
- "Could not locate column in row for column 'foo'",
- lambda: r['foo']
- )
-
- def test_graceful_fetch_on_non_rows(self):
- """test that calling fetchone() etc. on a result that doesn't
- return rows fails gracefully.
-
- """
-
- # these proxies don't work with no cursor.description present.
- # so they don't apply to this test at the moment.
- # result.FullyBufferedResultProxy,
- # result.BufferedRowResultProxy,
- # result.BufferedColumnResultProxy
-
- conn = testing.db.connect()
- for meth in ('fetchone', 'fetchall', 'first', 'scalar', 'fetchmany'):
- trans = conn.begin()
- result = conn.execute(users.insert(), user_id=1)
- assert_raises_message(
- exc.ResourceClosedError,
- "This result object does not return rows. "
- "It has been closed automatically.",
- getattr(result, meth),
- )
- trans.rollback()
-
- @testing.requires.empty_inserts
- @testing.requires.returning
- def test_no_inserted_pk_on_returning(self):
- result = testing.db.execute(users.insert().returning(
- users.c.user_id, users.c.user_name))
- assert_raises_message(
- exc.InvalidRequestError,
- r"Can't call inserted_primary_key when returning\(\) is used.",
- getattr, result, 'inserted_primary_key'
- )
-
- def test_fetchone_til_end(self):
- result = testing.db.execute("select * from query_users")
- eq_(result.fetchone(), None)
- eq_(result.fetchone(), None)
- eq_(result.fetchone(), None)
- result.close()
- assert_raises_message(
- exc.ResourceClosedError,
- "This result object is closed.",
- result.fetchone
- )
-
- def test_row_case_sensitive(self):
- row = testing.db.execute(
- select([
- literal_column("1").label("case_insensitive"),
- literal_column("2").label("CaseSensitive")
- ])
- ).first()
-
- eq_(list(row.keys()), ["case_insensitive", "CaseSensitive"])
- eq_(row["case_insensitive"], 1)
- eq_(row["CaseSensitive"], 2)
-
- assert_raises(
- KeyError,
- lambda: row["Case_insensitive"]
- )
- assert_raises(
- KeyError,
- lambda: row["casesensitive"]
- )
-
- def test_row_case_insensitive(self):
- ins_db = engines.testing_engine(options={"case_sensitive": False})
- row = ins_db.execute(
- select([
- literal_column("1").label("case_insensitive"),
- literal_column("2").label("CaseSensitive")
- ])
- ).first()
-
- eq_(list(row.keys()), ["case_insensitive", "CaseSensitive"])
- eq_(row["case_insensitive"], 1)
- eq_(row["CaseSensitive"], 2)
- eq_(row["Case_insensitive"], 1)
- eq_(row["casesensitive"], 2)
-
- def test_row_as_args(self):
- users.insert().execute(user_id=1, user_name='john')
- r = users.select(users.c.user_id == 1).execute().first()
- users.delete().execute()
- users.insert().execute(r)
- eq_(users.select().execute().fetchall(), [(1, 'john')])
-
- def test_result_as_args(self):
- users.insert().execute([
- dict(user_id=1, user_name='john'),
- dict(user_id=2, user_name='ed')])
- r = users.select().execute()
- users2.insert().execute(list(r))
- eq_(
- users2.select().order_by(users2.c.user_id).execute().fetchall(),
- [(1, 'john'), (2, 'ed')]
- )
-
- users2.delete().execute()
- r = users.select().execute()
- users2.insert().execute(*list(r))
- eq_(
- users2.select().order_by(users2.c.user_id).execute().fetchall(),
- [(1, 'john'), (2, 'ed')]
- )
-
- @testing.requires.duplicate_names_in_cursor_description
- def test_ambiguous_column(self):
- users.insert().execute(user_id=1, user_name='john')
- result = users.outerjoin(addresses).select().execute()
- r = result.first()
-
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambiguous column name",
- lambda: r['user_id']
- )
-
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambiguous column name",
- lambda: r[users.c.user_id]
- )
-
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambiguous column name",
- lambda: r[addresses.c.user_id]
- )
-
- # try to trick it - fake_table isn't in the result!
- # we get the correct error
- fake_table = Table('fake', MetaData(), Column('user_id', Integer))
- assert_raises_message(
- exc.InvalidRequestError,
- "Could not locate column in row for column 'fake.user_id'",
- lambda: r[fake_table.c.user_id]
- )
-
- r = util.pickle.loads(util.pickle.dumps(r))
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambiguous column name",
- lambda: r['user_id']
- )
-
- result = users.outerjoin(addresses).select().execute()
- result = _result.BufferedColumnResultProxy(result.context)
- r = result.first()
- assert isinstance(r, _result.BufferedColumnRow)
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambiguous column name",
- lambda: r['user_id']
- )
-
- @testing.requires.duplicate_names_in_cursor_description
- def test_ambiguous_column_by_col(self):
- users.insert().execute(user_id=1, user_name='john')
- ua = users.alias()
- u2 = users.alias()
- result = select([users.c.user_id, ua.c.user_id]).execute()
- row = result.first()
-
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambiguous column name",
- lambda: row[users.c.user_id]
- )
-
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambiguous column name",
- lambda: row[ua.c.user_id]
- )
-
- # Unfortunately, this fails -
- # we'd like
- # "Could not locate column in row"
- # to be raised here, but the check for
- # "common column" in _compare_name_for_result()
- # has other requirements to be more liberal.
- # Ultimately the
- # expression system would need a way to determine
- # if given two columns in a "proxy" relationship, if they
- # refer to a different parent table
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambiguous column name",
- lambda: row[u2.c.user_id]
- )
-
- @testing.requires.duplicate_names_in_cursor_description
- def test_ambiguous_column_contains(self):
- # ticket 2702. in 0.7 we'd get True, False.
- # in 0.8, both columns are present so it's True;
- # but when they're fetched you'll get the ambiguous error.
- users.insert().execute(user_id=1, user_name='john')
- result = select([users.c.user_id, addresses.c.user_id]).\
- select_from(users.outerjoin(addresses)).execute()
- row = result.first()
-
- eq_(
- set([users.c.user_id in row, addresses.c.user_id in row]),
- set([True])
- )
-
- def test_ambiguous_column_by_col_plus_label(self):
- users.insert().execute(user_id=1, user_name='john')
- result = select(
- [users.c.user_id,
- type_coerce(users.c.user_id, Integer).label('foo')]).execute()
- row = result.first()
- eq_(
- row[users.c.user_id], 1
- )
- eq_(
- row[1], 1
- )
-
- def test_fetch_partial_result_map(self):
- users.insert().execute(user_id=7, user_name='ed')
-
- t = text("select * from query_users").columns(
- user_name=String()
- )
- eq_(
- testing.db.execute(t).fetchall(), [(7, 'ed')]
- )
-
- def test_fetch_unordered_result_map(self):
- users.insert().execute(user_id=7, user_name='ed')
-
- class Goofy1(TypeDecorator):
- impl = String
-
- def process_result_value(self, value, dialect):
- return value + "a"
-
- class Goofy2(TypeDecorator):
- impl = String
-
- def process_result_value(self, value, dialect):
- return value + "b"
-
- class Goofy3(TypeDecorator):
- impl = String
-
- def process_result_value(self, value, dialect):
- return value + "c"
-
- t = text(
- "select user_name as a, user_name as b, "
- "user_name as c from query_users").columns(
- a=Goofy1(), b=Goofy2(), c=Goofy3()
- )
- eq_(
- testing.db.execute(t).fetchall(), [
- ('eda', 'edb', 'edc')
- ]
- )
-
- @testing.requires.subqueries
- def test_column_label_targeting(self):
- users.insert().execute(user_id=7, user_name='ed')
-
- for s in (
- users.select().alias('foo'),
- users.select().alias(users.name),
- ):
- row = s.select(use_labels=True).execute().first()
- assert row[s.c.user_id] == 7
- assert row[s.c.user_name] == 'ed'
-
- def test_keys(self):
- users.insert().execute(user_id=1, user_name='foo')
- r = users.select().execute()
- eq_([x.lower() for x in list(r.keys())], ['user_id', 'user_name'])
- r = r.first()
- eq_([x.lower() for x in list(r.keys())], ['user_id', 'user_name'])
-
- def test_items(self):
- users.insert().execute(user_id=1, user_name='foo')
- r = users.select().execute().first()
- eq_(
- [(x[0].lower(), x[1]) for x in list(r.items())],
- [('user_id', 1), ('user_name', 'foo')])
-
- def test_len(self):
- users.insert().execute(user_id=1, user_name='foo')
- r = users.select().execute().first()
- eq_(len(r), 2)
-
- r = testing.db.execute('select user_name, user_id from query_users'). \
- first()
- eq_(len(r), 2)
- r = testing.db.execute('select user_name from query_users').first()
- eq_(len(r), 1)
-
- def test_sorting_in_python(self):
- users.insert().execute(
- dict(user_id=1, user_name='foo'),
- dict(user_id=2, user_name='bar'),
- dict(user_id=3, user_name='def'),
- )
-
- rows = users.select().order_by(users.c.user_name).execute().fetchall()
-
- eq_(rows, [(2, 'bar'), (3, 'def'), (1, 'foo')])
-
- eq_(sorted(rows), [(1, 'foo'), (2, 'bar'), (3, 'def')])
-
- def test_column_order_with_simple_query(self):
- # should return values in column definition order
- users.insert().execute(user_id=1, user_name='foo')
- r = users.select(users.c.user_id == 1).execute().first()
- eq_(r[0], 1)
- eq_(r[1], 'foo')
- eq_([x.lower() for x in list(r.keys())], ['user_id', 'user_name'])
- eq_(list(r.values()), [1, 'foo'])
-
- def test_column_order_with_text_query(self):
- # should return values in query order
- users.insert().execute(user_id=1, user_name='foo')
- r = testing.db.execute('select user_name, user_id from query_users'). \
- first()
- eq_(r[0], 'foo')
- eq_(r[1], 1)
- eq_([x.lower() for x in list(r.keys())], ['user_name', 'user_id'])
- eq_(list(r.values()), ['foo', 1])
-
- @testing.crashes('oracle', 'FIXME: unknown, varify not fails_on()')
- @testing.crashes('firebird', 'An identifier must begin with a letter')
- def test_column_accessor_shadow(self):
- meta = MetaData(testing.db)
- shadowed = Table(
- 'test_shadowed', meta,
- Column('shadow_id', INT, primary_key=True),
- Column('shadow_name', VARCHAR(20)),
- Column('parent', VARCHAR(20)),
- Column('row', VARCHAR(40)),
- Column('_parent', VARCHAR(20)),
- Column('_row', VARCHAR(20)),
- )
- shadowed.create(checkfirst=True)
- try:
- shadowed.insert().execute(
- shadow_id=1, shadow_name='The Shadow', parent='The Light',
- row='Without light there is no shadow',
- _parent='Hidden parent', _row='Hidden row')
- r = shadowed.select(shadowed.c.shadow_id == 1).execute().first()
- self.assert_(
- r.shadow_id == r['shadow_id'] == r[shadowed.c.shadow_id] == 1)
- self.assert_(
- r.shadow_name == r['shadow_name'] ==
- r[shadowed.c.shadow_name] == 'The Shadow')
- self.assert_(
- r.parent == r['parent'] == r[shadowed.c.parent] == 'The Light')
- self.assert_(
- r.row == r['row'] == r[shadowed.c.row] ==
- 'Without light there is no shadow')
- self.assert_(r['_parent'] == 'Hidden parent')
- self.assert_(r['_row'] == 'Hidden row')
- finally:
- shadowed.drop(checkfirst=True)
-
@testing.emits_warning('.*empty sequence.*')
def test_in_filtering(self):
"""test the behavior of the in_() function."""
@@ -1480,393 +553,6 @@ class RequiredBindTest(fixtures.TablesTest):
is_(bindparam('foo', callable_=c, required=False).required, False)
-class TableInsertTest(fixtures.TablesTest):
-
- """test for consistent insert behavior across dialects
- regarding the inline=True flag, lower-case 't' tables.
-
- """
- run_create_tables = 'each'
- __backend__ = True
-
- @classmethod
- def define_tables(cls, metadata):
- Table(
- 'foo', metadata,
- Column('id', Integer, Sequence('t_id_seq'), primary_key=True),
- Column('data', String(50)),
- Column('x', Integer)
- )
-
- def _fixture(self, types=True):
- if types:
- t = sql.table(
- 'foo', sql.column('id', Integer),
- sql.column('data', String),
- sql.column('x', Integer))
- else:
- t = sql.table(
- 'foo', sql.column('id'), sql.column('data'), sql.column('x'))
- return t
-
- def _test(self, stmt, row, returning=None, inserted_primary_key=False):
- r = testing.db.execute(stmt)
-
- if returning:
- returned = r.first()
- eq_(returned, returning)
- elif inserted_primary_key is not False:
- eq_(r.inserted_primary_key, inserted_primary_key)
-
- eq_(testing.db.execute(self.tables.foo.select()).first(), row)
-
- def _test_multi(self, stmt, rows, data):
- testing.db.execute(stmt, rows)
- eq_(
- testing.db.execute(
- self.tables.foo.select().
- order_by(self.tables.foo.c.id)).fetchall(),
- data)
-
- @testing.requires.sequences
- def test_expicit_sequence(self):
- t = self._fixture()
- self._test(
- t.insert().values(
- id=func.next_value(Sequence('t_id_seq')), data='data', x=5),
- (1, 'data', 5)
- )
-
- def test_uppercase(self):
- t = self.tables.foo
- self._test(
- t.insert().values(id=1, data='data', x=5),
- (1, 'data', 5),
- inserted_primary_key=[1]
- )
-
- def test_uppercase_inline(self):
- t = self.tables.foo
- self._test(
- t.insert(inline=True).values(id=1, data='data', x=5),
- (1, 'data', 5),
- inserted_primary_key=[1]
- )
-
- @testing.crashes(
- "mssql+pyodbc",
- "Pyodbc + SQL Server + Py3K, some decimal handling issue")
- def test_uppercase_inline_implicit(self):
- t = self.tables.foo
- self._test(
- t.insert(inline=True).values(data='data', x=5),
- (1, 'data', 5),
- inserted_primary_key=[None]
- )
-
- def test_uppercase_implicit(self):
- t = self.tables.foo
- self._test(
- t.insert().values(data='data', x=5),
- (1, 'data', 5),
- inserted_primary_key=[1]
- )
-
- def test_uppercase_direct_params(self):
- t = self.tables.foo
- self._test(
- t.insert().values(id=1, data='data', x=5),
- (1, 'data', 5),
- inserted_primary_key=[1]
- )
-
- @testing.requires.returning
- def test_uppercase_direct_params_returning(self):
- t = self.tables.foo
- self._test(
- t.insert().values(id=1, data='data', x=5).returning(t.c.id, t.c.x),
- (1, 'data', 5),
- returning=(1, 5)
- )
-
- @testing.fails_on(
- 'mssql', "lowercase table doesn't support identity insert disable")
- def test_direct_params(self):
- t = self._fixture()
- self._test(
- t.insert().values(id=1, data='data', x=5),
- (1, 'data', 5),
- inserted_primary_key=[]
- )
-
- @testing.fails_on(
- 'mssql', "lowercase table doesn't support identity insert disable")
- @testing.requires.returning
- def test_direct_params_returning(self):
- t = self._fixture()
- self._test(
- t.insert().values(id=1, data='data', x=5).returning(t.c.id, t.c.x),
- (1, 'data', 5),
- returning=(1, 5)
- )
-
- @testing.requires.emulated_lastrowid
- def test_implicit_pk(self):
- t = self._fixture()
- self._test(
- t.insert().values(data='data', x=5),
- (1, 'data', 5),
- inserted_primary_key=[]
- )
-
- @testing.requires.emulated_lastrowid
- def test_implicit_pk_multi_rows(self):
- t = self._fixture()
- self._test_multi(
- t.insert(),
- [
- {'data': 'd1', 'x': 5},
- {'data': 'd2', 'x': 6},
- {'data': 'd3', 'x': 7},
- ],
- [
- (1, 'd1', 5),
- (2, 'd2', 6),
- (3, 'd3', 7)
- ],
- )
-
- @testing.requires.emulated_lastrowid
- def test_implicit_pk_inline(self):
- t = self._fixture()
- self._test(
- t.insert(inline=True).values(data='data', x=5),
- (1, 'data', 5),
- inserted_primary_key=[]
- )
-
-
-class KeyTargetingTest(fixtures.TablesTest):
- run_inserts = 'once'
- run_deletes = None
- __backend__ = True
-
- @classmethod
- def define_tables(cls, metadata):
- Table(
- 'keyed1', metadata, Column("a", CHAR(2), key="b"),
- Column("c", CHAR(2), key="q")
- )
- Table('keyed2', metadata, Column("a", CHAR(2)), Column("b", CHAR(2)))
- Table('keyed3', metadata, Column("a", CHAR(2)), Column("d", CHAR(2)))
- Table('keyed4', metadata, Column("b", CHAR(2)), Column("q", CHAR(2)))
- Table('content', metadata, Column('t', String(30), key="type"))
- Table('bar', metadata, Column('ctype', String(30), key="content_type"))
-
- if testing.requires.schemas.enabled:
- Table(
- 'wschema', metadata,
- Column("a", CHAR(2), key="b"),
- Column("c", CHAR(2), key="q"),
- schema=testing.config.test_schema
- )
-
- @classmethod
- def insert_data(cls):
- cls.tables.keyed1.insert().execute(dict(b="a1", q="c1"))
- cls.tables.keyed2.insert().execute(dict(a="a2", b="b2"))
- cls.tables.keyed3.insert().execute(dict(a="a3", d="d3"))
- cls.tables.keyed4.insert().execute(dict(b="b4", q="q4"))
- cls.tables.content.insert().execute(type="t1")
-
- if testing.requires.schemas.enabled:
- cls.tables['%s.wschema' % testing.config.test_schema].insert().execute(
- dict(b="a1", q="c1"))
-
- @testing.requires.schemas
- def test_keyed_accessor_wschema(self):
- keyed1 = self.tables['%s.wschema' % testing.config.test_schema]
- row = testing.db.execute(keyed1.select()).first()
-
- eq_(row.b, "a1")
- eq_(row.q, "c1")
- eq_(row.a, "a1")
- eq_(row.c, "c1")
-
- def test_keyed_accessor_single(self):
- keyed1 = self.tables.keyed1
- row = testing.db.execute(keyed1.select()).first()
-
- eq_(row.b, "a1")
- eq_(row.q, "c1")
- eq_(row.a, "a1")
- eq_(row.c, "c1")
-
- def test_keyed_accessor_single_labeled(self):
- keyed1 = self.tables.keyed1
- row = testing.db.execute(keyed1.select().apply_labels()).first()
-
- eq_(row.keyed1_b, "a1")
- eq_(row.keyed1_q, "c1")
- eq_(row.keyed1_a, "a1")
- eq_(row.keyed1_c, "c1")
-
- @testing.requires.duplicate_names_in_cursor_description
- def test_keyed_accessor_composite_conflict_2(self):
- keyed1 = self.tables.keyed1
- keyed2 = self.tables.keyed2
-
- row = testing.db.execute(select([keyed1, keyed2])).first()
- # row.b is unambiguous
- eq_(row.b, "b2")
- # row.a is ambiguous
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambig",
- getattr, row, "a"
- )
-
- def test_keyed_accessor_composite_names_precedent(self):
- keyed1 = self.tables.keyed1
- keyed4 = self.tables.keyed4
-
- row = testing.db.execute(select([keyed1, keyed4])).first()
- eq_(row.b, "b4")
- eq_(row.q, "q4")
- eq_(row.a, "a1")
- eq_(row.c, "c1")
-
- @testing.requires.duplicate_names_in_cursor_description
- def test_keyed_accessor_composite_keys_precedent(self):
- keyed1 = self.tables.keyed1
- keyed3 = self.tables.keyed3
-
- row = testing.db.execute(select([keyed1, keyed3])).first()
- eq_(row.q, "c1")
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambiguous column name 'b'",
- getattr, row, "b"
- )
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambiguous column name 'a'",
- getattr, row, "a"
- )
- eq_(row.d, "d3")
-
- def test_keyed_accessor_composite_labeled(self):
- keyed1 = self.tables.keyed1
- keyed2 = self.tables.keyed2
-
- row = testing.db.execute(select([keyed1, keyed2]).apply_labels()). \
- first()
- eq_(row.keyed1_b, "a1")
- eq_(row.keyed1_a, "a1")
- eq_(row.keyed1_q, "c1")
- eq_(row.keyed1_c, "c1")
- eq_(row.keyed2_a, "a2")
- eq_(row.keyed2_b, "b2")
- assert_raises(KeyError, lambda: row['keyed2_c'])
- assert_raises(KeyError, lambda: row['keyed2_q'])
-
- def test_column_label_overlap_fallback(self):
- content, bar = self.tables.content, self.tables.bar
- row = testing.db.execute(
- select([content.c.type.label("content_type")])).first()
- assert content.c.type not in row
- assert bar.c.content_type not in row
- assert sql.column('content_type') in row
-
- row = testing.db.execute(select([func.now().label("content_type")])). \
- first()
- assert content.c.type not in row
- assert bar.c.content_type not in row
- assert sql.column('content_type') in row
-
- def test_column_label_overlap_fallback_2(self):
- content, bar = self.tables.content, self.tables.bar
- row = testing.db.execute(content.select(use_labels=True)).first()
- assert content.c.type in row
- assert bar.c.content_type not in row
- assert sql.column('content_type') not in row
-
- def test_columnclause_schema_column_one(self):
- keyed2 = self.tables.keyed2
-
- # this is addressed by [ticket:2932]
- # ColumnClause._compare_name_for_result allows the
- # columns which the statement is against to be lightweight
- # cols, which results in a more liberal comparison scheme
- a, b = sql.column('a'), sql.column('b')
- stmt = select([a, b]).select_from(table("keyed2"))
- row = testing.db.execute(stmt).first()
-
- assert keyed2.c.a in row
- assert keyed2.c.b in row
- assert a in row
- assert b in row
-
- def test_columnclause_schema_column_two(self):
- keyed2 = self.tables.keyed2
-
- a, b = sql.column('a'), sql.column('b')
- stmt = select([keyed2.c.a, keyed2.c.b])
- row = testing.db.execute(stmt).first()
-
- assert keyed2.c.a in row
- assert keyed2.c.b in row
- assert a in row
- assert b in row
-
- def test_columnclause_schema_column_three(self):
- keyed2 = self.tables.keyed2
-
- # this is also addressed by [ticket:2932]
-
- a, b = sql.column('a'), sql.column('b')
- stmt = text("select a, b from keyed2").columns(a=CHAR, b=CHAR)
- row = testing.db.execute(stmt).first()
-
- assert keyed2.c.a in row
- assert keyed2.c.b in row
- assert a in row
- assert b in row
- assert stmt.c.a in row
- assert stmt.c.b in row
-
- def test_columnclause_schema_column_four(self):
- keyed2 = self.tables.keyed2
-
- # this is also addressed by [ticket:2932]
-
- a, b = sql.column('keyed2_a'), sql.column('keyed2_b')
- stmt = text("select a AS keyed2_a, b AS keyed2_b from keyed2").columns(
- a, b)
- row = testing.db.execute(stmt).first()
-
- assert keyed2.c.a in row
- assert keyed2.c.b in row
- assert a in row
- assert b in row
- assert stmt.c.keyed2_a in row
- assert stmt.c.keyed2_b in row
-
- def test_columnclause_schema_column_five(self):
- keyed2 = self.tables.keyed2
-
- # this is also addressed by [ticket:2932]
-
- stmt = text("select a AS keyed2_a, b AS keyed2_b from keyed2").columns(
- keyed2_a=CHAR, keyed2_b=CHAR)
- row = testing.db.execute(stmt).first()
-
- assert keyed2.c.a in row
- assert keyed2.c.b in row
- assert stmt.c.keyed2_a in row
- assert stmt.c.keyed2_b in row
-
-
class LimitTest(fixtures.TestBase):
__backend__ = True
diff --git a/test/sql/test_resultset.py b/test/sql/test_resultset.py
new file mode 100644
index 000000000..8461996ea
--- /dev/null
+++ b/test/sql/test_resultset.py
@@ -0,0 +1,1136 @@
+from sqlalchemy.testing import eq_, assert_raises_message, assert_raises, \
+ in_, not_in_, is_, ne_
+from sqlalchemy import testing
+from sqlalchemy.testing import fixtures, engines
+from sqlalchemy import util
+from sqlalchemy import (
+ exc, sql, func, select, String, Integer, MetaData, ForeignKey,
+ VARCHAR, INT, CHAR, text, type_coerce, literal_column,
+ TypeDecorator, table, column)
+from sqlalchemy.engine import result as _result
+from sqlalchemy.testing.schema import Table, Column
+import operator
+
+
+class ResultProxyTest(fixtures.TablesTest):
+ __backend__ = True
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'users', metadata,
+ Column(
+ 'user_id', INT, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('user_name', VARCHAR(20)),
+ test_needs_acid=True
+ )
+ Table(
+ 'addresses', metadata,
+ Column(
+ 'address_id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('user_id', Integer, ForeignKey('users.user_id')),
+ Column('address', String(30)),
+ test_needs_acid=True
+ )
+
+ Table(
+ 'users2', metadata,
+ Column('user_id', INT, primary_key=True),
+ Column('user_name', VARCHAR(20)),
+ test_needs_acid=True
+ )
+
+ def test_row_iteration(self):
+ users = self.tables.users
+
+ users.insert().execute(
+ {'user_id': 7, 'user_name': 'jack'},
+ {'user_id': 8, 'user_name': 'ed'},
+ {'user_id': 9, 'user_name': 'fred'},
+ )
+ r = users.select().execute()
+ l = []
+ for row in r:
+ l.append(row)
+ eq_(len(l), 3)
+
+ @testing.requires.subqueries
+ def test_anonymous_rows(self):
+ users = self.tables.users
+
+ users.insert().execute(
+ {'user_id': 7, 'user_name': 'jack'},
+ {'user_id': 8, 'user_name': 'ed'},
+ {'user_id': 9, 'user_name': 'fred'},
+ )
+
+ sel = select([users.c.user_id]).where(users.c.user_name == 'jack'). \
+ as_scalar()
+ for row in select([sel + 1, sel + 3], bind=users.bind).execute():
+ eq_(row['anon_1'], 8)
+ eq_(row['anon_2'], 10)
+
+ def test_row_comparison(self):
+ users = self.tables.users
+
+ users.insert().execute(user_id=7, user_name='jack')
+ rp = users.select().execute().first()
+
+ eq_(rp, rp)
+ is_(not(rp != rp), True)
+
+ equal = (7, 'jack')
+
+ eq_(rp, equal)
+ eq_(equal, rp)
+ is_((not (rp != equal)), True)
+ is_(not (equal != equal), True)
+
+ def endless():
+ while True:
+ yield 1
+ ne_(rp, endless())
+ ne_(endless(), rp)
+
+ # test that everything compares the same
+ # as it would against a tuple
+ for compare in [False, 8, endless(), 'xyz', (7, 'jack')]:
+ for op in [
+ operator.eq, operator.ne, operator.gt,
+ operator.lt, operator.ge, operator.le
+ ]:
+
+ try:
+ control = op(equal, compare)
+ except TypeError:
+ # Py3K raises TypeError for some invalid comparisons
+ assert_raises(TypeError, op, rp, compare)
+ else:
+ eq_(control, op(rp, compare))
+
+ try:
+ control = op(compare, equal)
+ except TypeError:
+ # Py3K raises TypeError for some invalid comparisons
+ assert_raises(TypeError, op, compare, rp)
+ else:
+ eq_(control, op(compare, rp))
+
+ @testing.provide_metadata
+ def test_column_label_overlap_fallback(self):
+ content = Table(
+ 'content', self.metadata,
+ Column('type', String(30)),
+ )
+ bar = Table(
+ 'bar', self.metadata,
+ Column('content_type', String(30))
+ )
+ self.metadata.create_all(testing.db)
+ testing.db.execute(content.insert().values(type="t1"))
+
+ row = testing.db.execute(content.select(use_labels=True)).first()
+ in_(content.c.type, row)
+ not_in_(bar.c.content_type, row)
+ in_(sql.column('content_type'), row)
+
+ row = testing.db.execute(
+ select([content.c.type.label("content_type")])).first()
+ in_(content.c.type, row)
+
+ not_in_(bar.c.content_type, row)
+
+ in_(sql.column('content_type'), row)
+
+ row = testing.db.execute(select([func.now().label("content_type")])). \
+ first()
+ not_in_(content.c.type, row)
+
+ not_in_(bar.c.content_type, row)
+
+ in_(sql.column('content_type'), row)
+
+ def test_pickled_rows(self):
+ users = self.tables.users
+ addresses = self.tables.addresses
+
+ users.insert().execute(
+ {'user_id': 7, 'user_name': 'jack'},
+ {'user_id': 8, 'user_name': 'ed'},
+ {'user_id': 9, 'user_name': 'fred'},
+ )
+
+ for pickle in False, True:
+ for use_labels in False, True:
+ result = users.select(use_labels=use_labels).order_by(
+ users.c.user_id).execute().fetchall()
+
+ if pickle:
+ result = util.pickle.loads(util.pickle.dumps(result))
+
+ eq_(
+ result,
+ [(7, "jack"), (8, "ed"), (9, "fred")]
+ )
+ if use_labels:
+ eq_(result[0]['users_user_id'], 7)
+ eq_(
+ list(result[0].keys()),
+ ["users_user_id", "users_user_name"])
+ else:
+ eq_(result[0]['user_id'], 7)
+ eq_(list(result[0].keys()), ["user_id", "user_name"])
+
+ eq_(result[0][0], 7)
+ eq_(result[0][users.c.user_id], 7)
+ eq_(result[0][users.c.user_name], 'jack')
+
+ if not pickle or use_labels:
+ assert_raises(
+ exc.NoSuchColumnError,
+ lambda: result[0][addresses.c.user_id])
+ else:
+ # test with a different table. name resolution is
+ # causing 'user_id' to match when use_labels wasn't used.
+ eq_(result[0][addresses.c.user_id], 7)
+
+ assert_raises(
+ exc.NoSuchColumnError, lambda: result[0]['fake key'])
+ assert_raises(
+ exc.NoSuchColumnError,
+ lambda: result[0][addresses.c.address_id])
+
+ def test_column_error_printing(self):
+ row = testing.db.execute(select([1])).first()
+
+ class unprintable(object):
+
+ def __str__(self):
+ raise ValueError("nope")
+
+ msg = r"Could not locate column in row for column '%s'"
+
+ for accessor, repl in [
+ ("x", "x"),
+ (Column("q", Integer), "q"),
+ (Column("q", Integer) + 12, r"q \+ :q_1"),
+ (unprintable(), "unprintable element.*"),
+ ]:
+ assert_raises_message(
+ exc.NoSuchColumnError,
+ msg % repl,
+ lambda: row[accessor]
+ )
+
+ def test_fetchmany(self):
+ users = self.tables.users
+
+ users.insert().execute(user_id=7, user_name='jack')
+ users.insert().execute(user_id=8, user_name='ed')
+ users.insert().execute(user_id=9, user_name='fred')
+ r = users.select().execute()
+ l = []
+ for row in r.fetchmany(size=2):
+ l.append(row)
+ eq_(len(l), 2)
+
+ def test_column_slices(self):
+ users = self.tables.users
+ addresses = self.tables.addresses
+
+ users.insert().execute(user_id=1, user_name='john')
+ users.insert().execute(user_id=2, user_name='jack')
+ addresses.insert().execute(
+ address_id=1, user_id=2, address='foo@bar.com')
+
+ r = text(
+ "select * from addresses", bind=testing.db).execute().first()
+ eq_(r[0:1], (1,))
+ eq_(r[1:], (2, 'foo@bar.com'))
+ eq_(r[:-1], (1, 2))
+
+ def test_column_accessor_basic_compiled(self):
+ users = self.tables.users
+
+ users.insert().execute(
+ dict(user_id=1, user_name='john'),
+ dict(user_id=2, user_name='jack')
+ )
+
+ r = users.select(users.c.user_id == 2).execute().first()
+ eq_(r.user_id, 2)
+ eq_(r['user_id'], 2)
+ eq_(r[users.c.user_id], 2)
+
+ eq_(r.user_name, 'jack')
+ eq_(r['user_name'], 'jack')
+ eq_(r[users.c.user_name], 'jack')
+
+ def test_column_accessor_basic_text(self):
+ users = self.tables.users
+
+ users.insert().execute(
+ dict(user_id=1, user_name='john'),
+ dict(user_id=2, user_name='jack')
+ )
+ r = testing.db.execute(
+ text("select * from users where user_id=2")).first()
+
+ eq_(r.user_id, 2)
+ eq_(r['user_id'], 2)
+ eq_(r[users.c.user_id], 2)
+
+ eq_(r.user_name, 'jack')
+ eq_(r['user_name'], 'jack')
+ eq_(r[users.c.user_name], 'jack')
+
+ def test_column_accessor_textual_select(self):
+ users = self.tables.users
+
+ users.insert().execute(
+ dict(user_id=1, user_name='john'),
+ dict(user_id=2, user_name='jack')
+ )
+ # this will create column() objects inside
+ # the select(), these need to match on name anyway
+ r = testing.db.execute(
+ select([
+ column('user_id'), column('user_name')
+ ]).select_from(table('users')).
+ where(text('user_id=2'))
+ ).first()
+
+ eq_(r.user_id, 2)
+ eq_(r['user_id'], 2)
+ eq_(r[users.c.user_id], 2)
+
+ eq_(r.user_name, 'jack')
+ eq_(r['user_name'], 'jack')
+ eq_(r[users.c.user_name], 'jack')
+
+ def test_column_accessor_dotted_union(self):
+ users = self.tables.users
+
+ users.insert().execute(
+ dict(user_id=1, user_name='john'),
+ )
+
+ # test a little sqlite weirdness - with the UNION,
+ # cols come back as "users.user_id" in cursor.description
+ r = testing.db.execute(
+ text(
+ "select users.user_id, users.user_name "
+ "from users "
+ "UNION select users.user_id, "
+ "users.user_name from users"
+ )
+ ).first()
+ eq_(r['user_id'], 1)
+ eq_(r['user_name'], "john")
+ eq_(list(r.keys()), ["user_id", "user_name"])
+
+ @testing.only_on("sqlite", "sqlite specific feature")
+ def test_column_accessor_sqlite_raw(self):
+ users = self.tables.users
+
+ users.insert().execute(
+ dict(user_id=1, user_name='john'),
+ )
+
+ r = text(
+ "select users.user_id, users.user_name "
+ "from users "
+ "UNION select users.user_id, "
+ "users.user_name from users",
+ bind=testing.db).execution_options(sqlite_raw_colnames=True). \
+ execute().first()
+ not_in_('user_id', r)
+ not_in_('user_name', r)
+ eq_(r['users.user_id'], 1)
+ eq_(r['users.user_name'], "john")
+ eq_(list(r.keys()), ["users.user_id", "users.user_name"])
+
+ @testing.only_on("sqlite", "sqlite specific feature")
+ def test_column_accessor_sqlite_translated(self):
+ users = self.tables.users
+
+ users.insert().execute(
+ dict(user_id=1, user_name='john'),
+ )
+
+ r = text(
+ "select users.user_id, users.user_name "
+ "from users "
+ "UNION select users.user_id, "
+ "users.user_name from users",
+ bind=testing.db).execute().first()
+ eq_(r['user_id'], 1)
+ eq_(r['user_name'], "john")
+ eq_(r['users.user_id'], 1)
+ eq_(r['users.user_name'], "john")
+ eq_(list(r.keys()), ["user_id", "user_name"])
+
+ def test_column_accessor_labels_w_dots(self):
+ users = self.tables.users
+
+ users.insert().execute(
+ dict(user_id=1, user_name='john'),
+ )
+ # test using literal tablename.colname
+ r = text(
+ 'select users.user_id AS "users.user_id", '
+ 'users.user_name AS "users.user_name" '
+ 'from users', bind=testing.db).\
+ execution_options(sqlite_raw_colnames=True).execute().first()
+ eq_(r['users.user_id'], 1)
+ eq_(r['users.user_name'], "john")
+ not_in_("user_name", r)
+ eq_(list(r.keys()), ["users.user_id", "users.user_name"])
+
+ def test_column_accessor_unary(self):
+ users = self.tables.users
+
+ users.insert().execute(
+ dict(user_id=1, user_name='john'),
+ )
+
+ # unary expressions
+ r = select([users.c.user_name.distinct()]).order_by(
+ users.c.user_name).execute().first()
+ eq_(r[users.c.user_name], 'john')
+ eq_(r.user_name, 'john')
+
+ def test_column_accessor_err(self):
+ r = testing.db.execute(select([1])).first()
+ assert_raises_message(
+ AttributeError,
+ "Could not locate column in row for column 'foo'",
+ getattr, r, "foo"
+ )
+ assert_raises_message(
+ KeyError,
+ "Could not locate column in row for column 'foo'",
+ lambda: r['foo']
+ )
+
+ def test_graceful_fetch_on_non_rows(self):
+ """test that calling fetchone() etc. on a result that doesn't
+ return rows fails gracefully.
+
+ """
+
+ # these proxies don't work with no cursor.description present.
+ # so they don't apply to this test at the moment.
+ # result.FullyBufferedResultProxy,
+ # result.BufferedRowResultProxy,
+ # result.BufferedColumnResultProxy
+
+ users = self.tables.users
+
+ conn = testing.db.connect()
+ for meth in [
+ lambda r: r.fetchone(),
+ lambda r: r.fetchall(),
+ lambda r: r.first(),
+ lambda r: r.scalar(),
+ lambda r: r.fetchmany(),
+ lambda r: r._getter('user'),
+ lambda r: r._has_key('user'),
+ ]:
+ trans = conn.begin()
+ result = conn.execute(users.insert(), user_id=1)
+ assert_raises_message(
+ exc.ResourceClosedError,
+ "This result object does not return rows. "
+ "It has been closed automatically.",
+ meth, result,
+ )
+ trans.rollback()
+
+ def test_fetchone_til_end(self):
+ result = testing.db.execute("select * from users")
+ eq_(result.fetchone(), None)
+ eq_(result.fetchone(), None)
+ eq_(result.fetchone(), None)
+ result.close()
+ assert_raises_message(
+ exc.ResourceClosedError,
+ "This result object is closed.",
+ result.fetchone
+ )
+
+ def test_row_case_sensitive(self):
+ row = testing.db.execute(
+ select([
+ literal_column("1").label("case_insensitive"),
+ literal_column("2").label("CaseSensitive")
+ ])
+ ).first()
+
+ eq_(list(row.keys()), ["case_insensitive", "CaseSensitive"])
+
+ in_("case_insensitive", row._keymap)
+ in_("CaseSensitive", row._keymap)
+ not_in_("casesensitive", row._keymap)
+
+ eq_(row["case_insensitive"], 1)
+ eq_(row["CaseSensitive"], 2)
+
+ assert_raises(
+ KeyError,
+ lambda: row["Case_insensitive"]
+ )
+ assert_raises(
+ KeyError,
+ lambda: row["casesensitive"]
+ )
+
+ def test_row_case_sensitive_unoptimized(self):
+ ins_db = engines.testing_engine(options={"case_sensitive": True})
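+ # the raw text() column below presumably keeps this row off the
+ # optimized (positional) lookup path, hence "unoptimized"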
+ row = ins_db.execute(
+ select([
+ literal_column("1").label("case_insensitive"),
+ literal_column("2").label("CaseSensitive"),
+ text("3 AS screw_up_the_cols")
+ ])
+ ).first()
+
+ eq_(
+ list(row.keys()),
+ ["case_insensitive", "CaseSensitive", "screw_up_the_cols"])
+
+ in_("case_insensitive", row._keymap)
+ in_("CaseSensitive", row._keymap)
+ not_in_("casesensitive", row._keymap)
+
+ eq_(row["case_insensitive"], 1)
+ eq_(row["CaseSensitive"], 2)
+ eq_(row["screw_up_the_cols"], 3)
+
+ assert_raises(KeyError, lambda: row["Case_insensitive"])
+ assert_raises(KeyError, lambda: row["casesensitive"])
+ assert_raises(KeyError, lambda: row["screw_UP_the_cols"])
+
+ def test_row_case_insensitive(self):
+ ins_db = engines.testing_engine(options={"case_sensitive": False})
+ row = ins_db.execute(
+ select([
+ literal_column("1").label("case_insensitive"),
+ literal_column("2").label("CaseSensitive")
+ ])
+ ).first()
+
+ eq_(list(row.keys()), ["case_insensitive", "CaseSensitive"])
+
+ in_("case_insensitive", row._keymap)
+ in_("CaseSensitive", row._keymap)
+ in_("casesensitive", row._keymap)
+
+ eq_(row["case_insensitive"], 1)
+ eq_(row["CaseSensitive"], 2)
+ eq_(row["Case_insensitive"], 1)
+ eq_(row["casesensitive"], 2)
+
+ def test_row_case_insensitive_unoptimized(self):
+ ins_db = engines.testing_engine(options={"case_sensitive": False})
+ row = ins_db.execute(
+ select([
+ literal_column("1").label("case_insensitive"),
+ literal_column("2").label("CaseSensitive"),
+ text("3 AS screw_up_the_cols")
+ ])
+ ).first()
+
+ eq_(
+ list(row.keys()),
+ ["case_insensitive", "CaseSensitive", "screw_up_the_cols"])
+
+ in_("case_insensitive", row._keymap)
+ in_("CaseSensitive", row._keymap)
+ in_("casesensitive", row._keymap)
+
+ eq_(row["case_insensitive"], 1)
+ eq_(row["CaseSensitive"], 2)
+ eq_(row["screw_up_the_cols"], 3)
+ eq_(row["Case_insensitive"], 1)
+ eq_(row["casesensitive"], 2)
+ eq_(row["screw_UP_the_cols"], 3)
+
+ def test_row_as_args(self):
+ users = self.tables.users
+
+ users.insert().execute(user_id=1, user_name='john')
+ r = users.select(users.c.user_id == 1).execute().first()
+ users.delete().execute()
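+ # the fetched row can be passed directly as the insert's parameter set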
+ users.insert().execute(r)
+ eq_(users.select().execute().fetchall(), [(1, 'john')])
+
+ def test_result_as_args(self):
+ users = self.tables.users
+ users2 = self.tables.users2
+
+ users.insert().execute([
+ dict(user_id=1, user_name='john'),
+ dict(user_id=2, user_name='ed')])
+ r = users.select().execute()
+ users2.insert().execute(list(r))
+ eq_(
+ users2.select().order_by(users2.c.user_id).execute().fetchall(),
+ [(1, 'john'), (2, 'ed')]
+ )
+
+ users2.delete().execute()
+ r = users.select().execute()
+ users2.insert().execute(*list(r))
+ eq_(
+ users2.select().order_by(users2.c.user_id).execute().fetchall(),
+ [(1, 'john'), (2, 'ed')]
+ )
+
+ @testing.requires.duplicate_names_in_cursor_description
+ def test_ambiguous_column(self):
+ users = self.tables.users
+ addresses = self.tables.addresses
+
+ users.insert().execute(user_id=1, user_name='john')
+ result = users.outerjoin(addresses).select().execute()
+ r = result.first()
+
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Ambiguous column name",
+ lambda: r['user_id']
+ )
+
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Ambiguous column name",
+ lambda: r[users.c.user_id]
+ )
+
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Ambiguous column name",
+ lambda: r[addresses.c.user_id]
+ )
+
+ # try to trick it - fake_table isn't in the result!
+ # we get the correct error
+ fake_table = Table('fake', MetaData(), Column('user_id', Integer))
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Could not locate column in row for column 'fake.user_id'",
+ lambda: r[fake_table.c.user_id]
+ )
+
+ r = util.pickle.loads(util.pickle.dumps(r))
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Ambiguous column name",
+ lambda: r['user_id']
+ )
+
+ result = users.outerjoin(addresses).select().execute()
+ result = _result.BufferedColumnResultProxy(result.context)
+ r = result.first()
+ assert isinstance(r, _result.BufferedColumnRow)
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Ambiguous column name",
+ lambda: r['user_id']
+ )
+
+ @testing.requires.duplicate_names_in_cursor_description
+ def test_ambiguous_column_by_col(self):
+ users = self.tables.users
+
+ users.insert().execute(user_id=1, user_name='john')
+ ua = users.alias()
+ u2 = users.alias()
+ result = select([users.c.user_id, ua.c.user_id]).execute()
+ row = result.first()
+
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Ambiguous column name",
+ lambda: row[users.c.user_id]
+ )
+
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Ambiguous column name",
+ lambda: row[ua.c.user_id]
+ )
+
+ # Unfortunately, this fails -
+ # we'd like
+ # "Could not locate column in row"
+ # to be raised here, but the check for
+ # "common column" in _compare_name_for_result()
+ # has other requirements in order to be more liberal.
+ # Ultimately, the expression system would need a way to
+ # determine, given two columns in a "proxy" relationship,
+ # whether they refer to different parent tables.
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Ambiguous column name",
+ lambda: row[u2.c.user_id]
+ )
+
+ @testing.requires.duplicate_names_in_cursor_description
+ def test_ambiguous_column_contains(self):
+ users = self.tables.users
+ addresses = self.tables.addresses
+
+ # ticket 2702. in 0.7 we'd get True, False.
+ # in 0.8, both columns are present so it's True;
+ # but when they're fetched you'll get the ambiguous error.
+ users.insert().execute(user_id=1, user_name='john')
+ result = select([users.c.user_id, addresses.c.user_id]).\
+ select_from(users.outerjoin(addresses)).execute()
+ row = result.first()
+
+ eq_(
+ set([users.c.user_id in row, addresses.c.user_id in row]),
+ set([True])
+ )
+
+ def test_ambiguous_column_by_col_plus_label(self):
+ users = self.tables.users
+
+ users.insert().execute(user_id=1, user_name='john')
+ result = select(
+ [users.c.user_id,
+ type_coerce(users.c.user_id, Integer).label('foo')]).execute()
+ row = result.first()
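+ # because the type_coerce() column is labeled 'foo', targeting
+ # users.c.user_id is not ambiguous here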
+ eq_(
+ row[users.c.user_id], 1
+ )
+ eq_(
+ row[1], 1
+ )
+
+ def test_fetch_partial_result_map(self):
+ users = self.tables.users
+
+ users.insert().execute(user_id=7, user_name='ed')
+
+ t = text("select * from users").columns(
+ user_name=String()
+ )
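+ # only user_name is given an explicit type; the user_id column is
+ # matched by name from the raw cursor description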
+ eq_(
+ testing.db.execute(t).fetchall(), [(7, 'ed')]
+ )
+
+ def test_fetch_unordered_result_map(self):
+ users = self.tables.users
+
+ users.insert().execute(user_id=7, user_name='ed')
+
+ class Goofy1(TypeDecorator):
+ impl = String
+
+ def process_result_value(self, value, dialect):
+ return value + "a"
+
+ class Goofy2(TypeDecorator):
+ impl = String
+
+ def process_result_value(self, value, dialect):
+ return value + "b"
+
+ class Goofy3(TypeDecorator):
+ impl = String
+
+ def process_result_value(self, value, dialect):
+ return value + "c"
+
+ t = text(
+ "select user_name as a, user_name as b, "
+ "user_name as c from users").columns(
+ a=Goofy1(), b=Goofy2(), c=Goofy3()
+ )
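+ # the types are supplied as keyword arguments, so matching is by
+ # column name rather than by position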
+ eq_(
+ testing.db.execute(t).fetchall(), [
+ ('eda', 'edb', 'edc')
+ ]
+ )
+
+ @testing.requires.subqueries
+ def test_column_label_targeting(self):
+ users = self.tables.users
+
+ users.insert().execute(user_id=7, user_name='ed')
+
+ for s in (
+ users.select().alias('foo'),
+ users.select().alias(users.name),
+ ):
+ row = s.select(use_labels=True).execute().first()
+ eq_(row[s.c.user_id], 7)
+ eq_(row[s.c.user_name], 'ed')
+
+ def test_keys(self):
+ users = self.tables.users
+
+ users.insert().execute(user_id=1, user_name='foo')
+ result = users.select().execute()
+ eq_(
+ result.keys(),
+ ['user_id', 'user_name']
+ )
+ row = result.first()
+ eq_(
+ row.keys(),
+ ['user_id', 'user_name']
+ )
+
+ def test_keys_anon_labels(self):
+ """test [ticket:3483]"""
+
+ users = self.tables.users
+
+ users.insert().execute(user_id=1, user_name='foo')
+ result = testing.db.execute(
+ select([
+ users.c.user_id,
+ users.c.user_name.label(None),
+ func.count(literal_column('1'))]).
+ group_by(users.c.user_id, users.c.user_name)
+ )
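+ # label(None) and the un-labeled count() produce anonymous labels,
+ # which come back with generated names like user_name_1 and count_1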
+
+ eq_(
+ result.keys(),
+ ['user_id', 'user_name_1', 'count_1']
+ )
+ row = result.first()
+ eq_(
+ row.keys(),
+ ['user_id', 'user_name_1', 'count_1']
+ )
+
+ def test_items(self):
+ users = self.tables.users
+
+ users.insert().execute(user_id=1, user_name='foo')
+ r = users.select().execute().first()
+ eq_(
+ [(x[0].lower(), x[1]) for x in list(r.items())],
+ [('user_id', 1), ('user_name', 'foo')])
+
+ def test_len(self):
+ users = self.tables.users
+
+ users.insert().execute(user_id=1, user_name='foo')
+ r = users.select().execute().first()
+ eq_(len(r), 2)
+
+ r = testing.db.execute('select user_name, user_id from users'). \
+ first()
+ eq_(len(r), 2)
+ r = testing.db.execute('select user_name from users').first()
+ eq_(len(r), 1)
+
+ def test_sorting_in_python(self):
+ users = self.tables.users
+
+ users.insert().execute(
+ dict(user_id=1, user_name='foo'),
+ dict(user_id=2, user_name='bar'),
+ dict(user_id=3, user_name='def'),
+ )
+
+ rows = users.select().order_by(users.c.user_name).execute().fetchall()
+
+ eq_(rows, [(2, 'bar'), (3, 'def'), (1, 'foo')])
+
+ eq_(sorted(rows), [(1, 'foo'), (2, 'bar'), (3, 'def')])
+
+ def test_column_order_with_simple_query(self):
+ # should return values in column definition order
+ users = self.tables.users
+
+ users.insert().execute(user_id=1, user_name='foo')
+ r = users.select(users.c.user_id == 1).execute().first()
+ eq_(r[0], 1)
+ eq_(r[1], 'foo')
+ eq_([x.lower() for x in list(r.keys())], ['user_id', 'user_name'])
+ eq_(list(r.values()), [1, 'foo'])
+
+ def test_column_order_with_text_query(self):
+ # should return values in query order
+ users = self.tables.users
+
+ users.insert().execute(user_id=1, user_name='foo')
+ r = testing.db.execute('select user_name, user_id from users'). \
+ first()
+ eq_(r[0], 'foo')
+ eq_(r[1], 1)
+ eq_([x.lower() for x in list(r.keys())], ['user_name', 'user_id'])
+ eq_(list(r.values()), ['foo', 1])
+
+ @testing.crashes('oracle', 'FIXME: unknown, verify not fails_on()')
+ @testing.crashes('firebird', 'An identifier must begin with a letter')
+ @testing.provide_metadata
+ def test_column_accessor_shadow(self):
+ shadowed = Table(
+ 'test_shadowed', self.metadata,
+ Column('shadow_id', INT, primary_key=True),
+ Column('shadow_name', VARCHAR(20)),
+ Column('parent', VARCHAR(20)),
+ Column('row', VARCHAR(40)),
+ Column('_parent', VARCHAR(20)),
+ Column('_row', VARCHAR(20)),
+ )
+ self.metadata.create_all()
+ shadowed.insert().execute(
+ shadow_id=1, shadow_name='The Shadow', parent='The Light',
+ row='Without light there is no shadow',
+ _parent='Hidden parent', _row='Hidden row')
+ r = shadowed.select(shadowed.c.shadow_id == 1).execute().first()
+
+ eq_(r.shadow_id, 1)
+ eq_(r['shadow_id'], 1)
+ eq_(r[shadowed.c.shadow_id], 1)
+
+ eq_(r.shadow_name, 'The Shadow')
+ eq_(r['shadow_name'], 'The Shadow')
+ eq_(r[shadowed.c.shadow_name], 'The Shadow')
+
+ eq_(r.parent, 'The Light')
+ eq_(r['parent'], 'The Light')
+ eq_(r[shadowed.c.parent], 'The Light')
+
+ eq_(r.row, 'Without light there is no shadow')
+ eq_(r['row'], 'Without light there is no shadow')
+ eq_(r[shadowed.c.row], 'Without light there is no shadow')
+
+ eq_(r['_parent'], 'Hidden parent')
+ eq_(r['_row'], 'Hidden row')
+
+
+class KeyTargetingTest(fixtures.TablesTest):
+ run_inserts = 'once'
+ run_deletes = None
+ __backend__ = True
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'keyed1', metadata, Column("a", CHAR(2), key="b"),
+ Column("c", CHAR(2), key="q")
+ )
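+ # keyed1 (and wschema below) use Column .key values ('b', 'q') that
+ # differ from the database column names ('a', 'c'); keyed2/keyed3/keyed4
+ # reuse those names and keys so the tests can exercise name vs. key
+ # targeting and precedence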
+ Table('keyed2', metadata, Column("a", CHAR(2)), Column("b", CHAR(2)))
+ Table('keyed3', metadata, Column("a", CHAR(2)), Column("d", CHAR(2)))
+ Table('keyed4', metadata, Column("b", CHAR(2)), Column("q", CHAR(2)))
+ Table('content', metadata, Column('t', String(30), key="type"))
+ Table('bar', metadata, Column('ctype', String(30), key="content_type"))
+
+ if testing.requires.schemas.enabled:
+ Table(
+ 'wschema', metadata,
+ Column("a", CHAR(2), key="b"),
+ Column("c", CHAR(2), key="q"),
+ schema=testing.config.test_schema
+ )
+
+ @classmethod
+ def insert_data(cls):
+ cls.tables.keyed1.insert().execute(dict(b="a1", q="c1"))
+ cls.tables.keyed2.insert().execute(dict(a="a2", b="b2"))
+ cls.tables.keyed3.insert().execute(dict(a="a3", d="d3"))
+ cls.tables.keyed4.insert().execute(dict(b="b4", q="q4"))
+ cls.tables.content.insert().execute(type="t1")
+
+ if testing.requires.schemas.enabled:
+ cls.tables[
+ '%s.wschema' % testing.config.test_schema].insert().execute(
+ dict(b="a1", q="c1"))
+
+ @testing.requires.schemas
+ def test_keyed_accessor_wschema(self):
+ keyed1 = self.tables['%s.wschema' % testing.config.test_schema]
+ row = testing.db.execute(keyed1.select()).first()
+
+ eq_(row.b, "a1")
+ eq_(row.q, "c1")
+ eq_(row.a, "a1")
+ eq_(row.c, "c1")
+
+ def test_keyed_accessor_single(self):
+ keyed1 = self.tables.keyed1
+ row = testing.db.execute(keyed1.select()).first()
+
+ eq_(row.b, "a1")
+ eq_(row.q, "c1")
+ eq_(row.a, "a1")
+ eq_(row.c, "c1")
+
+ def test_keyed_accessor_single_labeled(self):
+ keyed1 = self.tables.keyed1
+ row = testing.db.execute(keyed1.select().apply_labels()).first()
+
+ eq_(row.keyed1_b, "a1")
+ eq_(row.keyed1_q, "c1")
+ eq_(row.keyed1_a, "a1")
+ eq_(row.keyed1_c, "c1")
+
+ @testing.requires.duplicate_names_in_cursor_description
+ def test_keyed_accessor_composite_conflict_2(self):
+ keyed1 = self.tables.keyed1
+ keyed2 = self.tables.keyed2
+
+ row = testing.db.execute(select([keyed1, keyed2])).first()
+ # row.b is unambiguous
+ eq_(row.b, "b2")
+ # row.a is ambiguous
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Ambig",
+ getattr, row, "a"
+ )
+
+ def test_keyed_accessor_composite_names_precedent(self):
+ keyed1 = self.tables.keyed1
+ keyed4 = self.tables.keyed4
+
+ row = testing.db.execute(select([keyed1, keyed4])).first()
+ eq_(row.b, "b4")
+ eq_(row.q, "q4")
+ eq_(row.a, "a1")
+ eq_(row.c, "c1")
+
+ @testing.requires.duplicate_names_in_cursor_description
+ def test_keyed_accessor_composite_keys_precedent(self):
+ keyed1 = self.tables.keyed1
+ keyed3 = self.tables.keyed3
+
+ row = testing.db.execute(select([keyed1, keyed3])).first()
+ eq_(row.q, "c1")
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Ambiguous column name 'b'",
+ getattr, row, "b"
+ )
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Ambiguous column name 'a'",
+ getattr, row, "a"
+ )
+ eq_(row.d, "d3")
+
+ def test_keyed_accessor_composite_labeled(self):
+ keyed1 = self.tables.keyed1
+ keyed2 = self.tables.keyed2
+
+ row = testing.db.execute(select([keyed1, keyed2]).apply_labels()). \
+ first()
+ eq_(row.keyed1_b, "a1")
+ eq_(row.keyed1_a, "a1")
+ eq_(row.keyed1_q, "c1")
+ eq_(row.keyed1_c, "c1")
+ eq_(row.keyed2_a, "a2")
+ eq_(row.keyed2_b, "b2")
+ assert_raises(KeyError, lambda: row['keyed2_c'])
+ assert_raises(KeyError, lambda: row['keyed2_q'])
+
+ def test_column_label_overlap_fallback(self):
+ content, bar = self.tables.content, self.tables.bar
+ row = testing.db.execute(
+ select([content.c.type.label("content_type")])).first()
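+ # neither table-bound column matches the standalone label, but a
+ # lightweight sql.column() of the same name still targets it by name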
+
+ not_in_(content.c.type, row)
+ not_in_(bar.c.content_type, row)
+
+ in_(sql.column('content_type'), row)
+
+ row = testing.db.execute(select([func.now().label("content_type")])). \
+ first()
+ not_in_(content.c.type, row)
+ not_in_(bar.c.content_type, row)
+ in_(sql.column('content_type'), row)
+
+ def test_column_label_overlap_fallback_2(self):
+ content, bar = self.tables.content, self.tables.bar
+ row = testing.db.execute(content.select(use_labels=True)).first()
+ in_(content.c.type, row)
+ not_in_(bar.c.content_type, row)
+ not_in_(sql.column('content_type'), row)
+
+ def test_columnclause_schema_column_one(self):
+ keyed2 = self.tables.keyed2
+
+ # this is addressed by [ticket:2932]
+ # ColumnClause._compare_name_for_result allows the
+ # columns which the statement is against to be lightweight
+ # cols, which results in a more liberal comparison scheme
+ a, b = sql.column('a'), sql.column('b')
+ stmt = select([a, b]).select_from(table("keyed2"))
+ row = testing.db.execute(stmt).first()
+
+ in_(keyed2.c.a, row)
+ in_(keyed2.c.b, row)
+ in_(a, row)
+ in_(b, row)
+
+ def test_columnclause_schema_column_two(self):
+ keyed2 = self.tables.keyed2
+
+ a, b = sql.column('a'), sql.column('b')
+ stmt = select([keyed2.c.a, keyed2.c.b])
+ row = testing.db.execute(stmt).first()
+
+ in_(keyed2.c.a, row)
+ in_(keyed2.c.b, row)
+ in_(a, row)
+ in_(b, row)
+
+ def test_columnclause_schema_column_three(self):
+ keyed2 = self.tables.keyed2
+
+ # this is also addressed by [ticket:2932]
+
+ a, b = sql.column('a'), sql.column('b')
+ stmt = text("select a, b from keyed2").columns(a=CHAR, b=CHAR)
+ row = testing.db.execute(stmt).first()
+
+ in_(keyed2.c.a, row)
+ in_(keyed2.c.b, row)
+ in_(a, row)
+ in_(b, row)
+ in_(stmt.c.a, row)
+ in_(stmt.c.b, row)
+
+ def test_columnclause_schema_column_four(self):
+ keyed2 = self.tables.keyed2
+
+ # this is also addressed by [ticket:2932]
+
+ a, b = sql.column('keyed2_a'), sql.column('keyed2_b')
+ stmt = text("select a AS keyed2_a, b AS keyed2_b from keyed2").columns(
+ a, b)
+ row = testing.db.execute(stmt).first()
+
+ in_(keyed2.c.a, row)
+ in_(keyed2.c.b, row)
+ in_(a, row)
+ in_(b, row)
+ in_(stmt.c.keyed2_a, row)
+ in_(stmt.c.keyed2_b, row)
+
+ def test_columnclause_schema_column_five(self):
+ keyed2 = self.tables.keyed2
+
+ # this is also addressed by [ticket:2932]
+
+ stmt = text("select a AS keyed2_a, b AS keyed2_b from keyed2").columns(
+ keyed2_a=CHAR, keyed2_b=CHAR)
+ row = testing.db.execute(stmt).first()
+
+ in_(keyed2.c.a, row)
+ in_(keyed2.c.b, row)
+ in_(stmt.c.keyed2_a, row)
+ in_(stmt.c.keyed2_b, row)
diff --git a/test/sql/test_selectable.py b/test/sql/test_selectable.py
index 3390f4a77..b9cbbf480 100644
--- a/test/sql/test_selectable.py
+++ b/test/sql/test_selectable.py
@@ -458,6 +458,26 @@ class SelectableTest(
assert u1.corresponding_column(table2.c.col1) is u1.c._all_columns[0]
assert u1.corresponding_column(table2.c.col3) is u1.c._all_columns[2]
+ @testing.emits_warning("Column 'col1'")
+ def test_union_alias_dupe_keys_grouped(self):
+ s1 = select([table1.c.col1, table1.c.col2, table2.c.col1]).\
+ limit(1).alias()
+ s2 = select([table2.c.col1, table2.c.col2, table2.c.col3]).limit(1)
+ u1 = union(s1, s2)
+
+ assert u1.corresponding_column(
+ s1.c._all_columns[0]) is u1.c._all_columns[0]
+ assert u1.corresponding_column(s2.c.col1) is u1.c._all_columns[0]
+ assert u1.corresponding_column(s1.c.col2) is u1.c.col2
+ assert u1.corresponding_column(s2.c.col2) is u1.c.col2
+
+ assert u1.corresponding_column(s2.c.col3) is u1.c._all_columns[2]
+
+ # this differs from the non-alias test because table2.c.col1 is
+ # matched more directly to s2.c.col1 than to s1.c.col1.
+ assert u1.corresponding_column(table2.c.col1) is u1.c._all_columns[0]
+ assert u1.corresponding_column(table2.c.col3) is u1.c._all_columns[2]
+
def test_select_union(self):
# like testaliasunion, but off a Select off the union.
@@ -912,10 +932,10 @@ class AnonLabelTest(fixtures.TestBase):
c1 = func.count('*')
assert c1.label(None) is not c1
- eq_(str(select([c1])), "SELECT count(:param_1) AS count_1")
+ eq_(str(select([c1])), "SELECT count(:count_2) AS count_1")
c2 = select([c1]).compile()
- eq_(str(select([c1.label(None)])), "SELECT count(:param_1) AS count_1")
+ eq_(str(select([c1.label(None)])), "SELECT count(:count_2) AS count_1")
def test_named_labels_named_column(self):
c1 = column('x')
diff --git a/test/sql/test_types.py b/test/sql/test_types.py
index 2545dec59..f1fb611fb 100644
--- a/test/sql/test_types.py
+++ b/test/sql/test_types.py
@@ -1,5 +1,6 @@
# coding: utf-8
-from sqlalchemy.testing import eq_, assert_raises, assert_raises_message, expect_warnings
+from sqlalchemy.testing import eq_, is_, assert_raises, \
+ assert_raises_message, expect_warnings
import decimal
import datetime
import os
@@ -9,9 +10,10 @@ from sqlalchemy import (
and_, func, Date, LargeBinary, literal, cast, text, Enum,
type_coerce, VARCHAR, Time, DateTime, BigInteger, SmallInteger, BOOLEAN,
BLOB, NCHAR, NVARCHAR, CLOB, TIME, DATE, DATETIME, TIMESTAMP, SMALLINT,
- INTEGER, DECIMAL, NUMERIC, FLOAT, REAL)
+ INTEGER, DECIMAL, NUMERIC, FLOAT, REAL, Array)
from sqlalchemy.sql import ddl
-
+from sqlalchemy.sql import visitors
+from sqlalchemy import inspection
from sqlalchemy import exc, types, util, dialects
for name in dialects.__all__:
__import__("sqlalchemy.dialects.%s" % name)
@@ -25,6 +27,7 @@ from sqlalchemy.testing import AssertsCompiledSQL, AssertsExecutionResults, \
from sqlalchemy.testing.util import picklers
from sqlalchemy.testing.util import round_decimal
from sqlalchemy.testing import fixtures
+from sqlalchemy.testing import mock
class AdaptTest(fixtures.TestBase):
@@ -137,7 +140,7 @@ class AdaptTest(fixtures.TestBase):
for is_down_adaption, typ, target_adaptions in adaptions():
if typ in (types.TypeDecorator, types.TypeEngine, types.Variant):
continue
- elif typ is dialects.postgresql.ARRAY:
+ elif issubclass(typ, Array):
t1 = typ(String)
else:
t1 = typ()
@@ -187,12 +190,28 @@ class AdaptTest(fixtures.TestBase):
for typ in self._all_types():
if typ in (types.TypeDecorator, types.TypeEngine, types.Variant):
continue
- elif typ is dialects.postgresql.ARRAY:
+ elif issubclass(typ, Array):
t1 = typ(String)
else:
t1 = typ()
repr(t1)
+ def test_adapt_constructor_copy_override_kw(self):
+ """test that adapt() can accept kw args that override
+ the state of the original object.
+
+ This is essentially testing the behavior of util.constructor_copy().
+
+ """
+ t1 = String(length=50, convert_unicode=False)
+ t2 = t1.adapt(Text, convert_unicode=True)
+ eq_(
+ t2.length, 50
+ )
+ eq_(
+ t2.convert_unicode, True
+ )
+
class TypeAffinityTest(fixtures.TestBase):
@@ -771,6 +790,68 @@ class TypeCoerceCastTest(fixtures.TablesTest):
[('BIND_INd1', 'BIND_INd1BIND_OUT')]
)
+ def test_cast_replace_col_w_bind(self):
+ self._test_replace_col_w_bind(cast)
+
+ def test_type_coerce_replace_col_w_bind(self):
+ self._test_replace_col_w_bind(type_coerce)
+
+ def _test_replace_col_w_bind(self, coerce_fn):
+ MyType = self.MyType
+
+ t = self.tables.t
+ t.insert().values(data=coerce_fn('d1', MyType)).execute()
+
+ stmt = select([t.c.data, coerce_fn(t.c.data, MyType)])
+
+ def col_to_bind(col):
+ if col is t.c.data:
+ return bindparam(None, "x", type_=col.type, unique=True)
+ return None
+
+ # ensure we evaluate the expression so that we can see
+ # that the clone resets this info
+ stmt.compile()
+
+ new_stmt = visitors.replacement_traverse(stmt, {}, col_to_bind)
+
+ # original statement
+ eq_(
+ testing.db.execute(stmt).fetchall(),
+ [('BIND_INd1', 'BIND_INd1BIND_OUT')]
+ )
+
+ # replaced with binds; CAST can't affect the bound parameter
+ # on the way in here
+ eq_(
+ testing.db.execute(new_stmt).fetchall(),
+ [('x', 'BIND_INxBIND_OUT')] if coerce_fn is type_coerce
+ else [('x', 'xBIND_OUT')]
+ )
+
+ def test_cast_bind(self):
+ self._test_bind(cast)
+
+ def test_type_bind(self):
+ self._test_bind(type_coerce)
+
+ def _test_bind(self, coerce_fn):
+ MyType = self.MyType
+
+ t = self.tables.t
+ t.insert().values(data=coerce_fn('d1', MyType)).execute()
+
+ stmt = select([
+ bindparam(None, "x", String(50), unique=True),
+ coerce_fn(bindparam(None, "x", String(50), unique=True), MyType)
+ ])
+
+ eq_(
+ testing.db.execute(stmt).fetchall(),
+ [('x', 'BIND_INxBIND_OUT')] if coerce_fn is type_coerce
+ else [('x', 'xBIND_OUT')]
+ )
+
@testing.fails_on(
"oracle", "ORA-00906: missing left parenthesis - "
"seems to be CAST(:param AS type)")
@@ -804,6 +885,7 @@ class TypeCoerceCastTest(fixtures.TablesTest):
[('BIND_INd1BIND_OUT', )])
+
class VariantTest(fixtures.TestBase, AssertsCompiledSQL):
def setup(self):
@@ -1160,16 +1242,13 @@ class EnumTest(AssertsCompiledSQL, fixtures.TestBase):
def __init__(self, name):
self.name = name
- class MyEnum(types.SchemaType, TypeDecorator):
+ class MyEnum(TypeDecorator):
def __init__(self, values):
self.impl = Enum(
*[v.name for v in values], name="myenum",
native_enum=False)
- def _set_table(self, table, column):
- self.impl._set_table(table, column)
-
# future method
def process_literal_param(self, value, dialect):
return value.name
@@ -1326,6 +1405,68 @@ class BinaryTest(fixtures.TestBase, AssertsExecutionResults):
with open(f, mode='rb') as o:
return o.read()
+
+class ArrayTest(fixtures.TestBase):
+
+ def _myarray_fixture(self):
+ class MyArray(Array):
+ pass
+ return MyArray
+
+ def test_array_index_map_dimensions(self):
+ col = column('x', Array(Integer, dimensions=3))
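+ # each [index] access should strip one dimension until the
+ # element type (Integer) is reached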
+ is_(
+ col[5].type._type_affinity, Array
+ )
+ eq_(
+ col[5].type.dimensions, 2
+ )
+ is_(
+ col[5][6].type._type_affinity, Array
+ )
+ eq_(
+ col[5][6].type.dimensions, 1
+ )
+ is_(
+ col[5][6][7].type._type_affinity, Integer
+ )
+
+ def test_array_getitem_single_type(self):
+ m = MetaData()
+ arrtable = Table(
+ 'arrtable', m,
+ Column('intarr', Array(Integer)),
+ Column('strarr', Array(String)),
+ )
+ is_(arrtable.c.intarr[1].type._type_affinity, Integer)
+ is_(arrtable.c.strarr[1].type._type_affinity, String)
+
+ def test_array_getitem_slice_type(self):
+ m = MetaData()
+ arrtable = Table(
+ 'arrtable', m,
+ Column('intarr', Array(Integer)),
+ Column('strarr', Array(String)),
+ )
+ is_(arrtable.c.intarr[1:3].type._type_affinity, Array)
+ is_(arrtable.c.strarr[1:3].type._type_affinity, Array)
+
+ def test_array_getitem_slice_type_dialect_level(self):
+ MyArray = self._myarray_fixture()
+ m = MetaData()
+ arrtable = Table(
+ 'arrtable', m,
+ Column('intarr', MyArray(Integer)),
+ Column('strarr', MyArray(String)),
+ )
+ is_(arrtable.c.intarr[1:3].type._type_affinity, Array)
+ is_(arrtable.c.strarr[1:3].type._type_affinity, Array)
+
+ # but the slice returns the actual type
+ assert isinstance(arrtable.c.intarr[1:3].type, MyArray)
+ assert isinstance(arrtable.c.strarr[1:3].type, MyArray)
+
+
test_table = meta = MyCustomType = MyTypeDec = None
@@ -1631,6 +1772,34 @@ class ExpressionTest(
assert distinct(test_table.c.data).type == test_table.c.data.type
assert test_table.c.data.distinct().type == test_table.c.data.type
+ def test_detect_coercion_of_builtins(self):
+ @inspection._self_inspects
+ class SomeSQLAThing(object):
+ def __repr__(self):
+ return "some_sqla_thing()"
+
+ class SomeOtherThing(object):
+ pass
+
+ assert_raises_message(
+ exc.ArgumentError,
+ r"Object some_sqla_thing\(\) is not legal as a SQL literal value",
+ lambda: column('a', String) == SomeSQLAThing()
+ )
+
+ is_(
+ bindparam('x', SomeOtherThing()).type,
+ types.NULLTYPE
+ )
+
+ def test_detect_coercion_not_fooled_by_mock(self):
+ m1 = mock.Mock()
+ is_(
+ bindparam('x', m1).type,
+ types.NULLTYPE
+ )
+
+
class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = 'default'
@@ -1899,12 +2068,9 @@ class BooleanTest(
def __init__(self, value):
self.value = value
- class MyBool(types.SchemaType, TypeDecorator):
+ class MyBool(TypeDecorator):
impl = Boolean()
- def _set_table(self, table, column):
- self.impl._set_table(table, column)
-
# future method
def process_literal_param(self, value, dialect):
return value.value