author     Carlos Rivas <carlos@twobitcoder.com>    2016-01-26 13:45:31 -0800
committer  Carlos Rivas <carlos@twobitcoder.com>    2016-01-26 13:45:31 -0800
commit     c6d630ca819239bf1b18bd6e51f265fb1be951c9 (patch)
tree       e30838e4e462d7994cc69d0c281a2d4a88b89edf
parent     28365040ace29c9ceea28946ed19f07c3a4fcefc (diff)
parent     8163de4cc9e01460d3476b9fb3ed14a5b3e70bae (diff)
download   sqlalchemy-c6d630ca819239bf1b18bd6e51f265fb1be951c9.tar.gz
Merged zzzeek/sqlalchemy into master
-rw-r--r--  doc/build/changelog/changelog_09.rst | 2
-rw-r--r--  doc/build/changelog/changelog_10.rst | 101
-rw-r--r--  doc/build/changelog/changelog_11.rst | 180
-rw-r--r--  doc/build/changelog/migration_09.rst | 8
-rw-r--r--  doc/build/changelog/migration_11.rst | 660
-rw-r--r--  doc/build/core/connections.rst | 66
-rw-r--r--  doc/build/core/metadata.rst | 4
-rw-r--r--  doc/build/core/selectable.rst | 1
-rw-r--r--  doc/build/core/tutorial.rst | 163
-rw-r--r--  doc/build/core/type_basics.rst | 23
-rw-r--r--  doc/build/dialects/mysql.rst | 2
-rw-r--r--  doc/build/dialects/postgresql.rst | 5
-rw-r--r--  doc/build/orm/basic_relationships.rst | 64
-rw-r--r--  doc/build/orm/collections.rst | 4
-rw-r--r--  doc/build/orm/extensions/declarative/inheritance.rst | 88
-rw-r--r--  doc/build/orm/inheritance.rst | 294
-rw-r--r--  doc/build/orm/relationship_persistence.rst | 9
-rw-r--r--  doc/build/orm/session_basics.rst | 2
-rw-r--r--  doc/build/orm/session_events.rst | 2
-rw-r--r--  doc/build/orm/tutorial.rst | 88
-rw-r--r--  lib/sqlalchemy/__init__.py | 4
-rw-r--r--  lib/sqlalchemy/cextension/resultproxy.c | 13
-rw-r--r--  lib/sqlalchemy/dialects/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mssql/base.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mysql/__init__.py | 6
-rw-r--r--  lib/sqlalchemy/dialects/mysql/base.py | 1535
-rw-r--r--  lib/sqlalchemy/dialects/mysql/enumerated.py | 307
-rw-r--r--  lib/sqlalchemy/dialects/mysql/json.py | 90
-rw-r--r--  lib/sqlalchemy/dialects/mysql/reflection.py | 449
-rw-r--r--  lib/sqlalchemy/dialects/mysql/types.py | 766
-rw-r--r--  lib/sqlalchemy/dialects/oracle/zxjdbc.py | 3
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/array.py | 42
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/base.py | 30
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/ext.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/hstore.py | 17
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/json.py | 206
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/pg8000.py | 1
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/psycopg2.py | 1
-rw-r--r--  lib/sqlalchemy/dialects/sqlite/base.py | 27
-rw-r--r--  lib/sqlalchemy/dialects/sybase/base.py | 6
-rw-r--r--  lib/sqlalchemy/engine/__init__.py | 3
-rw-r--r--  lib/sqlalchemy/engine/base.py | 69
-rw-r--r--  lib/sqlalchemy/engine/default.py | 37
-rw-r--r--  lib/sqlalchemy/engine/interfaces.py | 107
-rw-r--r--  lib/sqlalchemy/engine/reflection.py | 3
-rw-r--r--  lib/sqlalchemy/engine/result.py | 405
-rw-r--r--  lib/sqlalchemy/engine/strategies.py | 12
-rw-r--r--  lib/sqlalchemy/engine/url.py | 10
-rw-r--r--  lib/sqlalchemy/ext/automap.py | 2
-rw-r--r--  lib/sqlalchemy/ext/compiler.py | 12
-rw-r--r--  lib/sqlalchemy/ext/declarative/api.py | 16
-rw-r--r--  lib/sqlalchemy/orm/mapper.py | 50
-rw-r--r--  lib/sqlalchemy/orm/persistence.py | 2
-rw-r--r--  lib/sqlalchemy/orm/query.py | 27
-rw-r--r--  lib/sqlalchemy/orm/relationships.py | 19
-rw-r--r--  lib/sqlalchemy/orm/scoping.py | 15
-rw-r--r--  lib/sqlalchemy/orm/strategies.py | 2
-rw-r--r--  lib/sqlalchemy/orm/strategy_options.py | 52
-rw-r--r--  lib/sqlalchemy/sql/__init__.py | 1
-rw-r--r--  lib/sqlalchemy/sql/base.py | 29
-rw-r--r--  lib/sqlalchemy/sql/compiler.py | 190
-rw-r--r--  lib/sqlalchemy/sql/ddl.py | 21
-rw-r--r--  lib/sqlalchemy/sql/default_comparator.py | 28
-rw-r--r--  lib/sqlalchemy/sql/elements.py | 93
-rw-r--r--  lib/sqlalchemy/sql/functions.py | 23
-rw-r--r--  lib/sqlalchemy/sql/operators.py | 15
-rw-r--r--  lib/sqlalchemy/sql/schema.py | 56
-rw-r--r--  lib/sqlalchemy/sql/selectable.py | 3
-rw-r--r--  lib/sqlalchemy/sql/sqltypes.py | 316
-rw-r--r--  lib/sqlalchemy/testing/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/testing/assertions.py | 15
-rw-r--r--  lib/sqlalchemy/testing/assertsql.py | 9
-rw-r--r--  lib/sqlalchemy/testing/plugin/pytestplugin.py | 4
-rw-r--r--  lib/sqlalchemy/testing/profiling.py | 5
-rw-r--r--  lib/sqlalchemy/testing/requirements.py | 13
-rw-r--r--  lib/sqlalchemy/testing/suite/test_types.py | 257
-rw-r--r--  lib/sqlalchemy/types.py | 5
-rw-r--r--  lib/sqlalchemy/util/compat.py | 24
-rw-r--r--  regen_callcounts.tox.ini | 16
-rw-r--r--  setup.cfg | 15
-rw-r--r--  setup.py | 10
-rw-r--r--  test/aaa_profiling/test_resultset.py | 5
-rw-r--r--  test/base/test_tutorials.py | 5
-rw-r--r--  test/base/test_utils.py | 59
-rw-r--r--  test/dialect/mssql/test_compiler.py | 2
-rw-r--r--  test/dialect/mssql/test_types.py | 12
-rw-r--r--  test/dialect/mysql/test_compiler.py | 6
-rw-r--r--  test/dialect/mysql/test_query.py | 28
-rw-r--r--  test/dialect/mysql/test_reflection.py | 3
-rw-r--r--  test/dialect/mysql/test_types.py | 46
-rw-r--r--  test/dialect/postgresql/test_compiler.py | 18
-rw-r--r--  test/dialect/postgresql/test_types.py | 120
-rw-r--r--  test/dialect/test_sybase.py | 14
-rw-r--r--  test/engine/test_execute.py | 186
-rw-r--r--  test/engine/test_parseconnect.py | 43
-rw-r--r--  test/engine/test_reconnect.py | 1
-rw-r--r--  test/engine/test_reflection.py | 21
-rw-r--r--  test/orm/inheritance/test_basic.py | 238
-rw-r--r--  test/orm/inheritance/test_concrete.py | 39
-rw-r--r--  test/orm/test_deferred.py | 58
-rw-r--r--  test/orm/test_options.py | 128
-rw-r--r--  test/orm/test_query.py | 68
-rw-r--r--  test/profiles.txt | 922
-rw-r--r--  test/requirements.py | 12
-rw-r--r--  test/sql/test_compiler.py | 200
-rw-r--r--  test/sql/test_functions.py | 8
-rw-r--r--  test/sql/test_join_rewriting.py | 5
-rw-r--r--  test/sql/test_metadata.py | 22
-rw-r--r--  test/sql/test_operators.py | 149
-rw-r--r--  test/sql/test_resultset.py | 260
-rw-r--r--  test/sql/test_rowcount.py | 17
-rw-r--r--  test/sql/test_selectable.py | 12
-rw-r--r--  test/sql/test_type_expressions.py | 7
-rw-r--r--  test/sql/test_types.py | 169
-rw-r--r--  tox.ini | 62
115 files changed, 6926 insertions, 3287 deletions
diff --git a/doc/build/changelog/changelog_09.rst b/doc/build/changelog/changelog_09.rst
index ace22424e..3ac0ab0b4 100644
--- a/doc/build/changelog/changelog_09.rst
+++ b/doc/build/changelog/changelog_09.rst
@@ -17,7 +17,7 @@
.. change::
:tags: bug, oracle, py3k
:tickets: 3491
- :versions: 1.1.0b1, 1.0.9
+ :versions: 1.0.9
Fixed support for cx_Oracle version 5.2, which was tripping
up SQLAlchemy's version detection under Python 3 and inadvertently
diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst
index 25592a3b1..a0b1ad957 100644
--- a/doc/build/changelog/changelog_10.rst
+++ b/doc/build/changelog/changelog_10.rst
@@ -16,12 +16,64 @@
:start-line: 5
.. changelog::
+ :version: 1.0.12
+ :released:
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 3632
+
+ Fixed bug in :class:`.Table` metadata construct which appeared
+ around the 0.9 series where adding columns to a :class:`.Table`
+ that was unpickled would fail to correctly establish the
+ :class:`.Column` within the 'c' collection, leading to issues in
+ areas such as ORM configuration. This could impact use cases such
+ as ``extend_existing`` and others.
+
+ .. change::
+ :tags: bug, py3k
+ :tickets: 3625
+
+ Fixed bug where some exception re-raise scenarios would attach
+ the exception to itself as the "cause"; while the Python 3 interpreter
+ is OK with this, it could cause endless loops in IPython.
+
+ .. change::
+ :tags: bug, mssql
+ :tickets: 3624
+ :pullreq: bitbucket:70
+
+ Fixed the syntax of the :func:`.extract` function when used on
+ MSSQL against a datetime value; the quotes around the keyword
+ are removed. Pull request courtesy Guillaume Doumenc.
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 3623
+
+ Fixed regression since 0.9 where the 0.9 style loader options
+ system failed to accommodate for multiple :func:`.undefer_group`
+ loader options in a single query. Multiple :func:`.undefer_group`
+ options will now be taken into account even against the same
+ entity.
+
+ .. change::
+ :tags: bug, mssql, firebird
+ :tickets: 3622
+
+ Fixed 1.0 regression where the eager fetch of cursor.rowcount was
+ no longer called for an UPDATE or DELETE statement emitted via plain
+ text or via the :func:`.text` construct, affecting those drivers
+ that erase cursor.rowcount once the cursor is closed such as SQL
+ Server ODBC and Firebird drivers.
+
+
+.. changelog::
:version: 1.0.11
:released: December 22, 2015
.. change::
:tags: bug, mysql
- :versions: 1.1.0b1
:tickets: 3613
An adjustment to the regular expression used to parse MySQL views,
@@ -31,7 +83,6 @@
.. change::
:tags: bug, mysql
- :versions: 1.1.0b1
:pullreq: github:222
Added new reserved words for MySQL 5.7 to the MySQL dialect,
@@ -41,7 +92,6 @@
.. change::
:tags: bug, ext
:tickets: 3605
- :versions: 1.1.0b1
Further fixes to :ticket:`3605`, pop method on :class:`.MutableDict`,
where the "default" argument was not included.
@@ -49,7 +99,6 @@
.. change::
:tags: bug, ext
:tickets: 3612
- :versions: 1.1.0b1
Fixed bug in baked loader system where the systemwide monkeypatch
for setting up baked lazy loaders would interfere with other
@@ -60,7 +109,6 @@
.. change::
:tags: bug, orm
:tickets: 3611
- :versions: 1.1.0b1
Fixed regression caused in 1.0.10 by the fix for :ticket:`3593` where
the check added for a polymorphic joinedload from a
@@ -70,7 +118,6 @@
.. change::
:tags: bug, orm
:tickets: 3610
- :versions: 1.1.0b1
Fixed bug where :meth:`.Session.bulk_update_mappings` and related
would not bump a version id counter when in use. The experience
@@ -81,7 +128,6 @@
.. change::
:tags: bug, sql
:tickets: 3609
- :versions: 1.1.0b1
Fixed bug in :meth:`.Update.return_defaults` which would cause all
insert-default holding columns not otherwise included in the SET
@@ -91,7 +137,6 @@
.. change::
:tags: bug, orm
:tickets: 3609
- :versions: 1.1.0b1
Major fixes to the :paramref:`.Mapper.eager_defaults` flag, this
flag would not be honored correctly in the case that multiple
@@ -102,7 +147,6 @@
.. change::
:tags: bug, orm
:tickets: 3606
- :versions: 1.1.0b1
Fixed bug where use of the :meth:`.Query.select_from` method would
cause a subsequent call to the :meth:`.Query.with_parent` method to
@@ -115,14 +159,12 @@
.. change::
:tags: bug, ext
:tickets: 3605
- :versions: 1.1.0b1
Added support for the ``dict.pop()`` and ``dict.popitem()`` methods
to the :class:`.mutable.MutableDict` class.
.. change::
:tags: change, tests
- :versions: 1.1.0b1
The ORM and Core tutorials, which have always been in doctest format,
are now exercised within the normal unit test suite in both Python
@@ -131,7 +173,6 @@
.. change::
:tags: bug, sql
:tickets: 3603
- :versions: 1.1.0b1
Fixed issue within the :meth:`.Insert.from_select` construct whereby
the :class:`.Select` construct would have its ``._raw_columns``
@@ -145,7 +186,6 @@
.. change::
:tags: bug, mysql
:tickets: 3602
- :versions: 1.1.0b1
Fixed bug in MySQL reflection where the "fractional sections portion"
of the :class:`.mysql.DATETIME`, :class:`.mysql.TIMESTAMP` and
@@ -156,7 +196,6 @@
.. change::
:tags: bug, orm
:tickets: 3599
- :versions: 1.1.0b1
Fixed issue where post_update on a many-to-one relationship would
fail to emit an UPDATE in the case where the attribute were set to
@@ -165,7 +204,6 @@
.. change::
:tags: bug, sql, postgresql
:tickets: 3598
- :versions: 1.1.0b1
Fixed bug where CREATE TABLE with a no-column table, but a constraint
such as a CHECK constraint would render an erroneous comma in the
@@ -175,7 +213,7 @@
.. change::
:tags: bug, mssql
:tickets: 3585
- :versions: 1.1.0b1
+
Added the error "20006: Write to the server failed" to the list
of disconnect errors for the pymssql driver, as this has been observed
@@ -185,7 +223,7 @@
:tags: bug, postgresql
:pullreq: github:216
:tickets: 3573
- :versions: 1.1.0b1
+
Fixed issue where the "FOR UPDATE OF" Postgresql-specific SELECT
modifier would fail if the referred table had a schema qualifier;
@@ -195,7 +233,7 @@
.. change::
:tags: bug, postgresql
:pullreq: github:215
- :versions: 1.1.0b1
+
Fixed bug where some varieties of SQL expression passed to the
"where" clause of :class:`.postgresql.ExcludeConstraint` would fail
@@ -204,7 +242,7 @@
.. change::
:tags: bug, orm, declarative
:pullreq: github:212
- :versions: 1.1.0b1
+
Fixed bug where in Py2K a unicode literal would not be accepted as the
string name of a class or other argument within declarative using
@@ -213,7 +251,6 @@
.. change::
:tags: bug, mssql
- :versions: 1.1.0b1
:pullreq: github:206
A descriptive ValueError is now raised in the event that SQL server
@@ -223,7 +260,6 @@
.. change::
:tags: bug, py3k
- :versions: 1.1.0b1
:pullreq: github:210, github:218, github:211
Updates to internal getargspec() calls, some py36-related
@@ -234,7 +270,6 @@
.. change::
:tags: bug, ext
- :versions: 1.1.0b1
:tickets: 3597
Fixed an issue in baked queries where the .get() method, used either
@@ -246,7 +281,6 @@
.. change::
:tags: feature, sql
- :versions: 1.1.0b1
:pullreq: github:200
Added support for parameter-ordered SET clauses in an UPDATE
@@ -263,7 +297,6 @@
.. change::
:tags: bug, orm
- :versions: 1.1.0b1
:tickets: 3593
Fixed bug which is actually a regression that occurred between
@@ -274,7 +307,6 @@
.. change::
:tags: bug, orm
- :versions: 1.1.0b1
:tickets: 3592
Fixed joinedload bug which would occur when a. the query includes
@@ -289,7 +321,6 @@
.. change::
:tags: bug, orm
- :versions: 1.1.0b1
:tickets: 2696
A rare case which occurs when a :meth:`.Session.rollback` fails in the
@@ -304,7 +335,6 @@
.. change::
:tags: bug, postgresql
- :versions: 1.1.0b1
:tickets: 3571
Fixed the ``.python_type`` attribute of :class:`.postgresql.INTERVAL`
@@ -315,7 +345,7 @@
.. change::
:tags: bug, mssql
:pullreq: github:213
- :versions: 1.1.0b1
+
Fixed issue where DDL generated for the MSSQL types DATETIME2,
TIME and DATETIMEOFFSET with a precision of "zero" would not generate
@@ -328,7 +358,6 @@
.. change::
:tags: bug, orm, postgresql
- :versions: 1.1.0b1
:tickets: 3556
Fixed regression in 1.0 where new feature of using "executemany"
@@ -340,7 +369,6 @@
.. change::
:tags: feature, ext
- :versions: 1.1.0b1
:tickets: 3551
Added the :paramref:`.AssociationProxy.info` parameter to the
@@ -352,7 +380,6 @@
.. change::
:tags: bug, oracle
- :versions: 1.1.0b1
:tickets: 3548
Fixed bug in Oracle dialect where reflection of tables and other
@@ -363,7 +390,6 @@
.. change::
:tags: feature, orm
- :versions: 1.1.0b1
:pullreq: github:201
Added new method :meth:`.Query.one_or_none`; same as
@@ -372,7 +398,6 @@
.. change::
:tags: bug, orm
- :versions: 1.1.0b1
:tickets: 3539
Fixed rare TypeError which could occur when stringifying certain
@@ -380,7 +405,6 @@
.. change::
:tags: bug, orm
- :versions: 1.1.0b1
:tickets: 3525
Fixed bug in :meth:`.Session.bulk_save_objects` where a mapped
@@ -390,7 +414,6 @@
.. change::
:tags: bug, sql
- :versions: 1.1.0b1
:tickets: 3520
Fixed regression in 1.0-released default-processor for multi-VALUES
@@ -401,7 +424,7 @@
.. change::
:tags: bug, examples
- :versions: 1.1.0b1
+
Fixed two issues in the "history_meta" example where history tracking
could encounter empty history, and where a column keyed to an alternate
@@ -411,7 +434,7 @@
.. change::
:tags: bug, orm
:tickets: 3510
- :versions: 1.1.0b1
+
Fixed 1.0 regression where the "noload" loader strategy would fail
to function for a many-to-one relationship. The loader used an
@@ -421,7 +444,7 @@
.. change::
:tags: bug, sybase
:tickets: 3508, 3509
- :versions: 1.1.0b1
+
Fixed two issues regarding Sybase reflection, allowing tables
without primary keys to be reflected as well as ensured that
@@ -433,7 +456,7 @@
.. change::
:tags: bug, postgresql
:pullreq: github:190
- :versions: 1.1.0b1
+
An adjustment to the new Postgresql feature of reflecting storage
options and USING of :ticket:`3455` released in 1.0.6,
diff --git a/doc/build/changelog/changelog_11.rst b/doc/build/changelog/changelog_11.rst
index 83a57ba7c..511b7b8be 100644
--- a/doc/build/changelog/changelog_11.rst
+++ b/doc/build/changelog/changelog_11.rst
@@ -22,6 +22,178 @@
:version: 1.1.0b1
.. change::
+ :tags: bug, sqlite
+ :tickets: 3634
+
+ The workaround for right-nested joins on SQLite, where they are rewritten
+ as subqueries in order to work around SQLite's lack of support for this
+ syntax, is lifted when SQLite version 3.7.16 or greater is detected.
+
+ .. seealso::
+
+ :ref:`change_3634`
+
+ .. change::
+ :tags: bug, sqlite
+ :tickets: 3633
+
+ The workaround for SQLite's unexpected delivery of column names as
+ ``tablename.columnname`` for some kinds of queries is now disabled
+ when SQLite version 3.10.0 or greater is detected.
+
+ .. seealso::
+
+ :ref:`change_3633`
+
+ .. change::
+ :tags: feature, orm
+ :tickets: 2349
+
+ Added new parameter :paramref:`.orm.mapper.passive_deletes` to
+ available mapper options. This allows a DELETE to proceed
+ for a joined-table inheritance mapping against the base table only,
+ while allowing for ON DELETE CASCADE to handle deleting the row
+ from the subclass tables.
+
+ .. seealso::
+
+ :ref:`change_2349`
+
+
+ .. change::
+ :tags: bug, sybase
+ :tickets: 2278
+
+ The unsupported Sybase dialect now raises ``NotImplementedError``
+ when attempting to compile a query that includes "offset"; Sybase
+ has no straightforward "offset" feature.
+
+ .. change::
+ :tags: feature, orm
+ :tickets: 3631
+
+ Calling str() on a core SQL construct has been made more "friendly",
+ when the construct contains non-standard SQL elements such as
+ RETURNING, array index operations, or dialect-specific or custom
+ datatypes. A string is now returned in these cases rendering an
+ approximation of the construct (typically the Postgresql-style
+ version of it) rather than raising an error.
+
+ .. seealso::
+
+ :ref:`change_3631`
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 3630
+
+ Fixed issue where two same-named relationships that refer to
+ a base class and a concrete-inherited subclass would raise an error
+ if those relationships were set up using "backref", while setting up the
+ identical configuration using relationship() instead with the conflicting
+ names would succeed, as is allowed in the case of a concrete mapping.
+
+ .. seealso::
+
+ :ref:`change_3630`
+
+ .. change::
+ :tags: feature, orm
+ :tickets: 3081
+
+ The ``str()`` call for :class:`.Query` will now take into account
+ the :class:`.Engine` to which the :class:`.Session` is bound, when
+ generating the string form of the SQL, so that the actual SQL
+ that would be emitted to the database is shown, if possible. Previously,
+ only the engine associated with the :class:`.MetaData` to which the
+ mappings are associated would be used, if present. If
+ no bind can be located either on the :class:`.Session` or on
+ the :class:`.MetaData` to which the mappings are associated, then
+ the "default" dialect is used to render the SQL, as was the case
+ previously.
+
+ .. seealso::
+
+ :ref:`change_3081`
+
+ .. change::
+ :tags: feature, sql
+ :tickets: 3501
+
+ A deep improvement to the recently added :meth:`.TextClause.columns`
+ method, and its interaction with result-row processing, now allows
+ the columns passed to the method to be positionally matched with the
+ result columns in the statement, rather than matching on name alone.
+ The advantage to this includes that when linking a textual SQL statement
+ to an ORM or Core table model, no system of labeling or de-duping of
+ common column names needs to occur, which also means there's no need
+ to worry about how label names match to ORM columns and so-forth. In
+ addition, the :class:`.ResultProxy` has been further enhanced to
+ map column and string keys to a row with greater precision in some
+ cases.
+
+ .. seealso::
+
+ :ref:`change_3501` - feature overview
+
+ :ref:`behavior_change_3501` - backwards compatibility remarks
+
+ .. change::
+ :tags: feature, engine
+ :tickets: 2685
+
+ Multi-tenancy schema translation for :class:`.Table` objects is added.
+ This supports the use case of an application that uses the same set of
+ :class:`.Table` objects in many schemas, such as schema-per-user.
+ A new execution option
+ :paramref:`.Connection.execution_options.schema_translate_map` is
+ added.
+
+ .. seealso::
+
+ :ref:`change_2685`
+
+ .. change::
+ :tags: feature, engine
+ :tickets: 3536
+
+ Added a new entrypoint system to the engine to allow "plugins" to
+ be stated in the query string for a URL. Custom plugins can
+ be written which will be given the chance up front to alter and/or
+ consume the engine's URL and keyword arguments, and then at engine
+ create time will be given the engine itself to allow additional
+ modifications or event registration. Plugins are written as a
+ subclass of :class:`.CreateEnginePlugin`; see that class for
+ details.
+
+ .. change::
+ :tags: feature, mysql
+ :tickets: 3547
+
+ Added :class:`.mysql.JSON` for MySQL 5.7. The JSON type provides
+ persistence of JSON values in MySQL as well as basic operator support
+ of "getitem" and "getpath", making use of the ``JSON_EXTRACT``
+ function in order to refer to individual paths in a JSON structure.
+
+ .. seealso::
+
+ :ref:`change_3547`
+
+ .. change::
+ :tags: feature, sql
+ :tickets: 3619
+
+ Added a new type to core :class:`.types.JSON`. This is the
+ base of the PostgreSQL :class:`.postgresql.JSON` type as well as that
+ of the new :class:`.mysql.JSON` type, so that a PG/MySQL-agnostic
+ JSON column may be used. The type features basic index and path
+ searching support.
+
+ .. seealso::
+
+ :ref:`change_3619`
+
+ .. change::
:tags: bug, sql
:tickets: 3616
@@ -275,7 +447,7 @@
:tickets: 3132
Added support for the SQL-standard function :class:`.array_agg`,
- which automatically returns an :class:`.Array` of the correct type
+ which automatically returns an :class:`.postgresql.ARRAY` of the correct type
and supports index / slice operations, as well as
:func:`.postgresql.array_agg`, which returns a :class:`.postgresql.ARRAY`
with additional comparison features. As arrays are only
@@ -292,8 +464,8 @@
:tags: feature, sql
:tickets: 3516
- Added a new type to core :class:`.types.Array`. This is the
- base of the PostgreSQL :class:`.ARRAY` type, and is now part of Core
+ Added a new type to core :class:`.types.ARRAY`. This is the
+ base of the PostgreSQL :class:`.postgresql.ARRAY` type, and is now part of Core
to begin supporting various SQL-standard array-supporting features
including some functions and eventual support for native arrays
on other databases that have an "array" concept, such as DB2 or Oracle.
@@ -399,7 +571,7 @@
:tickets: 3514
Additional fixes have been made regarding the value of ``None``
- in conjunction with the Postgresql :class:`.JSON` type. When
+ in conjunction with the Postgresql :class:`.postgresql.JSON` type. When
the :paramref:`.JSON.none_as_null` flag is left at its default
value of ``False``, the ORM will now correctly insert the Json
"'null'" string into the column whenever the value on the ORM
diff --git a/doc/build/changelog/migration_09.rst b/doc/build/changelog/migration_09.rst
index b07aed925..913815794 100644
--- a/doc/build/changelog/migration_09.rst
+++ b/doc/build/changelog/migration_09.rst
@@ -1125,7 +1125,7 @@ as INNER JOINs could always be flattened)::
SELECT a.*, b.*, c.* FROM a LEFT OUTER JOIN (b JOIN c ON b.id = c.id) ON a.id
-This was due to the fact that SQLite, even today, cannot parse a statement of the above format::
+This was due to the fact that SQLite, up until version **3.7.16**, could not parse a statement of the above format::
SQLite version 3.7.15.2 2013-01-09 11:53:05
Enter ".help" for instructions
@@ -1248,6 +1248,12 @@ with the above queries rewritten as::
JOIN item ON item.id = order_item_1.item_id AND item.type IN (?)
) AS anon_1 ON "order".id = anon_1.order_item_1_order_id
+.. note::
+
+ As of SQLAlchemy 1.1, the workarounds present in this feature for SQLite
+ will automatically disable themselves when SQLite version **3.7.16**
+ or greater is detected, as SQLite has repaired support for right-nested joins.
+
The :meth:`.Join.alias`, :func:`.aliased` and :func:`.with_polymorphic` functions now
support a new argument, ``flat=True``, which is used to construct aliases of joined-table
entities without embedding into a SELECT. This flag is not on by default, to help with
diff --git a/doc/build/changelog/migration_11.rst b/doc/build/changelog/migration_11.rst
index b5889c763..3be758226 100644
--- a/doc/build/changelog/migration_11.rst
+++ b/doc/build/changelog/migration_11.rst
@@ -16,7 +16,7 @@ What's New in SQLAlchemy 1.1?
some issues may be moved to later milestones in order to allow
for a timely release.
- Document last updated: December 4, 2015
+ Document last updated: January 19, 2016
Introduction
============
@@ -38,8 +38,8 @@ SQLAlchemy's ``setup.py`` file has for many years supported operation
both with Setuptools installed and without; supporting a "fallback" mode
that uses straight Distutils. As a Setuptools-less Python environment is
now unheard of, and in order to support the featureset of Setuptools
-more fully, in particular to support py.test's integration with it,
-``setup.py`` now depends on Setuptools fully.
+more fully, in particular to support py.test's integration with it as well
+as things like "extras", ``setup.py`` now depends on Setuptools fully.
.. seealso::
@@ -290,6 +290,125 @@ time on the outside of the subquery.
:ticket:`3582`
+.. _change_2349:
+
+passive_deletes feature for joined-inheritance mappings
+-------------------------------------------------------
+
+A joined-table inheritance mapping may now allow a DELETE to proceed
+as a result of :meth:`.Session.delete`, which only emits DELETE for the
+base table, and not the subclass table, allowing configured ON DELETE CASCADE
+to take place for the configured foreign keys. This is configured using
+the :paramref:`.orm.mapper.passive_deletes` option::
+
+ from sqlalchemy import Column, Integer, String, ForeignKey, create_engine
+ from sqlalchemy.orm import Session
+ from sqlalchemy.ext.declarative import declarative_base
+
+ Base = declarative_base()
+
+
+ class A(Base):
+ __tablename__ = "a"
+ id = Column('id', Integer, primary_key=True)
+ type = Column(String)
+
+ __mapper_args__ = {
+ 'polymorphic_on': type,
+ 'polymorphic_identity': 'a',
+ 'passive_deletes': True
+ }
+
+
+ class B(A):
+ __tablename__ = 'b'
+ b_table_id = Column('b_table_id', Integer, primary_key=True)
+ bid = Column('bid', Integer, ForeignKey('a.id', ondelete="CASCADE"))
+ data = Column('data', String)
+
+ __mapper_args__ = {
+ 'polymorphic_identity': 'b'
+ }
+
+With the above mapping, the :paramref:`.orm.mapper.passive_deletes` option
+is configured on the base mapper; it takes effect for all non-base mappers
+that are descendants of the mapper with the option set. A DELETE for
+an object of type ``B`` no longer needs to retrieve the primary key value
+of ``b_table_id`` if unloaded, nor does it need to emit a DELETE statement
+for the table itself::
+
+ session.delete(some_b)
+ session.commit()
+
+Will emit SQL as::
+
+ DELETE FROM a WHERE a.id = %(id)s
+ {'id': 1}
+ COMMIT
+
+As always, the target database must have foreign key support with
+ON DELETE CASCADE enabled.
+
+:ticket:`2349`
+
+.. _change_3630:
+
+Same-named backrefs will not raise an error when applied to concrete inheritance subclasses
+-------------------------------------------------------------------------------------------
+
+The following mapping has always been possible without issue::
+
+ class A(Base):
+ __tablename__ = 'a'
+ id = Column(Integer, primary_key=True)
+ b = relationship("B", foreign_keys="B.a_id", backref="a")
+
+ class A1(A):
+ __tablename__ = 'a1'
+ id = Column(Integer, primary_key=True)
+ b = relationship("B", foreign_keys="B.a1_id", backref="a1")
+ __mapper_args__ = {'concrete': True}
+
+ class B(Base):
+ __tablename__ = 'b'
+ id = Column(Integer, primary_key=True)
+
+ a_id = Column(ForeignKey('a.id'))
+ a1_id = Column(ForeignKey('a1.id'))
+
+Above, even though class ``A`` and class ``A1`` have a relationship
+named ``b``, no conflict warning or error occurs because class ``A1`` is
+marked as "concrete".
+
+However, if the relationships were configured the other way, an error
+would occur::
+
+ class A(Base):
+ __tablename__ = 'a'
+ id = Column(Integer, primary_key=True)
+
+
+ class A1(A):
+ __tablename__ = 'a1'
+ id = Column(Integer, primary_key=True)
+ __mapper_args__ = {'concrete': True}
+
+
+ class B(Base):
+ __tablename__ = 'b'
+ id = Column(Integer, primary_key=True)
+
+ a_id = Column(ForeignKey('a.id'))
+ a1_id = Column(ForeignKey('a1.id'))
+
+ a = relationship("A", backref="b")
+ a1 = relationship("A1", backref="b")
+
+The fix enhances the backref feature so that an error is not emitted,
+as well as an additional check within the mapper logic to bypass warning
+for an attribute being replaced.
+
+:ticket:`3630`
.. _change_3601:
@@ -344,6 +463,30 @@ would have to be compared during the merge.
:ticket:`3601`
+.. _change_3081:
+
+Stringify of Query will consult the Session for the correct dialect
+-------------------------------------------------------------------
+
+Calling ``str()`` on a :class:`.Query` object will consult the :class:`.Session`
+for the correct "bind" to use, in order to render the SQL that would be
+passed to the database. In particular this allows a :class:`.Query` that
+refers to dialect-specific SQL constructs to be renderable, assuming the
+:class:`.Query` is associated with an appropriate :class:`.Session`.
+Previously, this behavior would only take effect if the :class:`.MetaData`
+to which the mappings were associated were itself bound to the target
+:class:`.Engine`.
+
+If neither the underlying :class:`.MetaData` nor the :class:`.Session` are
+associated with any bound :class:`.Engine`, then the fallback to the
+"default" dialect is used to generate the SQL string.
+
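+As a rough sketch of the behavior (the ``User`` mapping and the database URL
+below are illustrative only, not part of the change itself)::
+
+ from sqlalchemy import create_engine
+ from sqlalchemy.orm import Session
+
+ engine = create_engine("postgresql://scott:tiger@localhost/test")
+ session = Session(bind=engine)
+
+ # str() now consults the Session's bind, so the statement below is
+ # rendered with the Postgresql dialect rather than the "default" dialect
+ print(session.query(User).filter(User.name == 'ed'))
+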
+.. seealso::
+
+ :ref:`change_3631`
+
+:ticket:`3081`
+
New Features and Improvements - Core
====================================
@@ -445,6 +588,120 @@ will not have much impact on the behavior of the column during an INSERT.
:ticket:`3216`
+.. _change_3501:
+
+ResultSet column matching enhancements; positional column setup for textual SQL
+-------------------------------------------------------------------------------
+
+A series of improvements were made to the :class:`.ResultProxy` system
+in the 1.0 series as part of :ticket:`918`, which reorganizes the internals
+to match cursor-bound result columns with table/ORM metadata positionally,
+rather than by matching names, for compiled SQL constructs that contain full
+information about the result rows to be returned. This allows a dramatic savings
+on Python overhead as well as much greater accuracy in linking ORM and Core
+SQL expressions to result rows. In 1.1, this reorganization has been taken
+further internally, and also has been made available to pure-text SQL
+constructs via the use of the recently added :meth:`.TextClause.columns` method.
+
+TextAsFrom.columns() now works positionally
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The :meth:`.TextClause.columns` method, added in 0.9, accepts column-based arguments
+positionally; in 1.1, when all columns are passed positionally, the correlation
+of these columns to the ultimate result set is also performed positionally.
+The key advantage here is that textual SQL can now be linked to an ORM-
+level result set without the need to deal with ambiguous or duplicate column
+names, or with having to match labeling schemes to ORM-level labeling schemes. All
+that's needed now is the same ordering of columns within the textual SQL
+and the column arguments passed to :meth:`.TextClause.columns`::
+
+
+ from sqlalchemy import text
+ stmt = text("SELECT users.id, addresses.id, users.id, "
+ "users.name, addresses.email_address AS email "
+ "FROM users JOIN addresses ON users.id=addresses.user_id "
+ "WHERE users.id = 1").columns(
+ User.id,
+ Address.id,
+ Address.user_id,
+ User.name,
+ Address.email_address
+ )
+
+ query = session.query(User).from_statement(stmt).\
+ options(contains_eager(User.addresses))
+ result = query.all()
+
+Above, the textual SQL contains the column "id" three times, which would
+normally be ambiguous. Using the new feature, we can apply the mapped
+columns from the ``User`` and ``Address`` class directly, even linking
+the ``Address.user_id`` column to the ``users.id`` column in textual SQL
+for fun, and the :class:`.Query` object will receive rows that are correctly
+targetable as needed, including for an eager load.
+
+This change is **backwards incompatible** with code that passes the columns
+to the method with a different ordering than is present in the textual statement.
+It is hoped that this impact will be low due to the fact that this
+method has always been documented illustrating the columns being passed in the same order as that of the
+textual SQL statement, as would seem intuitive, even though the internals
+weren't checking for this. The method itself was only added as of 0.9 in
+any case and may not yet have widespread use. Notes on exactly how to handle
+this behavioral change for applications using it are at :ref:`behavior_change_3501`.
+
+.. seealso::
+
+ :ref:`sqlexpression_text_columns` - in the Core tutorial
+
+ :ref:`behavior_change_3501` - backwards compatibility remarks
+
+Positional matching is trusted over name-based matching for Core/ORM SQL constructs
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Another aspect of this change is that the rules for matching columns have also been modified
+to rely upon "positional" matching more fully for compiled SQL constructs
+as well. Given a statement like the following::
+
+ ua = users.alias('ua')
+ stmt = select([users.c.user_id, ua.c.user_id])
+
+The above statement will compile to::
+
+ SELECT users.user_id, ua.user_id FROM users, users AS ua
+
+In 1.0, the above statement when executed would be matched to its original
+compiled construct using positional matching, however because the statement
+contains the ``'user_id'`` label duplicated, the "ambiguous column" rule
+would still get involved and prevent the columns from being fetched from a row.
+As of 1.1, the "ambiguous column" rule does not affect an exact match from
+a column construct to the SQL column, which is what the ORM uses to
+fetch columns::
+
+ result = conn.execute(stmt)
+ row = result.first()
+
+ # these both match positionally, so no error
+ user_id = row[users.c.user_id]
+ ua_id = row[ua.c.user_id]
+
+ # this still raises, however
+ user_id = row['user_id']
+
+Much less likely to get an "ambiguous column" error message
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+As part of this change, the wording of the error message ``Ambiguous column
+name '<name>' in result set! try 'use_labels' option on select statement.``
+has been dialed back; as this message should now be extremely rare when using
+the ORM or Core compiled SQL constructs, it merely states
+``Ambiguous column name '<name>' in result set column descriptions``, and
+only when a result column is retrieved using the string name that is actually
+ambiguous, e.g. ``row['user_id']`` in the above example. It also now refers
+to the actual ambiguous name from the rendered SQL statement itself,
+rather than indicating the key or name that was local to the construct being
+used for the fetch.
+
+:ticket:`3501`
+
.. _change_2528:
A UNION or similar of SELECTs with LIMIT/OFFSET/ORDER BY now parenthesizes the embedded selects
@@ -502,33 +759,138 @@ UNIONs with parenthesized SELECT statements is much less common than the
:ticket:`2528`
+.. _change_3619:
+
+JSON support added to Core
+--------------------------
+
+As MySQL now has a JSON datatype in addition to the Postgresql JSON datatype,
+the core now gains a :class:`sqlalchemy.types.JSON` datatype that is the basis
+for both of these. Using this type allows access to the "getitem" operator
+as well as the "getpath" operator in a way that is agnostic across Postgresql
+and MySQL.
+
+The new datatype also has a series of improvements to the handling of
+NULL values as well as expression handling.
+
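+As a short illustrative sketch (the table and key names are made up for the
+example), the new type is used like any other Core type, and index / path
+access is available on the resulting column::
+
+ from sqlalchemy import Table, Column, Integer, MetaData
+ from sqlalchemy.types import JSON
+
+ metadata = MetaData()
+ data_table = Table('data_table', metadata,
+     Column('id', Integer, primary_key=True),
+     Column('data', JSON)
+ )
+
+ # "getitem" access to a single element
+ expr = data_table.c.data['some_key']
+
+ # "getpath" access using a tuple of keys / indexes
+ expr = data_table.c.data[('key_1', 'key_2', 5)]
+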
+.. seealso::
+
+ :ref:`change_3547`
+
+ :class:`.types.JSON`
+
+ :class:`.postgresql.JSON`
+
+ :class:`.mysql.JSON`
+
+:ticket:`3619`
+
+.. _change_3514:
+
+JSON "null" is inserted as expected with ORM operations, regardless of column default present
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The :class:`.types.JSON` type and its descendant types :class:`.postgresql.JSON`
+and :class:`.mysql.JSON` have a flag :paramref:`.types.JSON.none_as_null` which
+when set to True indicates that the Python value ``None`` should translate
+into a SQL NULL rather than a JSON NULL value. This flag defaults to False,
+which means that the column should *never* insert SQL NULL or fall back
+to a default unless the :func:`.null` constant were used. However, this would
+fail in the ORM under two circumstances; one is when the column also contained
+a default or server_default value, a positive value of ``None`` on the mapped
+attribute would still result in the column-level default being triggered,
+replacing the ``None`` value::
+
+ obj = MyObject(json_value=None)
+ session.add(obj)
+ session.commit() # would fire off default / server_default, not encode "'none'"
+
+The other is when the :meth:`.Session.bulk_insert_mappings`
+method were used, ``None`` would be ignored in all cases::
+
+ session.bulk_insert_mappings(
+ MyObject,
+ [{"json_value": None}]) # would insert SQL NULL and/or trigger defaults
+
+The :class:`.types.JSON` type now implements the
+:attr:`.TypeEngine.should_evaluate_none` flag,
+indicating that ``None`` should not be ignored here; it is configured
+automatically based on the value of :paramref:`.types.JSON.none_as_null`.
+Thanks to :ticket:`3061`, we can differentiate when the value ``None`` is actively
+set by the user versus when it was never set at all.
+
+If the attribute is not set at all, then column level defaults *will*
+fire off and/or SQL NULL will be inserted as expected, as was the behavior
+previously. Below, the two variants are illustrated::
+
+ obj = MyObject(json_value=None)
+ session.add(obj)
+ session.commit() # *will not* fire off column defaults, will insert JSON 'null'
+
+ obj = MyObject()
+ session.add(obj)
+ session.commit() # *will* fire off column defaults, and/or insert SQL NULL
+
+The feature applies as well to the new base :class:`.types.JSON` type
+and its descendant types.
+
+:ticket:`3514`
+
+.. _change_3514_jsonnull:
+
+New JSON.NULL Constant Added
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+To ensure that an application can always have full control at the value level
+of whether a :class:`.types.JSON`, :class:`.postgresql.JSON`, :class:`.mysql.JSON`,
+or :class:`.postgresql.JSONB` column
+should receive a SQL NULL or JSON ``"null"`` value, the constant
+:attr:`.types.JSON.NULL` has been added, which in conjunction with
+:func:`.null` can be used to determine fully between SQL NULL and
+JSON ``"null"``, regardless of what :paramref:`.types.JSON.none_as_null` is set
+to::
+
+ from sqlalchemy import null
+ from sqlalchemy.dialects.postgresql import JSON
+
+ obj1 = MyObject(json_value=null()) # will *always* insert SQL NULL
+ obj2 = MyObject(json_value=JSON.NULL) # will *always* insert JSON string "null"
+
+ session.add_all([obj1, obj2])
+ session.commit()
+
+The feature applies as well to the new base :class:`.types.JSON` type
+and its descendant types.
+
+:ticket:`3514`
+
.. _change_3516:
Array support added to Core; new ANY and ALL operators
------------------------------------------------------
-Along with the enhancements made to the Postgresql :class:`.ARRAY`
-type described in :ref:`change_3503`, the base class of :class:`.ARRAY`
-itself has been moved to Core in a new class :class:`.types.Array`.
+Along with the enhancements made to the Postgresql :class:`.postgresql.ARRAY`
+type described in :ref:`change_3503`, the base class of :class:`.postgresql.ARRAY`
+itself has been moved to Core in a new class :class:`.types.ARRAY`.
Arrays are part of the SQL standard, as are several array-oriented functions
such as ``array_agg()`` and ``unnest()``. In support of these constructs
for not just PostgreSQL but also potentially for other array-capable backends
in the future such as DB2, the majority of array logic for SQL expressions
-is now in Core. The :class:`.Array` type still **only works on
+is now in Core. The :class:`.types.ARRAY` type still **only works on
Postgresql**, however it can be used directly, supporting special array
use cases such as indexed access, as well as support for the ANY and ALL::
mytable = Table("mytable", metadata,
- Column("data", Array(Integer, dimensions=2))
+ Column("data", ARRAY(Integer, dimensions=2))
)
expr = mytable.c.data[5][6]
expr = mytable.c.data[5].any(12)
-In support of ANY and ALL, the :class:`.Array` type retains the same
-:meth:`.Array.Comparator.any` and :meth:`.Array.Comparator.all` methods
+In support of ANY and ALL, the :class:`.types.ARRAY` type retains the same
+:meth:`.types.ARRAY.Comparator.any` and :meth:`.types.ARRAY.Comparator.all` methods
from the PostgreSQL type, but also exports these operations to new
standalone operator functions :func:`.sql.expression.any_` and
:func:`.sql.expression.all_`. These two functions work in more
@@ -541,7 +903,7 @@ as::
For the PostgreSQL-specific operators "contains", "contained_by", and
"overlaps", one should continue to use the :class:`.postgresql.ARRAY`
-type directly, which provides all functionality of the :class:`.Array`
+type directly, which provides all functionality of the :class:`.types.ARRAY`
type as well.
The :func:`.sql.expression.any_` and :func:`.sql.expression.all_` operators
@@ -564,7 +926,7 @@ such as::
New Function features, "WITHIN GROUP", array_agg and set aggregate functions
----------------------------------------------------------------------------
-With the new :class:`.Array` type we can also implement a pre-typed
+With the new :class:`.types.ARRAY` type we can also implement a pre-typed
function for the ``array_agg()`` SQL function that returns an array,
which is now available using :class:`.array_agg`::
@@ -643,6 +1005,69 @@ can be done like any other type::
:ticket:`2919`
+.. _change_2685:
+
+Multi-Tenancy Schema Translation for Table objects
+--------------------------------------------------
+
+To support the use case of an application that uses the same set of
+:class:`.Table` objects in many schemas, such as schema-per-user, a new
+execution option :paramref:`.Connection.execution_options.schema_translate_map`
+is added. Using this mapping, a set of :class:`.Table`
+objects can be made on a per-connection basis to refer to any set of schemas
+instead of the :paramref:`.Table.schema` to which they were assigned. The
+translation works for DDL and SQL generation, as well as with the ORM.
+
+For example, if the ``User`` class were assigned the schema "per_user"::
+
+ class User(Base):
+ __tablename__ = 'user'
+ id = Column(Integer, primary_key=True)
+
+ __table_args__ = {'schema': 'per_user'}
+
+On each request, the :class:`.Session` can be set up to refer to a
+different schema each time::
+
+ session = Session()
+ session.connection(execution_options={
+ "schema_translate_map": {"per_user": "account_one"}})
+
+ # will query from the ``account_one.user`` table
+ session.query(User).get(5)
+
+.. seealso::
+
+ :ref:`schema_translating`
+
+:ticket:`2685`
+
+.. _change_3631:
+
+"Friendly" stringification of Core SQL constructs without a dialect
+-------------------------------------------------------------------
+
+Calling ``str()`` on a Core SQL construct will now produce a string
+in more cases than before, supporting various SQL constructs not normally
+present in default SQL such as RETURNING, array indexes, and non-standard
+datatypes::
+
+ >>> from sqlalchemy import table, column
+ >>> t = table('x', column('a'), column('b'))
+ >>> print(t.insert().returning(t.c.a, t.c.b))
+ INSERT INTO x (a, b) VALUES (:a, :b) RETURNING x.a, x.b
+
+The ``str()`` function now calls upon an entirely separate dialect / compiler
+intended just for plain string printing without a specific dialect set up,
+so as more "just show me a string!" cases come up, these can be added
+to this dialect/compiler without impacting behaviors on real dialects.
+
+.. seealso::
+
+ :ref:`change_3081`
+
+:ticket:`3631`
+
.. _change_3531:
The type_coerce function is now a persistent SQL element
@@ -754,6 +1179,54 @@ Key Behavioral Changes - ORM
Key Behavioral Changes - Core
=============================
+.. _behavior_change_3501:
+
+TextClause.columns() will match columns positionally, not by name, when passed positionally
+-------------------------------------------------------------------------------------------
+
+The new behavior of the :meth:`.TextClause.columns` method, which itself
+was recently added as of the 0.9 series, is that when
+columns are passed positionally without any additional keyword arguments,
+they are linked to the ultimate result set
+columns positionally, and no longer on name. It is hoped that the impact
+of this change will be low due to the fact that the method has always been documented
+illustrating the columns being passed in the same order as that of the
+textual SQL statement, as would seem intuitive, even though the internals
+weren't checking for this.
+
+An application that is using this method by passing :class:`.Column` objects
+to it positionally must ensure that the position of those :class:`.Column`
+objects matches the position in which these columns are stated in the
+textual SQL.
+
+E.g., code like the following::
+
+ stmt = text("SELECT id, name, description FROM table")
+
+ # no longer matches by name
+ stmt = stmt.columns(my_table.c.name, my_table.c.description, my_table.c.id)
+
+Would no longer work as expected; the order of the columns given is now
+significant::
+
+ # correct version
+ stmt = stmt.columns(my_table.c.id, my_table.c.name, my_table.c.description)
+
+Possibly more likely, a statement that worked like this::
+
+ stmt = text("SELECT * FROM table")
+ stmt = stmt.columns(my_table.c.id, my_table.c.name, my_table.c.description)
+
+is now slightly risky, as the "*" specification will generally deliver columns
+in the order in which they are present in the table itself. If the structure
+of the table changes due to schema changes, this ordering may no longer be the same.
+Therefore when using :meth:`.TextClause.columns`, it's advised to list out
+the desired columns explicitly in the textual SQL, though it's no longer
+necessary to worry about the names themselves in the textual SQL.
+
+.. seealso::
+
+ :ref:`change_3501`
Dialect Improvements and Changes - Postgresql
=============================================
@@ -767,8 +1240,9 @@ As described in :ref:`change_3499`, the ORM relies upon being able to
produce a hash function for column values when a query's selected entities
mixes full ORM entities with column expressions. The ``hashable=False``
flag is now correctly set on all of PG's "data structure" types, including
-:class:`.ARRAY` and :class:`.JSON`. The :class:`.JSONB` and :class:`.HSTORE`
-types already included this flag. For :class:`.ARRAY`,
+:class:`.postgresql.ARRAY` and :class:`.postgresql.JSON`.
+The :class:`.JSONB` and :class:`.HSTORE`
+types already included this flag. For :class:`.postgresql.ARRAY`,
this is conditional based on the :paramref:`.postgresql.ARRAY.as_tuple`
flag, however it should no longer be necessary to set this flag
in order to have an array value present in a composed ORM row.
@@ -840,7 +1314,7 @@ The JSON cast() operation now requires ``.astext`` is called explicitly
As part of the changes in :ref:`change_3503`, the workings of the
:meth:`.ColumnElement.cast` operator on :class:`.postgresql.JSON` and
:class:`.postgresql.JSONB` no longer implictly invoke the
-:attr:`.JSON.Comparator.astext` modifier; Postgresql's JSON/JSONB types
+:attr:`.postgresql.JSON.Comparator.astext` modifier; Postgresql's JSON/JSONB types
support CAST operations to each other without the "astext" aspect.
This means that in most cases, an application that was doing this::
@@ -852,88 +1326,6 @@ Will now need to change to this::
expr = json_col['somekey'].astext.cast(Integer)
-
-.. _change_3514:
-
-Postgresql JSON "null" is inserted as expected with ORM operations, regardless of column default present
------------------------------------------------------------------------------------------------------------
-
-The :class:`.JSON` type has a flag :paramref:`.JSON.none_as_null` which
-when set to True indicates that the Python value ``None`` should translate
-into a SQL NULL rather than a JSON NULL value. This flag defaults to False,
-which means that the column should *never* insert SQL NULL or fall back
-to a default unless the :func:`.null` constant were used. However, this would
-fail in the ORM under two circumstances; one is when the column also contained
-a default or server_default value, a positive value of ``None`` on the mapped
-attribute would still result in the column-level default being triggered,
-replacing the ``None`` value::
-
- obj = MyObject(json_value=None)
- session.add(obj)
- session.commit() # would fire off default / server_default, not encode "'none'"
-
-The other is when the :meth:`.Session.bulk_insert_mappings`
-method were used, ``None`` would be ignored in all cases::
-
- session.bulk_insert_mappings(
- MyObject,
- [{"json_value": None}]) # would insert SQL NULL and/or trigger defaults
-
-The :class:`.JSON` type now implements the
-:attr:`.TypeEngine.should_evaluate_none` flag,
-indicating that ``None`` should not be ignored here; it is configured
-automatically based on the value of :paramref:`.JSON.none_as_null`.
-Thanks to :ticket:`3061`, we can differentiate when the value ``None`` is actively
-set by the user versus when it was never set at all.
-
-If the attribute is not set at all, then column level defaults *will*
-fire off and/or SQL NULL will be inserted as expected, as was the behavior
-previously. Below, the two variants are illustrated::
-
- obj = MyObject(json_value=None)
- session.add(obj)
- session.commit() # *will not* fire off column defaults, will insert JSON 'null'
-
- obj = MyObject()
- session.add(obj)
- session.commit() # *will* fire off column defaults, and/or insert SQL NULL
-
-:ticket:`3514`
-
-.. seealso::
-
- :ref:`change_3250`
-
- :ref:`change_3514_jsonnull`
-
-.. _change_3514_jsonnull:
-
-New JSON.NULL Constant Added
-----------------------------
-
-To ensure that an application can always have full control at the value level
-of whether a :class:`.postgresql.JSON` or :class:`.postgresql.JSONB` column
-should receive a SQL NULL or JSON ``"null"`` value, the constant
-:attr:`.postgresql.JSON.NULL` has been added, which in conjunction with
-:func:`.null` can be used to determine fully between SQL NULL and
-JSON ``"null"``, regardless of what :paramref:`.JSON.none_as_null` is set
-to::
-
- from sqlalchemy import null
- from sqlalchemy.dialects.postgresql import JSON
-
- obj1 = MyObject(json_value=null()) # will *always* insert SQL NULL
- obj2 = MyObject(json_value=JSON.NULL) # will *always* insert JSON string "null"
-
- session.add_all([obj1, obj2])
- session.commit()
-
-.. seealso::
-
- :ref:`change_3514`
-
-:ticket:`3514`
-
.. _change_2729:
ARRAY with ENUM will now emit CREATE TYPE for the ENUM
@@ -975,6 +1367,25 @@ emits::
Dialect Improvements and Changes - MySQL
=============================================
+.. _change_3547:
+
+MySQL JSON Support
+------------------
+
+A new type :class:`.mysql.JSON` is added to the MySQL dialect supporting
+the JSON type newly added to MySQL 5.7. This type provides both persistence
+of JSON as well as rudimentary indexed-access using the ``JSON_EXTRACT``
+function internally. An indexable JSON column that works across MySQL
+and Postgresql can be achieved by using the :class:`.types.JSON` datatype
+common to both MySQL and Postgresql.
+
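+A brief sketch of usage (the table name and key are illustrative only)::
+
+ from sqlalchemy import Table, Column, Integer, MetaData, select
+ from sqlalchemy.dialects.mysql import JSON
+
+ metadata = MetaData()
+ message = Table('message', metadata,
+     Column('id', Integer, primary_key=True),
+     Column('json_data', JSON)
+ )
+
+ # "getitem" access; renders using the JSON_EXTRACT function on MySQL
+ expr = message.c.json_data['some_key']
+
+ # a SELECT retrieving one element of the JSON document per row
+ stmt = select([message.c.json_data['some_key']])
+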
+.. seealso::
+
+ :ref:`change_3619`
+
+:ticket:`3547`
+
+
.. _change_mysql_3216:
No more generation of an implicit KEY for composite primary key w/ AUTO_INCREMENT
@@ -1031,16 +1442,63 @@ directives are no longer needed::
Dialect Improvements and Changes - SQLite
=============================================
+.. _change_3634:
+
+Right-nested join workaround lifted for SQLite version 3.7.16
+-------------------------------------------------------------
+
+In version 0.9, the feature introduced by :ref:`feature_joins_09` went
+through lots of effort to support rewriting of joins on SQLite to always
+use subqueries in order to achieve a "right-nested-join" effect, as
+SQLite has not supported this syntax for many years. Ironically,
+the version of SQLite noted in that migration note, 3.7.15.2, was the *last*
+version of SQLite to actually have this limitation! The next release was
+3.7.16 and support for right nested joins was quietly added. In 1.1, the work
+to identify the specific SQLite version and source commit where this change
+was made was done (SQLite's changelog refers to it with the cryptic phrase "Enhance
+the query optimizer to exploit transitive join constraints" without linking
+to any issue number, change number, or further explanation), and the workarounds
+present in this change are now lifted for SQLite when the DBAPI reports
+that version 3.7.16 or greater is in effect.
+
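+As a rough illustration of the kind of runtime check involved (this is not
+SQLAlchemy's internal mechanism, just the standard library ``sqlite3`` module
+reporting the library version)::
+
+ import sqlite3
+
+ # the SQLite library version as a tuple, e.g. (3, 8, 11)
+ if sqlite3.sqlite_version_info >= (3, 7, 16):
+     # right-nested joins are parsed natively; no rewriting needed
+     needs_join_rewriting = False
+ else:
+     needs_join_rewriting = True
+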
+:ticket:`3634`
+
+.. _change_3633:
+
+Dotted column names workaround lifted for SQLite version 3.10.0
+---------------------------------------------------------------
+
+The SQLite dialect has long had a workaround for an issue where the database
+driver does not report the correct column names for some SQL result sets, in
+particular when UNION is used. The workaround is detailed at
+:ref:`sqlite_dotted_column_names`, and requires that SQLAlchemy assume that any
+column name with a dot in it is actually a ``tablename.columnname`` combination
+delivered via this buggy behavior, with an option to turn it off via the
+``sqlite_raw_colnames`` execution option.
+
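+For reference, the manual escape hatch looks roughly like the following
+(assuming an existing ``connection``; the query itself is only an example)::
+
+ # bypass the dotted-name translation for a single connection
+ result = connection.execution_options(sqlite_raw_colnames=True).execute(
+     "SELECT x.a, x.b FROM x UNION SELECT y.a, y.b FROM y")
+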
+As of SQLite version 3.10.0, the bug in UNION and other queries has been fixed;
+like the change described in :ref:`change_3634`, SQLite's changelog only
+identifies it cryptically as "Added the colUsed field to sqlite3_index_info for
+use by the sqlite3_module.xBestIndex method", however SQLAlchemy's translation
+of these dotted column names is no longer required with this version, so is
+turned off when version 3.10.0 or greater is detected.
+
+Overall, the SQLAlchemy :class:`.ResultProxy` as of the 1.0 series relies much
+less on column names in result sets when delivering results for Core and ORM
+SQL constructs, so the importance of this issue was already lessened in any
+case.
+
+:ticket:`3633`
+
.. _change_sqlite_schemas:
Improved Support for Remote Schemas
-------------------------------------
-
+-----------------------------------
The SQLite dialect now implements :meth:`.Inspector.get_schema_names`
and additionally has improved support for tables and indexes that are
created and reflected from a remote schema, which in SQLite is a
-database that is assigned a name via the ``ATTACH`` statement; previously,
-the ``CREATE INDEX`` DDL didn't work correctly for a schema-bound table
+database that is assigned a name via the ``ATTACH`` statement; previously,
+the ``CREATE INDEX`` DDL didn't work correctly for a schema-bound table
and the :meth:`.Inspector.get_foreign_keys` method will now indicate the
given schema in the results. Cross-schema foreign keys aren't supported.
diff --git a/doc/build/core/connections.rst b/doc/build/core/connections.rst
index 72e1d6a61..709642ecf 100644
--- a/doc/build/core/connections.rst
+++ b/doc/build/core/connections.rst
@@ -368,6 +368,69 @@ the SQL statement. When the :class:`.ResultProxy` is closed, the underlying
:class:`.Connection` is closed for us, resulting in the
DBAPI connection being returned to the pool with transactional resources removed.
+.. _schema_translating:
+
+Translation of Schema Names
+===========================
+
+To support multi-tenancy applications that distribute common sets of tables
+into multiple schemas, the
+:paramref:`.Connection.execution_options.schema_translate_map`
+execution option may be used to repurpose a set of :class:`.Table` objects
+to render under different schema names without any changes.
+
+Given a table::
+
+ user_table = Table(
+ 'user', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('name', String(50))
+ )
+
+The "schema" of this :class:`.Table` as defined by the
+:paramref:`.Table.schema` attribute is ``None``. The
+:paramref:`.Connection.execution_options.schema_translate_map` can specify
+that all :class:`.Table` objects with a schema of ``None`` would instead
+render the schema as ``user_schema_one``::
+
+ connection = engine.connect().execution_options(
+ schema_translate_map={None: "user_schema_one"})
+
+ result = connection.execute(user_table.select())
+
+The above code will invoke SQL on the database of the form::
+
+ SELECT user_schema_one.user.id, user_schema_one.user.name FROM
+ user_schema_one.user
+
+That is, the schema name is substituted with our translated name. The
+map can specify any number of target->destination schemas::
+
+ connection = engine.connect().execution_options(
+ schema_translate_map={
+ None: "user_schema_one", # no schema name -> "user_schema_one"
+ "special": "special_schema", # schema="special" becomes "special_schema"
+ "public": None # Table objects with schema="public" will render with no schema
+ })
+
+The :paramref:`.Connection.execution_options.schema_translate_map` parameter
+affects all DDL and SQL constructs generated from the SQL expression language,
+as derived from the :class:`.Table` or :class:`.Sequence` objects.
+It does **not** impact literal string SQL used via the :func:`.expression.text`
+construct nor via plain strings passed to :meth:`.Connection.execute`.
+
+The feature takes effect **only** in those cases where the name of the
+schema is derived directly from that of a :class:`.Table` or :class:`.Sequence`;
+it does not impact methods where a string schema name is passed directly.
+By this pattern, it takes effect within the "can create" / "can drop" checks
+performed when methods such as :meth:`.MetaData.create_all` or
+:meth:`.MetaData.drop_all` are called, and it takes effect when
+using table reflection given a :class:`.Table` object. However, it does
+**not** affect the operations present on the :class:`.Inspector` object,
+as the schema name is passed to these methods explicitly.
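+
+As a hypothetical sketch, DDL for all tables can be emitted into a per-tenant
+schema by applying the map to the connection passed to
+:meth:`.MetaData.create_all`, assuming the ``engine`` and ``metadata`` from the
+examples above::
+
+    connection = engine.connect().execution_options(
+        schema_translate_map={None: "tenant_schema_one"})
+
+    # CREATE TABLE renders "tenant_schema_one.user" rather than "user"
+    metadata.create_all(connection)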
+
+.. versionadded:: 1.1
+
.. _engine_disposal:
Engine Disposal
@@ -656,6 +719,9 @@ Connection / Engine API
.. autoclass:: Connectable
:members:
+.. autoclass:: CreateEnginePlugin
+ :members:
+
.. autoclass:: Engine
:members:
diff --git a/doc/build/core/metadata.rst b/doc/build/core/metadata.rst
index 1eade1c1c..24df3bc49 100644
--- a/doc/build/core/metadata.rst
+++ b/doc/build/core/metadata.rst
@@ -45,7 +45,7 @@ Note also that each column describes its datatype using objects corresponding
to genericized types, such as :class:`~sqlalchemy.types.Integer` and
:class:`~sqlalchemy.types.String`. SQLAlchemy features dozens of types of
varying levels of specificity as well as the ability to create custom types.
-Documentation on the type system can be found at :ref:`types`.
+Documentation on the type system can be found at :ref:`types_toplevel`.
Accessing Tables and Columns
----------------------------
@@ -248,7 +248,7 @@ There are two major migration tools available for SQLAlchemy:
supporting such features as transactional DDL, automatic generation of "candidate"
migrations, an "offline" mode which generates SQL scripts, and support for branch
resolution.
-* `SQLAlchemy-Migrate <http://code.google.com/p/sqlalchemy-migrate/>`_ - The original
+* `SQLAlchemy-Migrate <https://github.com/openstack/sqlalchemy-migrate>`_ - The original
migration tool for SQLAlchemy, SQLAlchemy-Migrate is widely used and continues
under active development. SQLAlchemy-Migrate includes features such as
SQL script generation, ORM class generation, ORM model comparison, and extensive
diff --git a/doc/build/core/selectable.rst b/doc/build/core/selectable.rst
index 03ebeb4ab..e73ce7b64 100644
--- a/doc/build/core/selectable.rst
+++ b/doc/build/core/selectable.rst
@@ -86,3 +86,4 @@ elements are themselves :class:`.ColumnElement` subclasses).
.. autoclass:: TextAsFrom
:members:
+ :inherited-members:
diff --git a/doc/build/core/tutorial.rst b/doc/build/core/tutorial.rst
index 5773cab40..04262ac5e 100644
--- a/doc/build/core/tutorial.rst
+++ b/doc/build/core/tutorial.rst
@@ -791,35 +791,127 @@ Above, we can see that bound parameters are specified in
:func:`~.expression.text` using the named colon format; this format is
consistent regardless of database backend. To send values in for the
parameters, we passed them into the :meth:`~.Connection.execute` method
-as additional arguments. Depending on how we are working, we can also
-send values to be associated directly with the :func:`~.expression.text`
-construct using the :meth:`~.TextClause.bindparams` method; if we are
-using datatypes that need special handling as they are received in Python,
-or we'd like to compose our :func:`~.expression.text` object into a larger
-expression, we may also wish to use the :meth:`~.TextClause.columns` method
-in order to specify column return types and names:
+as additional arguments.
+
+Specifying Bound Parameter Behaviors
+------------------------------------------
+
+The :func:`~.expression.text` construct supports pre-established bound values
+using the :meth:`.TextClause.bindparams` method::
+
+ stmt = text("SELECT * FROM users WHERE users.name BETWEEN :x AND :y")
+ stmt = stmt.bindparams(x="m", y="z")
+
+The parameters can also be explicitly typed::
+
+ stmt = stmt.bindparams(bindparam("x", String), bindparam("y", String))
+ result = conn.execute(stmt, {"x": "m", "y": "z"})
+
+Typing for bound parameters is necessary when the type requires Python-side
+or special SQL-side processing provided by the datatype.
+
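+As a hypothetical sketch, assuming the tutorial's ``conn`` connection and an
+imaginary ``log`` table, a :class:`.DateTime`-typed parameter ensures the
+datatype's bind processing is applied to the incoming value::
+
+    import datetime
+    from sqlalchemy import bindparam, text, DateTime
+
+    stmt = text("SELECT * FROM log WHERE log.timestamp > :ts")
+    stmt = stmt.bindparams(bindparam("ts", type_=DateTime))
+    result = conn.execute(stmt, {"ts": datetime.datetime(2016, 1, 1)})
+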
+.. seealso::
+
+ :meth:`.TextClause.bindparams` - full method description
+
+.. _sqlexpression_text_columns:
+
+Specifying Result-Column Behaviors
+----------------------------------------------
+
+We may also specify information about the result columns using the
+:meth:`.TextClause.columns` method; this method can be used to specify
+the return types, based on name::
+
+ stmt = stmt.columns(id=Integer, name=String)
+
+or it can be passed full column expressions positionally, either typed
+or untyped. In this case it's a good idea to list out the columns
+explicitly within our textual SQL, since the correlation of our column
+expressions to the SQL will be done positionally::
+
+ stmt = text("SELECT id, name FROM users")
+ stmt = stmt.columns(users.c.id, users.c.name)
+
+When we call the :meth:`.TextClause.columns` method, we get back a
+:class:`.TextAsFrom` object that supports the full suite of
+:attr:`.TextAsFrom.c` and other "selectable" operations::
+
+ j = stmt.join(addresses, stmt.c.id == addresses.c.user_id)
+
+ new_stmt = select([stmt.c.id, addresses.c.id]).\
+ select_from(j).where(stmt.c.name == 'x')
+
+The positional form of :meth:`.TextClause.columns` is particularly useful
+when relating textual SQL to existing Core or ORM models, because we can use
+column expressions directly without worrying about name conflicts or other issues with the
+result column names in the textual SQL:
.. sourcecode:: pycon+sql
- >>> s = text(
- ... "SELECT users.fullname || ', ' || addresses.email_address AS title "
- ... "FROM users, addresses "
- ... "WHERE users.id = addresses.user_id "
- ... "AND users.name BETWEEN :x AND :y "
- ... "AND (addresses.email_address LIKE :e1 "
- ... "OR addresses.email_address LIKE :e2)")
- >>> s = s.columns(title=String)
- >>> s = s.bindparams(x='m', y='z', e1='%@aol.com', e2='%@msn.com')
- >>> conn.execute(s).fetchall()
- SELECT users.fullname || ', ' || addresses.email_address AS title
- FROM users, addresses
- WHERE users.id = addresses.user_id AND users.name BETWEEN ? AND ? AND
- (addresses.email_address LIKE ? OR addresses.email_address LIKE ?)
- ('m', 'z', '%@aol.com', '%@msn.com')
- {stop}[(u'Wendy Williams, wendy@aol.com',)]
+ >>> stmt = text("SELECT users.id, addresses.id, users.id, "
+ ... "users.name, addresses.email_address AS email "
+ ... "FROM users JOIN addresses ON users.id=addresses.user_id "
+ ... "WHERE users.id = 1").columns(
+ ... users.c.id,
+ ... addresses.c.id,
+ ... addresses.c.user_id,
+ ... users.c.name,
+ ... addresses.c.email_address
+ ... )
+ {sql}>>> result = conn.execute(stmt)
+ SELECT users.id, addresses.id, users.id, users.name,
+ addresses.email_address AS email
+ FROM users JOIN addresses ON users.id=addresses.user_id WHERE users.id = 1
+ ()
+ {stop}
+
+Above, there are three columns in the result that are named "id", but since
+we've associated these with column expressions positionally, the names aren't an issue
+when the result-columns are fetched using the actual column object as a key.
+Fetching the ``email_address`` column would be::
+
+ >>> row = result.fetchone()
+ >>> row[addresses.c.email_address]
+ 'jack@yahoo.com'
+
+If on the other hand we used a string column key, the usual rules of name-
+based matching still apply, and we'd get an ambiguous column error for
+the ``id`` value::
+
+ >>> row["id"]
+ Traceback (most recent call last):
+ ...
+ InvalidRequestError: Ambiguous column name 'id' in result set column descriptions
+
+It's important to note that while accessing columns from a result set using
+:class:`.Column` objects may seem unusual, it is in fact the only system
+used by the ORM, which occurs transparently beneath the facade of the
+:class:`~.orm.query.Query` object; in this way, the :meth:`.TextClause.columns` method
+is typically very applicable to textual statements to be used in an ORM
+context. The example at :ref:`orm_tutorial_literal_sql` illustrates
+a simple usage.
+
+.. versionadded:: 1.1
+
+ The :meth:`.TextClause.columns` method now accepts column expressions
+ which will be matched positionally to a plain text SQL result set,
+ eliminating the need for column names to match or even be unique in the
+ SQL statement when matching table metadata or ORM models to textual SQL.
+
+.. seealso::
+
+ :meth:`.TextClause.columns` - full method description
-:func:`~.expression.text` can also be used freely within a
+ :ref:`orm_tutorial_literal_sql` - integrating ORM-level queries with
+ :func:`.text`
+
+
+Using text() fragments inside bigger statements
+-----------------------------------------------
+
+:func:`~.expression.text` can also be used to produce fragments of SQL
+that can be used freely within a
:func:`~.expression.select` object, which accepts :func:`~.expression.text`
objects as an argument for most of its builder functions.
Below, we combine the usage of :func:`~.expression.text` within a
@@ -850,30 +942,13 @@ need to refer to any pre-established :class:`.Table` metadata:
('%@aol.com', '%@msn.com')
{stop}[(u'Wendy Williams, wendy@aol.com',)]
-.. topic:: Why not use strings everywhere?
-
- When we use literal strings, the Core can't adapt our SQL to work
- on different database backends. Above, our expression won't work
- with MySQL since MySQL doesn't have the ``||`` construct.
- If we only use :func:`.text` to specify columns, our :func:`.select`
- construct will have an empty ``.c`` collection
- that we'd normally use to create subqueries.
- We also lose typing information about result columns and bound parameters,
- which is often needed to correctly translate data values between
- Python and the database. Overall, the more :func:`.text` we use,
- the less flexibility and ability for manipulation/transformation
- the statement will have.
-
-.. seealso::
-
- :ref:`orm_tutorial_literal_sql` - integrating ORM-level queries with
- :func:`.text`
-
-.. fchanged:: 1.0.0
+.. versionchanged:: 1.0.0
The :func:`.select` construct emits warnings when string SQL
fragments are coerced to :func:`.text`, and :func:`.text` should
be used explicitly. See :ref:`migration_2992` for background.
+
+
.. _sqlexpression_literal_column:
Using More Specific Text with :func:`.table`, :func:`.literal_column`, and :func:`.column`
diff --git a/doc/build/core/type_basics.rst b/doc/build/core/type_basics.rst
index ec3c14dd6..9edba0061 100644
--- a/doc/build/core/type_basics.rst
+++ b/doc/build/core/type_basics.rst
@@ -38,9 +38,6 @@ database column type available on the target database when issuing a
type is emitted in ``CREATE TABLE``, such as ``VARCHAR`` see `SQL
Standard Types`_ and the other sections of this chapter.
-.. autoclass:: Array
- :members:
-
.. autoclass:: BigInteger
:members:
@@ -101,12 +98,19 @@ Standard Types`_ and the other sections of this chapter.
.. _types_sqlstandard:
-SQL Standard Types
-------------------
+SQL Standard and Multiple Vendor Types
+--------------------------------------
+
+This category of types refers to types that are either part of the
+SQL standard, or are potentially found within a subset of database backends.
+Unlike the "generic" types, the SQL standard/multi-vendor types have **no**
+guarantee of working on all backends, and will only work on those backends
+that explicitly support them by name. That is, the type will always emit
+its exact name in DDL when ``CREATE TABLE`` is issued.
-The SQL standard types always create database column types of the same
-name when ``CREATE TABLE`` is issued. Some types may not be supported
-on all databases.
+
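+As a hypothetical sketch, a table using the multi-vendor :class:`.ARRAY` type
+emits that type's name verbatim in ``CREATE TABLE`` on backends that support
+it (e.g. ``INTEGER[]`` on PostgreSQL) and fails on backends that don't::
+
+    from sqlalchemy import MetaData, Table, Column, Integer, ARRAY
+
+    metadata = MetaData()
+    scores = Table(
+        'scores', metadata,
+        Column('id', Integer, primary_key=True),
+        Column('values', ARRAY(Integer))  # "values INTEGER[]" on PostgreSQL
+    )
+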
+.. autoclass:: ARRAY
+ :members:
.. autoclass:: BIGINT
@@ -140,6 +144,9 @@ on all databases.
.. autoclass:: INT
+.. autoclass:: JSON
+ :members:
+
.. autoclass:: sqlalchemy.types.INTEGER
diff --git a/doc/build/dialects/mysql.rst b/doc/build/dialects/mysql.rst
index 33a0d783b..100f2d2e3 100644
--- a/doc/build/dialects/mysql.rst
+++ b/doc/build/dialects/mysql.rst
@@ -74,6 +74,8 @@ construction arguments, are as follows:
.. autoclass:: INTEGER
:members: __init__
+.. autoclass:: JSON
+ :members:
.. autoclass:: LONGBLOB
:members: __init__
diff --git a/doc/build/dialects/postgresql.rst b/doc/build/dialects/postgresql.rst
index 7e2a20ef7..616924685 100644
--- a/doc/build/dialects/postgresql.rst
+++ b/doc/build/dialects/postgresql.rst
@@ -44,9 +44,7 @@ construction arguments, are as follows:
.. autoclass:: BYTEA
:members: __init__
-
.. autoclass:: CIDR
- :members: __init__
.. autoclass:: DOUBLE_PRECISION
@@ -78,9 +76,6 @@ construction arguments, are as follows:
.. autoclass:: JSONB
:members:
-.. autoclass:: JSONElement
- :members:
-
.. autoclass:: MACADDR
:members: __init__
diff --git a/doc/build/orm/basic_relationships.rst b/doc/build/orm/basic_relationships.rst
index acb2dba01..de156c265 100644
--- a/doc/build/orm/basic_relationships.rst
+++ b/doc/build/orm/basic_relationships.rst
@@ -118,7 +118,7 @@ of the relationship. To convert one-to-many into one-to-one::
__tablename__ = 'child'
id = Column(Integer, primary_key=True)
parent_id = Column(Integer, ForeignKey('parent.id'))
- parent = relationship("Child", back_populates="child")
+ parent = relationship("Parent", back_populates="child")
Or for many-to-one::
@@ -131,7 +131,7 @@ Or for many-to-one::
class Child(Base):
__tablename__ = 'child'
id = Column(Integer, primary_key=True)
- parent = relationship("Child", back_populates="child", uselist=False)
+ parent = relationship("Parent", back_populates="child", uselist=False)
As always, the :paramref:`.relationship.backref` and :func:`.backref` functions
may be used in lieu of the :paramref:`.relationship.back_populates` approach;
@@ -369,13 +369,55 @@ extension allows the configuration of attributes which will
access two "hops" with a single access, one "hop" to the
associated object, and a second to a target attribute.
-.. note::
+.. warning::
- When using the association object pattern, it is advisable that the
- association-mapped table not be used as the
- :paramref:`~.relationship.secondary` argument on a
- :func:`.relationship` elsewhere, unless that :func:`.relationship`
- contains the option :paramref:`~.relationship.viewonly` set to
- ``True``. SQLAlchemy otherwise may attempt to emit redundant INSERT
- and DELETE statements on the same table, if similar state is
- detected on the related attribute as well as the associated object.
+ The association object pattern **does not coordinate changes with a
+ separate relationship that maps the association table as "secondary"**.
+
+ Below, changes made to ``Parent.children`` will not be coordinated
+ with changes made to ``Parent.child_associations`` or
+ ``Child.parent_associations`` in Python; while all of these relationships will continue
+ to function normally by themselves, changes on one will not show up in another
+ until the :class:`.Session` is expired, which normally occurs automatically
+ after :meth:`.Session.commit`::
+
+ class Association(Base):
+ __tablename__ = 'association'
+
+ left_id = Column(Integer, ForeignKey('left.id'), primary_key=True)
+ right_id = Column(Integer, ForeignKey('right.id'), primary_key=True)
+ extra_data = Column(String(50))
+
+ child = relationship("Child", back_populates="parent_associations")
+ parent = relationship("Parent", back_populates="child_associations")
+
+ class Parent(Base):
+ __tablename__ = 'left'
+ id = Column(Integer, primary_key=True)
+
+ children = relationship("Child", secondary="association")
+
+ class Child(Base):
+ __tablename__ = 'right'
+ id = Column(Integer, primary_key=True)
+
+ Additionally, just as changes to one relationship aren't reflected in the
+ others automatically, writing the same data to both relationships will cause
+ conflicting INSERT or DELETE statements as well, such as below where we
+ establish the same relationship between a ``Parent`` and ``Child`` object
+ twice::
+
+ p1 = Parent()
+ c1 = Child()
+ p1.children.append(c1)
+
+ # redundant, will cause a duplicate INSERT on Association
+ p1.parent_associations.append(Association(child=c1))
+
+ It's fine to use a mapping like the above if you know what
+ you're doing, though it may be a good idea to apply the ``viewonly=True`` parameter
+ to the "secondary" relationship to avoid the issue of redundant changes
+ being logged. However, to get a foolproof pattern that allows a simple
+ two-object ``Parent->Child`` relationship while still using the association
+ object pattern, use the association proxy extension
+ as documented at :ref:`associationproxy_toplevel`.
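+
+ A minimal sketch of that adjustment, applied to the ``Parent.children``
+ relationship from the example above::
+
+     class Parent(Base):
+         __tablename__ = 'left'
+         id = Column(Integer, primary_key=True)
+
+         # viewonly=True: this relationship is never flushed, so it cannot
+         # emit INSERT/DELETE that conflict with Parent.child_associations
+         children = relationship(
+             "Child", secondary="association", viewonly=True)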
diff --git a/doc/build/orm/collections.rst b/doc/build/orm/collections.rst
index 7d474ce65..577cd233e 100644
--- a/doc/build/orm/collections.rst
+++ b/doc/build/orm/collections.rst
@@ -150,6 +150,10 @@ instances of ``MyOtherClass`` which are not loaded, SQLAlchemy assumes that
"ON DELETE CASCADE" rules will ensure that those rows are deleted by the
database.
+.. seealso::
+
+ :paramref:`.orm.mapper.passive_deletes` - similar feature on :func:`.mapper`
+
.. currentmodule:: sqlalchemy.orm.collections
.. _custom_collections:
diff --git a/doc/build/orm/extensions/declarative/inheritance.rst b/doc/build/orm/extensions/declarative/inheritance.rst
index 684b07bfd..20a51efb2 100644
--- a/doc/build/orm/extensions/declarative/inheritance.rst
+++ b/doc/build/orm/extensions/declarative/inheritance.rst
@@ -8,6 +8,11 @@ as possible. The ``inherits`` mapper keyword argument is not needed
as declarative will determine this from the class itself. The various
"polymorphic" keyword arguments are specified using ``__mapper_args__``.
+.. seealso::
+
+ :ref:`inheritance_toplevel` - general introduction to inheritance
+ mapping with Declarative.
+
Joined Table Inheritance
~~~~~~~~~~~~~~~~~~~~~~~~
@@ -41,10 +46,6 @@ only the ``engineers.id`` column, give it a different attribute name::
primary_language = Column(String(50))
-.. versionchanged:: 0.7 joined table inheritance favors the subclass
- column over that of the superclass, such as querying above
- for ``Engineer.id``. Prior to 0.7 this was the reverse.
-
.. _declarative_single_table:
Single Table Inheritance
@@ -190,10 +191,12 @@ The same concept can be used with mixin classes (see
The above mixin checks the local ``__table__`` attribute for the column.
Because we're using single table inheritance, we're sure that in this case,
-``cls.__table__`` refers to ``People.__table__``. If we were mixing joined-
+``cls.__table__`` refers to ``Person.__table__``. If we were mixing joined-
and single-table inheritance, we might want our mixin to check more carefully
if ``cls.__table__`` is really the :class:`.Table` we're looking for.
+.. _declarative_concrete_table:
+
Concrete Table Inheritance
~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -245,74 +248,13 @@ before the class is built::
__table__ = managers
__mapper_args__ = {'polymorphic_identity':'manager', 'concrete':True}
-.. _declarative_concrete_helpers:
-
-Using the Concrete Helpers
-^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Helper classes provides a simpler pattern for concrete inheritance.
-With these objects, the ``__declare_first__`` helper is used to configure the
-"polymorphic" loader for the mapper after all subclasses have been declared.
-
-.. versionadded:: 0.7.3
-
-An abstract base can be declared using the
-:class:`.AbstractConcreteBase` class::
-
- from sqlalchemy.ext.declarative import AbstractConcreteBase
-
- class Employee(AbstractConcreteBase, Base):
- pass
-
-To have a concrete ``employee`` table, use :class:`.ConcreteBase` instead::
-
- from sqlalchemy.ext.declarative import ConcreteBase
-
- class Employee(ConcreteBase, Base):
- __tablename__ = 'employee'
- employee_id = Column(Integer, primary_key=True)
- name = Column(String(50))
- __mapper_args__ = {
- 'polymorphic_identity':'employee',
- 'concrete':True}
-
-
-Either ``Employee`` base can be used in the normal fashion::
-
- class Manager(Employee):
- __tablename__ = 'manager'
- employee_id = Column(Integer, primary_key=True)
- name = Column(String(50))
- manager_data = Column(String(40))
- __mapper_args__ = {
- 'polymorphic_identity':'manager',
- 'concrete':True}
-
- class Engineer(Employee):
- __tablename__ = 'engineer'
- employee_id = Column(Integer, primary_key=True)
- name = Column(String(50))
- engineer_info = Column(String(40))
- __mapper_args__ = {'polymorphic_identity':'engineer',
- 'concrete':True}
-
-
-The :class:`.AbstractConcreteBase` class is itself mapped, and can be
-used as a target of relationships::
-
- class Company(Base):
- __tablename__ = 'company'
-
- id = Column(Integer, primary_key=True)
- employees = relationship("Employee",
- primaryjoin="Company.id == Employee.company_id")
-
-
-.. versionchanged:: 0.9.3 Support for use of :class:`.AbstractConcreteBase`
- as the target of a :func:`.relationship` has been improved.
+The helper classes :class:`.AbstractConcreteBase` and :class:`.ConcreteBase`
+provide automation for the above system of creating a polymorphic union.
+See the documentation for these helpers as well as the main ORM documentation
+on concrete inheritance for details.
-It can also be queried directly::
+.. seealso::
- for employee in session.query(Employee).filter(Employee.name == 'qbert'):
- print(employee)
+
+ :ref:`concrete_inheritance`
+
+ :ref:`inheritance_concrete_helpers`
diff --git a/doc/build/orm/inheritance.rst b/doc/build/orm/inheritance.rst
index 290d8099e..f640973c4 100644
--- a/doc/build/orm/inheritance.rst
+++ b/doc/build/orm/inheritance.rst
@@ -579,53 +579,114 @@ their own.
Concrete Table Inheritance
--------------------------
-.. note::
-
- this section is currently using classical mappings. The
- Declarative system fully supports concrete inheritance
- however. See the links below for more information on using
- declarative with concrete table inheritance.
-
-This form of inheritance maps each class to a distinct table, as below:
+This form of inheritance maps each class to a distinct table. As concrete
+inheritance has a bit more conceptual overhead, first we'll illustrate
+what these tables look like as Core table metadata:
.. sourcecode:: python+sql
- employees_table = Table('employees', metadata,
- Column('employee_id', Integer, primary_key=True),
+ employees_table = Table(
+ 'employee', metadata,
+ Column('id', Integer, primary_key=True),
Column('name', String(50)),
)
- managers_table = Table('managers', metadata,
- Column('employee_id', Integer, primary_key=True),
+ managers_table = Table(
+ 'manager', metadata,
+ Column('id', Integer, primary_key=True),
Column('name', String(50)),
Column('manager_data', String(50)),
)
- engineers_table = Table('engineers', metadata,
- Column('employee_id', Integer, primary_key=True),
+ engineers_table = Table(
+ 'engineer', metadata,
+ Column('id', Integer, primary_key=True),
Column('name', String(50)),
Column('engineer_info', String(50)),
)
-Notice in this case there is no ``type`` column. If polymorphic loading is not
-required, there's no advantage to using ``inherits`` here; you just define a
-separate mapper for each class.
+Notice in this case there is no ``type`` column; for polymorphic loading,
+additional steps will be needed in order to "manufacture" this information
+during a query.
-.. sourcecode:: python+sql
+Using classical mapping, we can map our three classes independently without
+any relationship between them; the fact that ``Engineer`` and ``Manager``
+inherit from ``Employee`` does not have any impact on a classical mapping::
+
+ class Employee(object):
+ pass
+
+ class Manager(Employee):
+ pass
+
+ class Engineer(Employee):
+ pass
mapper(Employee, employees_table)
mapper(Manager, managers_table)
mapper(Engineer, engineers_table)
-To load polymorphically, the ``with_polymorphic`` argument is required, along
-with a selectable indicating how rows should be loaded. In this case we must
-construct a UNION of all three tables. SQLAlchemy includes a helper function
-to create these called :func:`~sqlalchemy.orm.util.polymorphic_union`, which
+However, when using Declarative, an inheritance mapping between the classes
+is assumed, because they are already in an inheritance relationship.
+So to map our three classes declaratively, we must include the
+:paramref:`.orm.mapper.concrete` parameter within the ``__mapper_args__``::
+
+ class Employee(Base):
+ __tablename__ = 'employee'
+
+ id = Column(Integer, primary_key=True)
+ name = Column(String(50))
+
+ class Manager(Employee):
+ __tablename__ = 'manager'
+
+ id = Column(Integer, primary_key=True)
+ name = Column(String(50))
+ manager_data = Column(String(50))
+
+ __mapper_args__ = {
+ 'concrete': True
+ }
+
+ class Engineer(Employee):
+ __tablename__ = 'engineer'
+
+ id = Column(Integer, primary_key=True)
+ name = Column(String(50))
+ engineer_info = Column(String(50))
+
+ __mapper_args__ = {
+ 'concrete': True
+ }
+
+Two critical points should be noted:
+
+* We must **define all columns explicitly** on each subclass, even those of
+ the same name. A column such as
+ ``Employee.name`` here is **not** copied out to the tables mapped
+ by ``Manager`` or ``Engineer`` for us.
+
+* While the ``Engineer`` and ``Manager`` classes are
+ mapped in an inheritance relationship with ``Employee``, they still **do not
+ include polymorphic loading**, as the sketch following this list illustrates.
+
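+Illustrating the second point above with a minimal, hypothetical sketch, each
+class queries only against its own table::
+
+    session.query(Employee).all()   # rows from the "employee" table only
+    session.query(Engineer).all()   # rows from the "engineer" table only
+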
+Concrete Polymorphic Loading
++++++++++++++++++++++++++++++
+
+To load polymorphically, the :paramref:`.orm.mapper.with_polymorphic` argument is required, along
+with a selectable indicating how rows should be loaded. Polymorphic loading
+is at its most inefficient with concrete inheritance, so while this style of
+loading is possible, it is less recommended. In the case of concrete
+inheritance, it means we must construct a UNION of all three tables.
+
+First illustrating this with classical mapping, SQLAlchemy includes a helper
+function to create this UNION called :func:`~sqlalchemy.orm.util.polymorphic_union`, which
will map all the different columns into a structure of selects with the same
numbers and names of columns, and also generate a virtual ``type`` column for
-each subselect:
+each subselect. The function is called **after** all three tables are declared,
+and is then combined with the mappers::
-.. sourcecode:: python+sql
+ from sqlalchemy.orm import polymorphic_union
pjoin = polymorphic_union({
'employee': employees_table,
@@ -652,34 +713,171 @@ Upon select, the polymorphic union produces a query like this:
session.query(Employee).all()
{opensql}
- SELECT pjoin.type AS pjoin_type,
- pjoin.manager_data AS pjoin_manager_data,
- pjoin.employee_id AS pjoin_employee_id,
- pjoin.name AS pjoin_name, pjoin.engineer_info AS pjoin_engineer_info
+ SELECT
+ pjoin.id AS pjoin_id,
+ pjoin.name AS pjoin_name,
+ pjoin.type AS pjoin_type,
+ pjoin.manager_data AS pjoin_manager_data,
+ pjoin.engineer_info AS pjoin_engineer_info
FROM (
- SELECT employees.employee_id AS employee_id,
- CAST(NULL AS VARCHAR(50)) AS manager_data, employees.name AS name,
- CAST(NULL AS VARCHAR(50)) AS engineer_info, 'employee' AS type
- FROM employees
- UNION ALL
- SELECT managers.employee_id AS employee_id,
- managers.manager_data AS manager_data, managers.name AS name,
- CAST(NULL AS VARCHAR(50)) AS engineer_info, 'manager' AS type
- FROM managers
- UNION ALL
- SELECT engineers.employee_id AS employee_id,
- CAST(NULL AS VARCHAR(50)) AS manager_data, engineers.name AS name,
- engineers.engineer_info AS engineer_info, 'engineer' AS type
- FROM engineers
+ SELECT
+ employee.id AS id,
+ employee.name AS name,
+ CAST(NULL AS VARCHAR(50)) AS manager_data,
+ CAST(NULL AS VARCHAR(50)) AS engineer_info,
+ 'employee' AS type
+ FROM employee
+ UNION ALL
+ SELECT
+ manager.id AS id,
+ manager.name AS name,
+ manager.manager_data AS manager_data,
+ CAST(NULL AS VARCHAR(50)) AS engineer_info,
+ 'manager' AS type
+ FROM manager
+ UNION ALL
+ SELECT
+ engineer.id AS id,
+ engineer.name AS name,
+ CAST(NULL AS VARCHAR(50)) AS manager_data,
+ engineer.engineer_info AS engineer_info,
+ 'engineer' AS type
+ FROM engineer
) AS pjoin
- []
-Concrete Inheritance with Declarative
-++++++++++++++++++++++++++++++++++++++
+The above UNION query needs to manufacture "NULL" columns for each subtable
+in order to accommodate those columns that aren't part of that particular mapping.
+
+In order to map with concrete inheritance and polymorphic loading using
+Declarative, the challenge is to have the polymorphic union ready to go
+when the mappings are created. One way to achieve this is to continue to
+define the table metadata before the actual mapped classes, and specify
+them to each class using ``__table__``::
+
+ class Employee(Base):
+ __table__ = employees_table
+ __mapper_args__ = {
+ 'polymorphic_on':pjoin.c.type,
+ 'with_polymorphic': ('*', pjoin),
+ 'polymorphic_identity':'employee'
+ }
+
+ class Engineer(Employee):
+ __table__ = engineers_table
+ __mapper_args__ = {'polymorphic_identity':'engineer', 'concrete':True}
+
+ class Manager(Employee):
+ __table__ = managers_table
+ __mapper_args__ = {'polymorphic_identity':'manager', 'concrete':True}
+
+.. _inheritance_concrete_helpers:
+
+Using the Declarative Helper Classes
++++++++++++++++++++++++++++++++++++++
+
+Another way is to use a special helper class that takes on the fairly
+complicated task of deferring the production of :class:`.Mapper` objects
+until all table metadata has been collected, and the polymorphic union to which
+the mappers will be associated is available. This is available via
+the :class:`.AbstractConcreteBase` and :class:`.ConcreteBase` classes. For
+our example here, we're using a "concrete" base, e.g. an ``Employee`` row
+can exist by itself that is not an ``Engineer`` or a ``Manager``. The
+mapping would look like::
+
+ from sqlalchemy.ext.declarative import ConcreteBase
+
+ class Employee(ConcreteBase, Base):
+ __tablename__ = 'employee'
+ id = Column(Integer, primary_key=True)
+ name = Column(String(50))
+
+ __mapper_args__ = {
+ 'polymorphic_identity':'employee',
+ 'concrete':True
+ }
+
+ class Manager(Employee):
+ __tablename__ = 'manager'
+ id = Column(Integer, primary_key=True)
+ name = Column(String(50))
+ manager_data = Column(String(40))
+
+ __mapper_args__ = {
+ 'polymorphic_identity':'manager',
+ 'concrete':True
+ }
+
+ class Engineer(Employee):
+ __tablename__ = 'engineer'
+ id = Column(Integer, primary_key=True)
+ name = Column(String(50))
+ engineer_info = Column(String(40))
+
+ __mapper_args__ = {
+ 'polymorphic_identity':'engineer',
+ 'concrete':True
+ }
+
+There is also the option to use a so-called "abstract" base, where we won't
+actually have an ``employee`` table at all, and instead will only have
+``manager`` and ``engineer`` tables. The ``Employee`` class will never be
+instantiated directly. The change here is that the base mapper is mapped
+directly to the "polymorphic union" selectable, which no longer includes
+the ``employee`` table. In classical mapping, this is::
+
+ from sqlalchemy.orm import polymorphic_union
+
+ pjoin = polymorphic_union({
+ 'manager': managers_table,
+ 'engineer': engineers_table
+ }, 'type', 'pjoin')
+
+ employee_mapper = mapper(Employee, pjoin,
+ with_polymorphic=('*', pjoin),
+ polymorphic_on=pjoin.c.type)
+ manager_mapper = mapper(Manager, managers_table,
+ inherits=employee_mapper,
+ concrete=True,
+ polymorphic_identity='manager')
+ engineer_mapper = mapper(Engineer, engineers_table,
+ inherits=employee_mapper,
+ concrete=True,
+ polymorphic_identity='engineer')
+
+Using the Declarative helpers, the :class:`.AbstractConcreteBase` helper
+can produce this; the mapping would be::
+
+ from sqlalchemy.ext.declarative import AbstractConcreteBase
+
+ class Employee(AbstractConcreteBase, Base):
+ pass
+
+ class Manager(Employee):
+ __tablename__ = 'manager'
+ id = Column(Integer, primary_key=True)
+ name = Column(String(50))
+ manager_data = Column(String(40))
+
+ __mapper_args__ = {
+ 'polymorphic_identity':'manager',
+ 'concrete':True
+ }
+
+ class Engineer(Employee):
+ __tablename__ = 'engineer'
+ id = Column(Integer, primary_key=True)
+ name = Column(String(50))
+ engineer_info = Column(String(40))
+
+ __mapper_args__ = {
+ 'polymorphic_identity':'engineer',
+ 'concrete':True
+ }
+
+.. seealso::
+
+ :ref:`declarative_concrete_table` - in the Declarative reference documentation
-.. versionadded:: 0.7.3
- The :ref:`declarative_toplevel` module includes helpers for concrete
- inheritance. See :ref:`declarative_concrete_helpers` for more information.
Using Relationships with Inheritance
------------------------------------
diff --git a/doc/build/orm/relationship_persistence.rst b/doc/build/orm/relationship_persistence.rst
index d4fca2c93..597f674ed 100644
--- a/doc/build/orm/relationship_persistence.rst
+++ b/doc/build/orm/relationship_persistence.rst
@@ -1,4 +1,4 @@
-Special Relationship Persistence Patterns
+Special Relationship Persistence Patterns
=========================================
.. _post_update:
@@ -209,6 +209,13 @@ referential integrity is used. When using SQLite, referential integrity
should be enabled, using the configuration described at
:ref:`sqlite_foreign_keys`.
+.. seealso::
+
+ :ref:`passive_deletes` - supporting ON DELETE CASCADE with relationships
+
+ :paramref:`.orm.mapper.passive_updates` - similar feature on :func:`.mapper`
+
+
Simulating limited ON UPDATE CASCADE without foreign key support
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/doc/build/orm/session_basics.rst b/doc/build/orm/session_basics.rst
index dd1162216..0f96ba50a 100644
--- a/doc/build/orm/session_basics.rst
+++ b/doc/build/orm/session_basics.rst
@@ -247,7 +247,7 @@ While there's no one-size-fits-all recommendation for how transaction
scope should be determined, there are common patterns. Especially
if one is writing a web application, the choice is pretty much established.
-A web application is the easiest case because such an appication is already
+A web application is the easiest case because such an application is already
constructed around a single, consistent scope - this is the **request**,
which represents an incoming request from a browser, the processing
of that request to formulate a response, and finally the delivery of that
diff --git a/doc/build/orm/session_events.rst b/doc/build/orm/session_events.rst
index ecfc5176f..27e17717f 100644
--- a/doc/build/orm/session_events.rst
+++ b/doc/build/orm/session_events.rst
@@ -50,7 +50,7 @@ examples such as :ref:`examples_versioned_history` and
^^^^^^^^^^^^^^^^^
The :meth:`.SessionEvents.after_flush` hook is called after the SQL has been
-emitted for a flush process, but **before* the state of the objects that
+emitted for a flush process, but **before** the state of the objects that
were flushed has been altered. That is, you can still inspect
the :attr:`.Session.new`, :attr:`.Session.dirty` and
:attr:`.Session.deleted` collections to see what was just flushed, and
diff --git a/doc/build/orm/tutorial.rst b/doc/build/orm/tutorial.rst
index 53f161003..559367121 100644
--- a/doc/build/orm/tutorial.rst
+++ b/doc/build/orm/tutorial.rst
@@ -346,8 +346,8 @@ used, it retrieves a connection from a pool of connections maintained by the
session object.
-Adding New Objects
-==================
+Adding and Updating Objects
+===========================
To persist our ``User`` object, we :meth:`~.Session.add` it to our :class:`~sqlalchemy.orm.session.Session`::
@@ -438,7 +438,10 @@ and that three new ``User`` objects are pending:
We tell the :class:`~sqlalchemy.orm.session.Session` that we'd like to issue
all remaining changes to the database and commit the transaction, which has
-been in progress throughout. We do this via :meth:`~.Session.commit`:
+been in progress throughout. We do this via :meth:`~.Session.commit`. The
+:class:`~sqlalchemy.orm.session.Session` emits the ``UPDATE`` statement
+for the password change on "ed", as well as ``INSERT`` statements for the
+three new ``User`` objects we've added:
.. sourcecode:: python+sql
@@ -861,37 +864,19 @@ database results. Here's a brief tour:
.. sourcecode:: python+sql
- {sql}>>> from sqlalchemy.orm.exc import MultipleResultsFound
- >>> try:
- ... user = query.one()
- ... except MultipleResultsFound as e:
- ... print(e)
- SELECT users.id AS users_id,
- users.name AS users_name,
- users.fullname AS users_fullname,
- users.password AS users_password
- FROM users
- WHERE users.name LIKE ? ORDER BY users.id
- ('%ed',)
- {stop}Multiple rows were found for one()
+ >>> user = query.one()
+ Traceback (most recent call last):
+ ...
+ MultipleResultsFound: Multiple rows were found for one()
With no rows found:
.. sourcecode:: python+sql
- {sql}>>> from sqlalchemy.orm.exc import NoResultFound
- >>> try:
- ... user = query.filter(User.id == 99).one()
- ... except NoResultFound as e:
- ... print(e)
- SELECT users.id AS users_id,
- users.name AS users_name,
- users.fullname AS users_fullname,
- users.password AS users_password
- FROM users
- WHERE users.name LIKE ? AND users.id = ? ORDER BY users.id
- ('%ed', 99)
- {stop}No row was found for one()
+ >>> user = query.filter(User.id == 99).one()
+ Traceback (most recent call last):
+ ...
+ NoResultFound: No row was found for one()
The :meth:`~.Query.one` method is great for systems that expect to handle
"no items found" versus "multiple items found" differently; such as a RESTful
@@ -965,10 +950,12 @@ method:
(224, 'fred')
{stop}<User(name='fred', fullname='Fred Flinstone', password='blah')>
-To use an entirely string-based statement, using
-:meth:`~sqlalchemy.orm.query.Query.from_statement()`; just ensure that the
-columns clause of the statement contains the column names normally used by the
-mapper (below illustrated using an asterisk):
+To use an entirely string-based statement, a :func:`.text` construct
+representing a complete statement can be passed to
+:meth:`~sqlalchemy.orm.query.Query.from_statement()`. Without additional
+specifiers, the columns in the string SQL are matched to the model columns
+based on name, such as below where we use just an asterisk to represent
+loading all columns:
.. sourcecode:: python+sql
@@ -979,19 +966,37 @@ mapper (below illustrated using an asterisk):
('ed',)
{stop}[<User(name='ed', fullname='Ed Jones', password='f8s7ccs')>]
-Or alternatively, specify how the columns map to the :func:`.text` construct
-explicitly using the :meth:`.TextClause.columns` method:
+Matching columns on name works for simple cases but can become unwieldy when
+dealing with complex statements that contain duplicate column names or when
+using anonymized ORM constructs that don't easily match to specific names.
+Additionally, there is typing behavior present in our mapped columns that
+we might find necessary when handling result rows. For these cases,
+the :func:`~.expression.text` construct allows us to link its textual SQL
+to Core or ORM-mapped column expressions positionally; we can achieve this
+by passing column expressions as positional arguments to the
+:meth:`.TextClause.columns` method:
.. sourcecode:: python+sql
- >>> stmt = text("SELECT name, id FROM users where name=:name")
- >>> stmt = stmt.columns(User.name, User.id)
+ >>> stmt = text("SELECT name, id, fullname, password "
+ ... "FROM users where name=:name")
+ >>> stmt = stmt.columns(User.name, User.id, User.fullname, User.password)
{sql}>>> session.query(User).from_statement(stmt).params(name='ed').all()
- SELECT name, id FROM users where name=?
+ SELECT name, id, fullname, password FROM users where name=?
('ed',)
{stop}[<User(name='ed', fullname='Ed Jones', password='f8s7ccs')>]
-We can choose columns to return individually as well, as in any other case:
+.. versionadded:: 1.1
+
+ The :meth:`.TextClause.columns` method now accepts column expressions
+ which will be matched positionally to a plain text SQL result set,
+ eliminating the need for column names to match or even be unique in the
+ SQL statement.
+
+When selecting from a :func:`~.expression.text` construct, the :class:`.Query`
+may still specify what columns and entities are to be returned; instead of
+``query(User)`` we can also ask for the columns individually, as in
+any other case:
.. sourcecode:: python+sql
@@ -1008,11 +1013,6 @@ We can choose columns to return individually as well, as in any other case:
:ref:`sqlexpression_text` - The :func:`.text` construct explained
from the perspective of Core-only queries.
-.. versionchanged:: 1.0.0
- The :class:`.Query` construct emits warnings when string SQL
- fragments are coerced to :func:`.text`, and :func:`.text` should
- be used explicitly. See :ref:`migration_2992` for background.
-
Counting
--------
diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py
index 12d4e8d1c..dde179bf5 100644
--- a/lib/sqlalchemy/__init__.py
+++ b/lib/sqlalchemy/__init__.py
@@ -51,10 +51,11 @@ from .sql import (
union,
union_all,
update,
+ within_group,
)
from .types import (
- Array,
+ ARRAY,
BIGINT,
BINARY,
BLOB,
@@ -76,6 +77,7 @@ from .types import (
INTEGER,
Integer,
Interval,
+ JSON,
LargeBinary,
NCHAR,
NVARCHAR,
diff --git a/lib/sqlalchemy/cextension/resultproxy.c b/lib/sqlalchemy/cextension/resultproxy.c
index ae2a059cf..9c4d0c7e4 100644
--- a/lib/sqlalchemy/cextension/resultproxy.c
+++ b/lib/sqlalchemy/cextension/resultproxy.c
@@ -315,8 +315,11 @@ BaseRowProxy_subscript(BaseRowProxy *self, PyObject *key)
if (exception == NULL)
return NULL;
- // wow. this seems quite excessive.
- cstr_obj = PyObject_Str(key);
+ cstr_obj = PyTuple_GetItem(record, 1);
+ if (cstr_obj == NULL)
+ return NULL;
+
+ cstr_obj = PyObject_Str(cstr_obj);
if (cstr_obj == NULL)
return NULL;
@@ -326,6 +329,8 @@ BaseRowProxy_subscript(BaseRowProxy *self, PyObject *key)
InvalidRequestError without any message like in the
python version.
*/
+
+
#if PY_MAJOR_VERSION >= 3
bytes = PyUnicode_AsASCIIString(cstr_obj);
if (bytes == NULL)
@@ -341,8 +346,8 @@ BaseRowProxy_subscript(BaseRowProxy *self, PyObject *key)
Py_DECREF(cstr_obj);
PyErr_Format(exception,
- "Ambiguous column name '%.200s' in result set! "
- "try 'use_labels' option on select statement.", cstr_key);
+ "Ambiguous column name '%.200s' in "
+ "result set column descriptions", cstr_key);
return NULL;
}
diff --git a/lib/sqlalchemy/dialects/__init__.py b/lib/sqlalchemy/dialects/__init__.py
index d90a83809..f851a4ab8 100644
--- a/lib/sqlalchemy/dialects/__init__.py
+++ b/lib/sqlalchemy/dialects/__init__.py
@@ -43,3 +43,5 @@ def _auto_fn(name):
return None
registry = util.PluginLoader("sqlalchemy.dialects", auto_fn=_auto_fn)
+
+plugins = util.PluginLoader("sqlalchemy.plugins") \ No newline at end of file
diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py
index 1ee328e83..815529c88 100644
--- a/lib/sqlalchemy/dialects/mssql/base.py
+++ b/lib/sqlalchemy/dialects/mssql/base.py
@@ -1157,7 +1157,7 @@ class MSSQLCompiler(compiler.SQLCompiler):
def visit_extract(self, extract, **kw):
field = self.extract_map.get(extract.field, extract.field)
- return 'DATEPART("%s", %s)' % \
+ return 'DATEPART(%s, %s)' % \
(field, self.process(extract.expr, **kw))
def visit_savepoint(self, savepoint_stmt):
diff --git a/lib/sqlalchemy/dialects/mysql/__init__.py b/lib/sqlalchemy/dialects/mysql/__init__.py
index c1f78bd1d..ca204fcd1 100644
--- a/lib/sqlalchemy/dialects/mysql/__init__.py
+++ b/lib/sqlalchemy/dialects/mysql/__init__.py
@@ -15,7 +15,7 @@ base.dialect = mysqldb.dialect
from .base import \
BIGINT, BINARY, BIT, BLOB, BOOLEAN, CHAR, DATE, DATETIME, \
DECIMAL, DOUBLE, ENUM, DECIMAL,\
- FLOAT, INTEGER, INTEGER, LONGBLOB, LONGTEXT, MEDIUMBLOB, \
+ FLOAT, INTEGER, INTEGER, JSON, LONGBLOB, LONGTEXT, MEDIUMBLOB, \
MEDIUMINT, MEDIUMTEXT, NCHAR, \
NVARCHAR, NUMERIC, SET, SMALLINT, REAL, TEXT, TIME, TIMESTAMP, \
TINYBLOB, TINYINT, TINYTEXT,\
@@ -24,8 +24,8 @@ from .base import \
__all__ = (
'BIGINT', 'BINARY', 'BIT', 'BLOB', 'BOOLEAN', 'CHAR', 'DATE', 'DATETIME',
'DECIMAL', 'DOUBLE', 'ENUM', 'DECIMAL', 'FLOAT', 'INTEGER', 'INTEGER',
- 'LONGBLOB', 'LONGTEXT', 'MEDIUMBLOB', 'MEDIUMINT', 'MEDIUMTEXT', 'NCHAR',
- 'NVARCHAR', 'NUMERIC', 'SET', 'SMALLINT', 'REAL', 'TEXT', 'TIME',
+ 'JSON', 'LONGBLOB', 'LONGTEXT', 'MEDIUMBLOB', 'MEDIUMINT', 'MEDIUMTEXT',
+ 'NCHAR', 'NVARCHAR', 'NUMERIC', 'SET', 'SMALLINT', 'REAL', 'TEXT', 'TIME',
'TIMESTAMP', 'TINYBLOB', 'TINYINT', 'TINYTEXT', 'VARBINARY', 'VARCHAR',
'YEAR', 'dialect'
)
diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py
index 8830cb0c1..61c4a3fac 100644
--- a/lib/sqlalchemy/dialects/mysql/base.py
+++ b/lib/sqlalchemy/dialects/mysql/base.py
@@ -536,13 +536,13 @@ output::
"""
-import datetime
import re
import sys
+import json
from ... import schema as sa_schema
from ... import exc, log, sql, util
-from ...sql import compiler
+from ...sql import compiler, elements
from array import array as _array
from ...engine import reflection
@@ -552,6 +552,17 @@ from ...util import topological
from ...types import DATE, BOOLEAN, \
BLOB, BINARY, VARBINARY
+from . import reflection as _reflection
+from .types import BIGINT, BIT, CHAR, DECIMAL, DATETIME, \
+ DOUBLE, FLOAT, INTEGER, LONGBLOB, LONGTEXT, MEDIUMBLOB, MEDIUMINT, \
+ MEDIUMTEXT, NCHAR, NUMERIC, NVARCHAR, REAL, SMALLINT, TEXT, TIME, \
+ TIMESTAMP, TINYBLOB, TINYINT, TINYTEXT, VARCHAR, YEAR
+from .types import _StringType, _IntegerType, _NumericType, \
+ _FloatType, _MatchType
+from .enumerated import ENUM, SET
+from .json import JSON, JSONIndexType, JSONPathType
+
+
RESERVED_WORDS = set(
['accessible', 'add', 'all', 'alter', 'analyze', 'and', 'as', 'asc',
'asensitive', 'before', 'between', 'bigint', 'binary', 'blob', 'both',
@@ -614,1056 +625,6 @@ SET_RE = re.compile(
re.I | re.UNICODE)
-class _NumericType(object):
- """Base for MySQL numeric types.
-
- This is the base both for NUMERIC as well as INTEGER, hence
- it's a mixin.
-
- """
-
- def __init__(self, unsigned=False, zerofill=False, **kw):
- self.unsigned = unsigned
- self.zerofill = zerofill
- super(_NumericType, self).__init__(**kw)
-
- def __repr__(self):
- return util.generic_repr(self,
- to_inspect=[_NumericType, sqltypes.Numeric])
-
-
-class _FloatType(_NumericType, sqltypes.Float):
- def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
- if isinstance(self, (REAL, DOUBLE)) and \
- (
- (precision is None and scale is not None) or
- (precision is not None and scale is None)
- ):
- raise exc.ArgumentError(
- "You must specify both precision and scale or omit "
- "both altogether.")
- super(_FloatType, self).__init__(
- precision=precision, asdecimal=asdecimal, **kw)
- self.scale = scale
-
- def __repr__(self):
- return util.generic_repr(self, to_inspect=[_FloatType,
- _NumericType,
- sqltypes.Float])
-
-
-class _IntegerType(_NumericType, sqltypes.Integer):
- def __init__(self, display_width=None, **kw):
- self.display_width = display_width
- super(_IntegerType, self).__init__(**kw)
-
- def __repr__(self):
- return util.generic_repr(self, to_inspect=[_IntegerType,
- _NumericType,
- sqltypes.Integer])
-
-
-class _StringType(sqltypes.String):
- """Base for MySQL string types."""
-
- def __init__(self, charset=None, collation=None,
- ascii=False, binary=False, unicode=False,
- national=False, **kw):
- self.charset = charset
-
- # allow collate= or collation=
- kw.setdefault('collation', kw.pop('collate', collation))
-
- self.ascii = ascii
- self.unicode = unicode
- self.binary = binary
- self.national = national
- super(_StringType, self).__init__(**kw)
-
- def __repr__(self):
- return util.generic_repr(self,
- to_inspect=[_StringType, sqltypes.String])
-
-
-class _MatchType(sqltypes.Float, sqltypes.MatchType):
- def __init__(self, **kw):
- # TODO: float arguments?
- sqltypes.Float.__init__(self)
- sqltypes.MatchType.__init__(self)
-
-
-
-class NUMERIC(_NumericType, sqltypes.NUMERIC):
- """MySQL NUMERIC type."""
-
- __visit_name__ = 'NUMERIC'
-
- def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
- """Construct a NUMERIC.
-
- :param precision: Total digits in this number. If scale and precision
- are both None, values are stored to limits allowed by the server.
-
- :param scale: The number of digits after the decimal point.
-
- :param unsigned: a boolean, optional.
-
- :param zerofill: Optional. If true, values will be stored as strings
- left-padded with zeros. Note that this does not effect the values
- returned by the underlying database API, which continue to be
- numeric.
-
- """
- super(NUMERIC, self).__init__(precision=precision,
- scale=scale, asdecimal=asdecimal, **kw)
-
-
-class DECIMAL(_NumericType, sqltypes.DECIMAL):
- """MySQL DECIMAL type."""
-
- __visit_name__ = 'DECIMAL'
-
- def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
- """Construct a DECIMAL.
-
- :param precision: Total digits in this number. If scale and precision
- are both None, values are stored to limits allowed by the server.
-
- :param scale: The number of digits after the decimal point.
-
- :param unsigned: a boolean, optional.
-
- :param zerofill: Optional. If true, values will be stored as strings
- left-padded with zeros. Note that this does not effect the values
- returned by the underlying database API, which continue to be
- numeric.
-
- """
- super(DECIMAL, self).__init__(precision=precision, scale=scale,
- asdecimal=asdecimal, **kw)
-
-
-class DOUBLE(_FloatType):
- """MySQL DOUBLE type."""
-
- __visit_name__ = 'DOUBLE'
-
- def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
- """Construct a DOUBLE.
-
- .. note::
-
- The :class:`.DOUBLE` type by default converts from float
- to Decimal, using a truncation that defaults to 10 digits.
- Specify either ``scale=n`` or ``decimal_return_scale=n`` in order
- to change this scale, or ``asdecimal=False`` to return values
- directly as Python floating points.
-
- :param precision: Total digits in this number. If scale and precision
- are both None, values are stored to limits allowed by the server.
-
- :param scale: The number of digits after the decimal point.
-
- :param unsigned: a boolean, optional.
-
- :param zerofill: Optional. If true, values will be stored as strings
- left-padded with zeros. Note that this does not effect the values
- returned by the underlying database API, which continue to be
- numeric.
-
- """
- super(DOUBLE, self).__init__(precision=precision, scale=scale,
- asdecimal=asdecimal, **kw)
-
-
-class REAL(_FloatType, sqltypes.REAL):
- """MySQL REAL type."""
-
- __visit_name__ = 'REAL'
-
- def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
- """Construct a REAL.
-
- .. note::
-
- The :class:`.REAL` type by default converts from float
- to Decimal, using a truncation that defaults to 10 digits.
- Specify either ``scale=n`` or ``decimal_return_scale=n`` in order
- to change this scale, or ``asdecimal=False`` to return values
- directly as Python floating points.
-
- :param precision: Total digits in this number. If scale and precision
- are both None, values are stored to limits allowed by the server.
-
- :param scale: The number of digits after the decimal point.
-
- :param unsigned: a boolean, optional.
-
- :param zerofill: Optional. If true, values will be stored as strings
- left-padded with zeros. Note that this does not effect the values
- returned by the underlying database API, which continue to be
- numeric.
-
- """
- super(REAL, self).__init__(precision=precision, scale=scale,
- asdecimal=asdecimal, **kw)
-
-
-class FLOAT(_FloatType, sqltypes.FLOAT):
- """MySQL FLOAT type."""
-
- __visit_name__ = 'FLOAT'
-
- def __init__(self, precision=None, scale=None, asdecimal=False, **kw):
- """Construct a FLOAT.
-
- :param precision: Total digits in this number. If scale and precision
- are both None, values are stored to limits allowed by the server.
-
- :param scale: The number of digits after the decimal point.
-
- :param unsigned: a boolean, optional.
-
- :param zerofill: Optional. If true, values will be stored as strings
- left-padded with zeros. Note that this does not effect the values
- returned by the underlying database API, which continue to be
- numeric.
-
- """
- super(FLOAT, self).__init__(precision=precision, scale=scale,
- asdecimal=asdecimal, **kw)
-
- def bind_processor(self, dialect):
- return None
-
-
-class INTEGER(_IntegerType, sqltypes.INTEGER):
- """MySQL INTEGER type."""
-
- __visit_name__ = 'INTEGER'
-
- def __init__(self, display_width=None, **kw):
- """Construct an INTEGER.
-
- :param display_width: Optional, maximum display width for this number.
-
- :param unsigned: a boolean, optional.
-
- :param zerofill: Optional. If true, values will be stored as strings
- left-padded with zeros. Note that this does not effect the values
- returned by the underlying database API, which continue to be
- numeric.
-
- """
- super(INTEGER, self).__init__(display_width=display_width, **kw)
-
-
-class BIGINT(_IntegerType, sqltypes.BIGINT):
- """MySQL BIGINTEGER type."""
-
- __visit_name__ = 'BIGINT'
-
- def __init__(self, display_width=None, **kw):
- """Construct a BIGINTEGER.
-
- :param display_width: Optional, maximum display width for this number.
-
- :param unsigned: a boolean, optional.
-
- :param zerofill: Optional. If true, values will be stored as strings
- left-padded with zeros. Note that this does not effect the values
- returned by the underlying database API, which continue to be
- numeric.
-
- """
- super(BIGINT, self).__init__(display_width=display_width, **kw)
-
-
-class MEDIUMINT(_IntegerType):
- """MySQL MEDIUMINTEGER type."""
-
- __visit_name__ = 'MEDIUMINT'
-
- def __init__(self, display_width=None, **kw):
- """Construct a MEDIUMINTEGER
-
- :param display_width: Optional, maximum display width for this number.
-
- :param unsigned: a boolean, optional.
-
- :param zerofill: Optional. If true, values will be stored as strings
- left-padded with zeros. Note that this does not effect the values
- returned by the underlying database API, which continue to be
- numeric.
-
- """
- super(MEDIUMINT, self).__init__(display_width=display_width, **kw)
-
-
-class TINYINT(_IntegerType):
- """MySQL TINYINT type."""
-
- __visit_name__ = 'TINYINT'
-
- def __init__(self, display_width=None, **kw):
- """Construct a TINYINT.
-
- :param display_width: Optional, maximum display width for this number.
-
- :param unsigned: a boolean, optional.
-
- :param zerofill: Optional. If true, values will be stored as strings
- left-padded with zeros. Note that this does not effect the values
- returned by the underlying database API, which continue to be
- numeric.
-
- """
- super(TINYINT, self).__init__(display_width=display_width, **kw)
-
-
-class SMALLINT(_IntegerType, sqltypes.SMALLINT):
- """MySQL SMALLINTEGER type."""
-
- __visit_name__ = 'SMALLINT'
-
- def __init__(self, display_width=None, **kw):
- """Construct a SMALLINTEGER.
-
- :param display_width: Optional, maximum display width for this number.
-
- :param unsigned: a boolean, optional.
-
- :param zerofill: Optional. If true, values will be stored as strings
- left-padded with zeros. Note that this does not effect the values
- returned by the underlying database API, which continue to be
- numeric.
-
- """
- super(SMALLINT, self).__init__(display_width=display_width, **kw)
-
-
-class BIT(sqltypes.TypeEngine):
- """MySQL BIT type.
-
- This type is for MySQL 5.0.3 or greater for MyISAM, and 5.0.5 or greater
- for MyISAM, MEMORY, InnoDB and BDB. For older versions, use a
- MSTinyInteger() type.
-
- """
-
- __visit_name__ = 'BIT'
-
- def __init__(self, length=None):
- """Construct a BIT.
-
- :param length: Optional, number of bits.
-
- """
- self.length = length
-
- def result_processor(self, dialect, coltype):
- """Convert a MySQL's 64 bit, variable length binary string to a long.
-
- TODO: this is MySQL-db, pyodbc specific. OurSQL and mysqlconnector
- already do this, so this logic should be moved to those dialects.
-
- """
-
- def process(value):
- if value is not None:
- v = 0
- for i in value:
- if not isinstance(i, int):
- i = ord(i) # convert byte to int on Python 2
- v = v << 8 | i
- return v
- return value
- return process
-
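For reference, the loop above folds MySQL's raw BIT bytes into a single integer, most-significant byte first. A minimal standalone sketch of the same conversion (the helper name is illustrative, not part of the dialect):

    def bit_bytes_to_int(value):
        # fold each byte into the accumulator, MSB first,
        # mirroring the process() callable above
        v = 0
        for i in value:
            if not isinstance(i, int):
                i = ord(i)  # Python 2 iterates bytes as 1-char strings
            v = (v << 8) | i
        return v

    assert bit_bytes_to_int(b'\x01\x02') == 258  # 0x0102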
-
-class TIME(sqltypes.TIME):
- """MySQL TIME type. """
-
- __visit_name__ = 'TIME'
-
- def __init__(self, timezone=False, fsp=None):
- """Construct a MySQL TIME type.
-
- :param timezone: not used by the MySQL dialect.
- :param fsp: fractional seconds precision value.
- MySQL 5.6 supports storage of fractional seconds;
- this parameter will be used when emitting DDL
- for the TIME type.
-
- .. note::
-
- DBAPI driver support for fractional seconds may
- be limited; current support includes
- MySQL Connector/Python.
-
- .. versionadded:: 0.8 The MySQL-specific TIME
- type as well as fractional seconds support.
-
- """
- super(TIME, self).__init__(timezone=timezone)
- self.fsp = fsp
-
- def result_processor(self, dialect, coltype):
- time = datetime.time
-
- def process(value):
- # convert from a timedelta value
- if value is not None:
- microseconds = value.microseconds
- seconds = value.seconds
- minutes = seconds // 60
- return time(minutes // 60,
- minutes % 60,
- seconds - minutes * 60,
- microsecond=microseconds)
- else:
- return None
- return process
-
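The processor above receives a datetime.timedelta from the DBAPI and rebuilds a datetime.time from its seconds and microseconds (any day component is discarded). A small sketch of the same arithmetic, outside the dialect:

    import datetime

    def timedelta_to_time(value):
        # mirrors process() above
        microseconds = value.microseconds
        seconds = value.seconds
        minutes = seconds // 60
        return datetime.time(minutes // 60, minutes % 60,
                             seconds - minutes * 60,
                             microsecond=microseconds)

    assert timedelta_to_time(
        datetime.timedelta(hours=5, minutes=30, seconds=12, microseconds=500)
    ) == datetime.time(5, 30, 12, 500)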
-
-class TIMESTAMP(sqltypes.TIMESTAMP):
- """MySQL TIMESTAMP type.
-
- """
-
- __visit_name__ = 'TIMESTAMP'
-
- def __init__(self, timezone=False, fsp=None):
- """Construct a MySQL TIMESTAMP type.
-
- :param timezone: not used by the MySQL dialect.
- :param fsp: fractional seconds precision value.
- MySQL 5.6.4 supports storage of fractional seconds;
- this parameter will be used when emitting DDL
- for the TIMESTAMP type.
-
- .. note::
-
- DBAPI driver support for fractional seconds may
- be limited; current support includes
- MySQL Connector/Python.
-
- .. versionadded:: 0.8.5 Added MySQL-specific :class:`.mysql.TIMESTAMP`
- with fractional seconds support.
-
- """
- super(TIMESTAMP, self).__init__(timezone=timezone)
- self.fsp = fsp
-
-
-class DATETIME(sqltypes.DATETIME):
- """MySQL DATETIME type.
-
- """
-
- __visit_name__ = 'DATETIME'
-
- def __init__(self, timezone=False, fsp=None):
- """Construct a MySQL DATETIME type.
-
- :param timezone: not used by the MySQL dialect.
- :param fsp: fractional seconds precision value.
- MySQL 5.6.4 supports storage of fractional seconds;
- this parameter will be used when emitting DDL
- for the DATETIME type.
-
- .. note::
-
- DBAPI driver support for fractional seconds may
- be limited; current support includes
- MySQL Connector/Python.
-
- .. versionadded:: 0.8.5 Added MySQL-specific :class:`.mysql.DATETIME`
- with fractional seconds support.
-
- """
- super(DATETIME, self).__init__(timezone=timezone)
- self.fsp = fsp
-
-
-class YEAR(sqltypes.TypeEngine):
- """MySQL YEAR type, for single byte storage of years 1901-2155."""
-
- __visit_name__ = 'YEAR'
-
- def __init__(self, display_width=None):
- self.display_width = display_width
-
-
-class TEXT(_StringType, sqltypes.TEXT):
- """MySQL TEXT type, for text up to 2^16 characters."""
-
- __visit_name__ = 'TEXT'
-
- def __init__(self, length=None, **kw):
- """Construct a TEXT.
-
- :param length: Optional, if provided the server may optimize storage
- by substituting the smallest TEXT type sufficient to store
- ``length`` characters.
-
- :param charset: Optional, a column-level character set for this string
- value. Takes precedence to 'ascii' or 'unicode' short-hand.
-
- :param collation: Optional, a column-level collation for this string
- value. Takes precedence to 'binary' short-hand.
-
- :param ascii: Defaults to False: short-hand for the ``latin1``
- character set, generates ASCII in schema.
-
- :param unicode: Defaults to False: short-hand for the ``ucs2``
- character set, generates UNICODE in schema.
-
- :param national: Optional. If true, use the server's configured
- national character set.
-
- :param binary: Defaults to False: short-hand, pick the binary
- collation type that matches the column's character set. Generates
- BINARY in schema. This does not affect the type of data stored,
- only the collation of character data.
-
- """
- super(TEXT, self).__init__(length=length, **kw)
-
-
-class TINYTEXT(_StringType):
- """MySQL TINYTEXT type, for text up to 2^8 characters."""
-
- __visit_name__ = 'TINYTEXT'
-
- def __init__(self, **kwargs):
- """Construct a TINYTEXT.
-
- :param charset: Optional, a column-level character set for this string
- value. Takes precedence to 'ascii' or 'unicode' short-hand.
-
- :param collation: Optional, a column-level collation for this string
- value. Takes precedence to 'binary' short-hand.
-
- :param ascii: Defaults to False: short-hand for the ``latin1``
- character set, generates ASCII in schema.
-
- :param unicode: Defaults to False: short-hand for the ``ucs2``
- character set, generates UNICODE in schema.
-
- :param national: Optional. If true, use the server's configured
- national character set.
-
- :param binary: Defaults to False: short-hand, pick the binary
- collation type that matches the column's character set. Generates
- BINARY in schema. This does not affect the type of data stored,
- only the collation of character data.
-
- """
- super(TINYTEXT, self).__init__(**kwargs)
-
-
-class MEDIUMTEXT(_StringType):
- """MySQL MEDIUMTEXT type, for text up to 2^24 characters."""
-
- __visit_name__ = 'MEDIUMTEXT'
-
- def __init__(self, **kwargs):
- """Construct a MEDIUMTEXT.
-
- :param charset: Optional, a column-level character set for this string
- value. Takes precedence to 'ascii' or 'unicode' short-hand.
-
- :param collation: Optional, a column-level collation for this string
- value. Takes precedence to 'binary' short-hand.
-
- :param ascii: Defaults to False: short-hand for the ``latin1``
- character set, generates ASCII in schema.
-
- :param unicode: Defaults to False: short-hand for the ``ucs2``
- character set, generates UNICODE in schema.
-
- :param national: Optional. If true, use the server's configured
- national character set.
-
- :param binary: Defaults to False: short-hand, pick the binary
- collation type that matches the column's character set. Generates
- BINARY in schema. This does not affect the type of data stored,
- only the collation of character data.
-
- """
- super(MEDIUMTEXT, self).__init__(**kwargs)
-
-
-class LONGTEXT(_StringType):
- """MySQL LONGTEXT type, for text up to 2^32 characters."""
-
- __visit_name__ = 'LONGTEXT'
-
- def __init__(self, **kwargs):
- """Construct a LONGTEXT.
-
- :param charset: Optional, a column-level character set for this string
- value. Takes precedence to 'ascii' or 'unicode' short-hand.
-
- :param collation: Optional, a column-level collation for this string
- value. Takes precedence to 'binary' short-hand.
-
- :param ascii: Defaults to False: short-hand for the ``latin1``
- character set, generates ASCII in schema.
-
- :param unicode: Defaults to False: short-hand for the ``ucs2``
- character set, generates UNICODE in schema.
-
- :param national: Optional. If true, use the server's configured
- national character set.
-
- :param binary: Defaults to False: short-hand, pick the binary
- collation type that matches the column's character set. Generates
- BINARY in schema. This does not affect the type of data stored,
- only the collation of character data.
-
- """
- super(LONGTEXT, self).__init__(**kwargs)
-
-
-class VARCHAR(_StringType, sqltypes.VARCHAR):
- """MySQL VARCHAR type, for variable-length character data."""
-
- __visit_name__ = 'VARCHAR'
-
- def __init__(self, length=None, **kwargs):
- """Construct a VARCHAR.
-
- :param charset: Optional, a column-level character set for this string
- value. Takes precedence to 'ascii' or 'unicode' short-hand.
-
- :param collation: Optional, a column-level collation for this string
- value. Takes precedence to 'binary' short-hand.
-
- :param ascii: Defaults to False: short-hand for the ``latin1``
- character set, generates ASCII in schema.
-
- :param unicode: Defaults to False: short-hand for the ``ucs2``
- character set, generates UNICODE in schema.
-
- :param national: Optional. If true, use the server's configured
- national character set.
-
- :param binary: Defaults to False: short-hand, pick the binary
- collation type that matches the column's character set. Generates
- BINARY in schema. This does not affect the type of data stored,
- only the collation of character data.
-
- """
- super(VARCHAR, self).__init__(length=length, **kwargs)
-
-
-class CHAR(_StringType, sqltypes.CHAR):
- """MySQL CHAR type, for fixed-length character data."""
-
- __visit_name__ = 'CHAR'
-
- def __init__(self, length=None, **kwargs):
- """Construct a CHAR.
-
- :param length: Maximum data length, in characters.
-
- :param binary: Optional, use the default binary collation for the
- national character set. This does not affect the type of data
- stored, use a BINARY type for binary data.
-
- :param collation: Optional, request a particular collation. Must be
- compatible with the national character set.
-
- """
- super(CHAR, self).__init__(length=length, **kwargs)
-
- @classmethod
- def _adapt_string_for_cast(self, type_):
- # copy the given string type into a CHAR
- # for the purposes of rendering a CAST expression
- type_ = sqltypes.to_instance(type_)
- if isinstance(type_, sqltypes.CHAR):
- return type_
- elif isinstance(type_, _StringType):
- return CHAR(
- length=type_.length,
- charset=type_.charset,
- collation=type_.collation,
- ascii=type_.ascii,
- binary=type_.binary,
- unicode=type_.unicode,
- national=False # not supported in CAST
- )
- else:
- return CHAR(length=type_.length)
-
-
-class NVARCHAR(_StringType, sqltypes.NVARCHAR):
- """MySQL NVARCHAR type.
-
- For variable-length character data in the server's configured national
- character set.
- """
-
- __visit_name__ = 'NVARCHAR'
-
- def __init__(self, length=None, **kwargs):
- """Construct an NVARCHAR.
-
- :param length: Maximum data length, in characters.
-
- :param binary: Optional, use the default binary collation for the
- national character set. This does not affect the type of data
- stored, use a BINARY type for binary data.
-
- :param collation: Optional, request a particular collation. Must be
- compatible with the national character set.
-
- """
- kwargs['national'] = True
- super(NVARCHAR, self).__init__(length=length, **kwargs)
-
-
-class NCHAR(_StringType, sqltypes.NCHAR):
- """MySQL NCHAR type.
-
- For fixed-length character data in the server's configured national
- character set.
- """
-
- __visit_name__ = 'NCHAR'
-
- def __init__(self, length=None, **kwargs):
- """Construct an NCHAR.
-
- :param length: Maximum data length, in characters.
-
- :param binary: Optional, use the default binary collation for the
- national character set. This does not affect the type of data
- stored, use a BINARY type for binary data.
-
- :param collation: Optional, request a particular collation. Must be
- compatible with the national character set.
-
- """
- kwargs['national'] = True
- super(NCHAR, self).__init__(length=length, **kwargs)
-
-
-class TINYBLOB(sqltypes._Binary):
- """MySQL TINYBLOB type, for binary data up to 2^8 bytes."""
-
- __visit_name__ = 'TINYBLOB'
-
-
-class MEDIUMBLOB(sqltypes._Binary):
- """MySQL MEDIUMBLOB type, for binary data up to 2^24 bytes."""
-
- __visit_name__ = 'MEDIUMBLOB'
-
-
-class LONGBLOB(sqltypes._Binary):
- """MySQL LONGBLOB type, for binary data up to 2^32 bytes."""
-
- __visit_name__ = 'LONGBLOB'
-
-
-class _EnumeratedValues(_StringType):
- def _init_values(self, values, kw):
- self.quoting = kw.pop('quoting', 'auto')
-
- if self.quoting == 'auto' and len(values):
- # What quoting character are we using?
- q = None
- for e in values:
- if len(e) == 0:
- self.quoting = 'unquoted'
- break
- elif q is None:
- q = e[0]
-
- if len(e) == 1 or e[0] != q or e[-1] != q:
- self.quoting = 'unquoted'
- break
- else:
- self.quoting = 'quoted'
-
- if self.quoting == 'quoted':
- util.warn_deprecated(
- 'Manually quoting %s value literals is deprecated. Supply '
- 'unquoted values and use the quoting= option in cases of '
- 'ambiguity.' % self.__class__.__name__)
-
- values = self._strip_values(values)
-
- self._enumerated_values = values
- length = max([len(v) for v in values] + [0])
- return values, length
-
- @classmethod
- def _strip_values(cls, values):
- strip_values = []
- for a in values:
- if a[0:1] == '"' or a[0:1] == "'":
- # strip enclosing quotes and unquote interior
- a = a[1:-1].replace(a[0] * 2, a[0])
- strip_values.append(a)
- return strip_values
-
-
-class ENUM(sqltypes.Enum, _EnumeratedValues):
- """MySQL ENUM type."""
-
- __visit_name__ = 'ENUM'
-
- def __init__(self, *enums, **kw):
- """Construct an ENUM.
-
- E.g.::
-
- Column('myenum', ENUM("foo", "bar", "baz"))
-
- :param enums: The range of valid values for this ENUM. Values will be
- quoted when generating the schema according to the quoting flag (see
- below).
-
- :param strict: Defaults to False: ensure that a given value is in this
- ENUM's range of permissible values when inserting or updating rows.
- Note that MySQL will not raise a fatal error if you attempt to store
-        an out-of-range value; an alternate value will be stored instead.
- (See MySQL ENUM documentation.)
-
- :param charset: Optional, a column-level character set for this string
- value. Takes precedence to 'ascii' or 'unicode' short-hand.
-
- :param collation: Optional, a column-level collation for this string
- value. Takes precedence to 'binary' short-hand.
-
- :param ascii: Defaults to False: short-hand for the ``latin1``
- character set, generates ASCII in schema.
-
- :param unicode: Defaults to False: short-hand for the ``ucs2``
- character set, generates UNICODE in schema.
-
- :param binary: Defaults to False: short-hand, pick the binary
- collation type that matches the column's character set. Generates
- BINARY in schema. This does not affect the type of data stored,
- only the collation of character data.
-
- :param quoting: Defaults to 'auto': automatically determine enum value
- quoting. If all enum values are surrounded by the same quoting
- character, then use 'quoted' mode. Otherwise, use 'unquoted' mode.
-
- 'quoted': values in enums are already quoted, they will be used
- directly when generating the schema - this usage is deprecated.
-
- 'unquoted': values in enums are not quoted, they will be escaped and
- surrounded by single quotes when generating the schema.
-
- Previous versions of this type always required manually quoted
- values to be supplied; future versions will always quote the string
- literals for you. This is a transitional option.
-
- """
- values, length = self._init_values(enums, kw)
- self.strict = kw.pop('strict', False)
- kw.pop('metadata', None)
- kw.pop('schema', None)
- kw.pop('name', None)
- kw.pop('quote', None)
- kw.pop('native_enum', None)
- kw.pop('inherit_schema', None)
- kw.pop('_create_events', None)
- _StringType.__init__(self, length=length, **kw)
- sqltypes.Enum.__init__(self, *values)
-
- def __repr__(self):
- return util.generic_repr(
- self, to_inspect=[ENUM, _StringType, sqltypes.Enum])
-
- def bind_processor(self, dialect):
- super_convert = super(ENUM, self).bind_processor(dialect)
-
- def process(value):
- if self.strict and value is not None and value not in self.enums:
- raise exc.InvalidRequestError('"%s" not a valid value for '
- 'this enum' % value)
- if super_convert:
- return super_convert(value)
- else:
- return value
- return process
-
- def adapt(self, cls, **kw):
- if issubclass(cls, ENUM):
- kw['strict'] = self.strict
- return sqltypes.Enum.adapt(self, cls, **kw)
-
-
-class SET(_EnumeratedValues):
- """MySQL SET type."""
-
- __visit_name__ = 'SET'
-
- def __init__(self, *values, **kw):
- """Construct a SET.
-
- E.g.::
-
- Column('myset', SET("foo", "bar", "baz"))
-
-
- The list of potential values is required in the case that this
- set will be used to generate DDL for a table, or if the
- :paramref:`.SET.retrieve_as_bitwise` flag is set to True.
-
- :param values: The range of valid values for this SET.
-
- :param convert_unicode: Same flag as that of
- :paramref:`.String.convert_unicode`.
-
- :param collation: same as that of :paramref:`.String.collation`
-
- :param charset: same as that of :paramref:`.VARCHAR.charset`.
-
- :param ascii: same as that of :paramref:`.VARCHAR.ascii`.
-
- :param unicode: same as that of :paramref:`.VARCHAR.unicode`.
-
- :param binary: same as that of :paramref:`.VARCHAR.binary`.
-
- :param quoting: Defaults to 'auto': automatically determine set value
- quoting. If all values are surrounded by the same quoting
- character, then use 'quoted' mode. Otherwise, use 'unquoted' mode.
-
- 'quoted': values in enums are already quoted, they will be used
- directly when generating the schema - this usage is deprecated.
-
- 'unquoted': values in enums are not quoted, they will be escaped and
- surrounded by single quotes when generating the schema.
-
- Previous versions of this type always required manually quoted
- values to be supplied; future versions will always quote the string
- literals for you. This is a transitional option.
-
- .. versionadded:: 0.9.0
-
- :param retrieve_as_bitwise: if True, the data for the set type will be
- persisted and selected using an integer value, where a set is coerced
- into a bitwise mask for persistence. MySQL allows this mode which
- has the advantage of being able to store values unambiguously,
- such as the blank string ``''``. The datatype will appear
- as the expression ``col + 0`` in a SELECT statement, so that the
- value is coerced into an integer value in result sets.
- This flag is required if one wishes
- to persist a set that can store the blank string ``''`` as a value.
-
- .. warning::
-
- When using :paramref:`.mysql.SET.retrieve_as_bitwise`, it is
- essential that the list of set values is expressed in the
- **exact same order** as exists on the MySQL database.
-
- .. versionadded:: 1.0.0
-
-
- """
- self.retrieve_as_bitwise = kw.pop('retrieve_as_bitwise', False)
- values, length = self._init_values(values, kw)
- self.values = tuple(values)
- if not self.retrieve_as_bitwise and '' in values:
- raise exc.ArgumentError(
- "Can't use the blank value '' in a SET without "
- "setting retrieve_as_bitwise=True")
- if self.retrieve_as_bitwise:
- self._bitmap = dict(
- (value, 2 ** idx)
- for idx, value in enumerate(self.values)
- )
- self._bitmap.update(
- (2 ** idx, value)
- for idx, value in enumerate(self.values)
- )
- kw.setdefault('length', length)
- super(SET, self).__init__(**kw)
-
- def column_expression(self, colexpr):
- if self.retrieve_as_bitwise:
- return sql.type_coerce(
- sql.type_coerce(colexpr, sqltypes.Integer) + 0,
- self
- )
- else:
- return colexpr
-
- def result_processor(self, dialect, coltype):
- if self.retrieve_as_bitwise:
- def process(value):
- if value is not None:
- value = int(value)
-
- return set(
- util.map_bits(self._bitmap.__getitem__, value)
- )
- else:
- return None
- else:
- super_convert = super(SET, self).result_processor(dialect, coltype)
-
- def process(value):
- if isinstance(value, util.string_types):
- # MySQLdb returns a string, let's parse
- if super_convert:
- value = super_convert(value)
- return set(re.findall(r'[^,]+', value))
- else:
- # mysql-connector-python does a naive
- # split(",") which throws in an empty string
- if value is not None:
- value.discard('')
- return value
- return process
-
- def bind_processor(self, dialect):
- super_convert = super(SET, self).bind_processor(dialect)
- if self.retrieve_as_bitwise:
- def process(value):
- if value is None:
- return None
- elif isinstance(value, util.int_types + util.string_types):
- if super_convert:
- return super_convert(value)
- else:
- return value
- else:
- int_value = 0
- for v in value:
- int_value |= self._bitmap[v]
- return int_value
- else:
-
- def process(value):
- # accept strings and int (actually bitflag) values directly
- if value is not None and not isinstance(
- value, util.int_types + util.string_types):
- value = ",".join(value)
-
- if super_convert:
- return super_convert(value)
- else:
- return value
- return process
-
- def adapt(self, impltype, **kw):
- kw['retrieve_as_bitwise'] = self.retrieve_as_bitwise
- return util.constructor_copy(
- self, impltype,
- *self.values,
- **kw
- )
-
# old names
MSTime = TIME
MSSet = SET
@@ -1704,7 +665,11 @@ colspecs = {
sqltypes.Float: FLOAT,
sqltypes.Time: TIME,
sqltypes.Enum: ENUM,
- sqltypes.MatchType: _MatchType
+ sqltypes.MatchType: _MatchType,
+ sqltypes.JSON: JSON,
+ sqltypes.JSON.JSONIndexType: JSONIndexType,
+ sqltypes.JSON.JSONPathType: JSONPathType
+
}
# Everything 3.23 through 5.1 excepting OpenGIS types.
@@ -1724,6 +689,7 @@ ischema_names = {
'float': FLOAT,
'int': INTEGER,
'integer': INTEGER,
+ 'json': JSON,
'longblob': LONGBLOB,
'longtext': LONGTEXT,
'mediumblob': MEDIUMBLOB,
@@ -1769,6 +735,16 @@ class MySQLCompiler(compiler.SQLCompiler):
def visit_sysdate_func(self, fn, **kw):
return "SYSDATE()"
+ def visit_json_getitem_op_binary(self, binary, operator, **kw):
+ return "JSON_EXTRACT(%s, %s)" % (
+ self.process(binary.left),
+ self.process(binary.right))
+
+ def visit_json_path_getitem_op_binary(self, binary, operator, **kw):
+ return "JSON_EXTRACT(%s, %s)" % (
+ self.process(binary.left),
+ self.process(binary.right))
+
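Both new hooks render MySQL's JSON_EXTRACT() for the core JSON getitem and path-getitem operators. A rough illustration of the SQL they produce, assuming a hypothetical table t with a mysql.JSON column named data (the default MySQL dialect can be instantiated without a DBAPI for compilation purposes):

    from sqlalchemy import Column, MetaData, Table, select
    from sqlalchemy.dialects import mysql

    t = Table('t', MetaData(), Column('data', mysql.JSON))

    # single-key access renders JSON_EXTRACT(t.data, %s) with a '$."some_key"' parameter
    print(select([t.c.data['some_key']]).compile(dialect=mysql.dialect()))

    # tuple access uses the path variant: JSON_EXTRACT(t.data, %s) with '$."a"[1]'
    print(select([t.c.data[('a', 1)]]).compile(dialect=mysql.dialect()))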
def visit_concat_op_binary(self, binary, operator, **kw):
return "concat(%s, %s)" % (self.process(binary.left),
self.process(binary.right))
@@ -1801,6 +777,8 @@ class MySQLCompiler(compiler.SQLCompiler):
return self.dialect.type_compiler.process(adapted)
elif isinstance(type_, sqltypes._Binary):
return 'BINARY'
+ elif isinstance(type_, sqltypes.JSON):
+ return "JSON"
elif isinstance(type_, sqltypes.NUMERIC):
return self.dialect.type_compiler.process(
type_).replace('NUMERIC', 'DECIMAL')
@@ -1974,7 +952,7 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
('PARTITION_BY', 'PARTITIONS'), # only for test consistency
], opts):
arg = opts[opt]
- if opt in _options_of_type_string:
+ if opt in _reflection._options_of_type_string:
arg = "'%s'" % arg.replace("\\", "\\\\").replace("'", "''")
if opt in ('DATA_DIRECTORY', 'INDEX_DIRECTORY',
@@ -2316,6 +1294,9 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler):
def visit_VARBINARY(self, type_, **kw):
return "VARBINARY(%d)" % type_.length
+ def visit_JSON(self, type_, **kw):
+ return "JSON"
+
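visit_JSON lets the type compiler emit the plain JSON keyword in DDL. A short sketch, assuming a hypothetical table t:

    from sqlalchemy import Column, MetaData, Table
    from sqlalchemy.dialects import mysql
    from sqlalchemy.schema import CreateTable

    t = Table('t', MetaData(), Column('data', mysql.JSON))
    # emits roughly: CREATE TABLE t (data JSON)
    print(CreateTable(t).compile(dialect=mysql.dialect()))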
def visit_large_binary(self, type_, **kw):
return self.visit_BLOB(type_)
@@ -2435,10 +1416,13 @@ class MySQLDialect(default.DefaultDialect):
})
]
- def __init__(self, isolation_level=None, **kwargs):
+ def __init__(self, isolation_level=None, json_serializer=None,
+ json_deserializer=None, **kwargs):
kwargs.pop('use_ansiquotes', None) # legacy
default.DefaultDialect.__init__(self, **kwargs)
self.isolation_level = isolation_level
+ self._json_serializer = json_serializer
+ self._json_deserializer = json_deserializer
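These two dialect-level arguments let an application supply its own JSON codec; like other dialect keywords they can be passed straight through create_engine(). A hedged sketch (the URL is a placeholder and assumes a PyMySQL driver is available):

    import json
    from sqlalchemy import create_engine

    engine = create_engine(
        "mysql+pymysql://user:pass@localhost/test",  # placeholder URL
        json_serializer=lambda obj: json.dumps(obj, sort_keys=True),
        json_deserializer=json.loads,
    )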
def on_connect(self):
if self.isolation_level is not None:
@@ -2605,6 +1589,10 @@ class MySQLDialect(default.DefaultDialect):
default.DefaultDialect.initialize(self, connection)
@property
+ def _is_mariadb(self):
+ return 'MariaDB' in self.server_version_info
+
+ @property
def _supports_cast(self):
return self.server_version_info is None or \
self.server_version_info >= (4, 0, 2)
@@ -2796,7 +1784,7 @@ class MySQLDialect(default.DefaultDialect):
preparer = self.preparer(self, server_ansiquotes=False)
else:
preparer = self.identifier_preparer
- return MySQLTableDefinitionParser(self, preparer)
+ return _reflection.MySQLTableDefinitionParser(self, preparer)
@reflection.cache
def _setup_parser(self, connection, table_name, schema=None, **kw):
@@ -2928,430 +1916,6 @@ class MySQLDialect(default.DefaultDialect):
return rows
-class ReflectedState(object):
- """Stores raw information about a SHOW CREATE TABLE statement."""
-
- def __init__(self):
- self.columns = []
- self.table_options = {}
- self.table_name = None
- self.keys = []
- self.constraints = []
-
-
-@log.class_logger
-class MySQLTableDefinitionParser(object):
- """Parses the results of a SHOW CREATE TABLE statement."""
-
- def __init__(self, dialect, preparer):
- self.dialect = dialect
- self.preparer = preparer
- self._prep_regexes()
-
- def parse(self, show_create, charset):
- state = ReflectedState()
- state.charset = charset
- for line in re.split(r'\r?\n', show_create):
- if line.startswith(' ' + self.preparer.initial_quote):
- self._parse_column(line, state)
- # a regular table options line
- elif line.startswith(') '):
- self._parse_table_options(line, state)
- # an ANSI-mode table options line
- elif line == ')':
- pass
- elif line.startswith('CREATE '):
- self._parse_table_name(line, state)
- # Not present in real reflection, but may be if
- # loading from a file.
- elif not line:
- pass
- else:
- type_, spec = self._parse_constraints(line)
- if type_ is None:
- util.warn("Unknown schema content: %r" % line)
- elif type_ == 'key':
- state.keys.append(spec)
- elif type_ == 'constraint':
- state.constraints.append(spec)
- else:
- pass
- return state
-
- def _parse_constraints(self, line):
- """Parse a KEY or CONSTRAINT line.
-
- :param line: A line of SHOW CREATE TABLE output
- """
-
- # KEY
- m = self._re_key.match(line)
- if m:
- spec = m.groupdict()
- # convert columns into name, length pairs
- spec['columns'] = self._parse_keyexprs(spec['columns'])
- return 'key', spec
-
- # CONSTRAINT
- m = self._re_constraint.match(line)
- if m:
- spec = m.groupdict()
- spec['table'] = \
- self.preparer.unformat_identifiers(spec['table'])
- spec['local'] = [c[0]
- for c in self._parse_keyexprs(spec['local'])]
- spec['foreign'] = [c[0]
- for c in self._parse_keyexprs(spec['foreign'])]
- return 'constraint', spec
-
- # PARTITION and SUBPARTITION
- m = self._re_partition.match(line)
- if m:
- # Punt!
- return 'partition', line
-
- # No match.
- return (None, line)
-
- def _parse_table_name(self, line, state):
- """Extract the table name.
-
- :param line: The first line of SHOW CREATE TABLE
- """
-
- regex, cleanup = self._pr_name
- m = regex.match(line)
- if m:
- state.table_name = cleanup(m.group('name'))
-
- def _parse_table_options(self, line, state):
- """Build a dictionary of all reflected table-level options.
-
- :param line: The final line of SHOW CREATE TABLE output.
- """
-
- options = {}
-
- if not line or line == ')':
- pass
-
- else:
- rest_of_line = line[:]
- for regex, cleanup in self._pr_options:
- m = regex.search(rest_of_line)
- if not m:
- continue
- directive, value = m.group('directive'), m.group('val')
- if cleanup:
- value = cleanup(value)
- options[directive.lower()] = value
- rest_of_line = regex.sub('', rest_of_line)
-
- for nope in ('auto_increment', 'data directory', 'index directory'):
- options.pop(nope, None)
-
- for opt, val in options.items():
- state.table_options['%s_%s' % (self.dialect.name, opt)] = val
-
- def _parse_column(self, line, state):
- """Extract column details.
-
- Falls back to a 'minimal support' variant if full parse fails.
-
- :param line: Any column-bearing line from SHOW CREATE TABLE
- """
-
- spec = None
- m = self._re_column.match(line)
- if m:
- spec = m.groupdict()
- spec['full'] = True
- else:
- m = self._re_column_loose.match(line)
- if m:
- spec = m.groupdict()
- spec['full'] = False
- if not spec:
- util.warn("Unknown column definition %r" % line)
- return
- if not spec['full']:
- util.warn("Incomplete reflection of column definition %r" % line)
-
- name, type_, args = spec['name'], spec['coltype'], spec['arg']
-
- try:
- col_type = self.dialect.ischema_names[type_]
- except KeyError:
- util.warn("Did not recognize type '%s' of column '%s'" %
- (type_, name))
- col_type = sqltypes.NullType
-
- # Column type positional arguments eg. varchar(32)
- if args is None or args == '':
- type_args = []
- elif args[0] == "'" and args[-1] == "'":
- type_args = self._re_csv_str.findall(args)
- else:
- type_args = [int(v) for v in self._re_csv_int.findall(args)]
-
- # Column type keyword options
- type_kw = {}
-
- if issubclass(col_type, (DATETIME, TIME, TIMESTAMP)):
- if type_args:
- type_kw['fsp'] = type_args.pop(0)
-
- for kw in ('unsigned', 'zerofill'):
- if spec.get(kw, False):
- type_kw[kw] = True
- for kw in ('charset', 'collate'):
- if spec.get(kw, False):
- type_kw[kw] = spec[kw]
- if issubclass(col_type, _EnumeratedValues):
- type_args = _EnumeratedValues._strip_values(type_args)
-
- if issubclass(col_type, SET) and '' in type_args:
- type_kw['retrieve_as_bitwise'] = True
-
- type_instance = col_type(*type_args, **type_kw)
-
- col_kw = {}
-
- # NOT NULL
- col_kw['nullable'] = True
- # this can be "NULL" in the case of TIMESTAMP
- if spec.get('notnull', False) == 'NOT NULL':
- col_kw['nullable'] = False
-
- # AUTO_INCREMENT
- if spec.get('autoincr', False):
- col_kw['autoincrement'] = True
- elif issubclass(col_type, sqltypes.Integer):
- col_kw['autoincrement'] = False
-
- # DEFAULT
- default = spec.get('default', None)
-
- if default == 'NULL':
- # eliminates the need to deal with this later.
- default = None
-
- col_d = dict(name=name, type=type_instance, default=default)
- col_d.update(col_kw)
- state.columns.append(col_d)
-
- def _describe_to_create(self, table_name, columns):
- """Re-format DESCRIBE output as a SHOW CREATE TABLE string.
-
- DESCRIBE is a much simpler reflection and is sufficient for
- reflecting views for runtime use. This method formats DDL
- for columns only- keys are omitted.
-
- :param columns: A sequence of DESCRIBE or SHOW COLUMNS 6-tuples.
- SHOW FULL COLUMNS FROM rows must be rearranged for use with
- this function.
- """
-
- buffer = []
- for row in columns:
- (name, col_type, nullable, default, extra) = \
- [row[i] for i in (0, 1, 2, 4, 5)]
-
- line = [' ']
- line.append(self.preparer.quote_identifier(name))
- line.append(col_type)
- if not nullable:
- line.append('NOT NULL')
- if default:
- if 'auto_increment' in default:
- pass
- elif (col_type.startswith('timestamp') and
- default.startswith('C')):
- line.append('DEFAULT')
- line.append(default)
- elif default == 'NULL':
- line.append('DEFAULT')
- line.append(default)
- else:
- line.append('DEFAULT')
- line.append("'%s'" % default.replace("'", "''"))
- if extra:
- line.append(extra)
-
- buffer.append(' '.join(line))
-
- return ''.join([('CREATE TABLE %s (\n' %
- self.preparer.quote_identifier(table_name)),
- ',\n'.join(buffer),
- '\n) '])
-
- def _parse_keyexprs(self, identifiers):
- """Unpack '"col"(2),"col" ASC'-ish strings into components."""
-
- return self._re_keyexprs.findall(identifiers)
-
- def _prep_regexes(self):
- """Pre-compile regular expressions."""
-
- self._re_columns = []
- self._pr_options = []
-
- _final = self.preparer.final_quote
-
- quotes = dict(zip(('iq', 'fq', 'esc_fq'),
- [re.escape(s) for s in
- (self.preparer.initial_quote,
- _final,
- self.preparer._escape_identifier(_final))]))
-
- self._pr_name = _pr_compile(
- r'^CREATE (?:\w+ +)?TABLE +'
- r'%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +\($' % quotes,
- self.preparer._unescape_identifier)
-
- # `col`,`col2`(32),`col3`(15) DESC
- #
- # Note: ASC and DESC aren't reflected, so we'll punt...
- self._re_keyexprs = _re_compile(
- r'(?:'
- r'(?:%(iq)s((?:%(esc_fq)s|[^%(fq)s])+)%(fq)s)'
- r'(?:\((\d+)\))?(?=\,|$))+' % quotes)
-
- # 'foo' or 'foo','bar' or 'fo,o','ba''a''r'
- self._re_csv_str = _re_compile(r'\x27(?:\x27\x27|[^\x27])*\x27')
-
- # 123 or 123,456
- self._re_csv_int = _re_compile(r'\d+')
-
- # `colname` <type> [type opts]
- # (NOT NULL | NULL)
- # DEFAULT ('value' | CURRENT_TIMESTAMP...)
- # COMMENT 'comment'
- # COLUMN_FORMAT (FIXED|DYNAMIC|DEFAULT)
- # STORAGE (DISK|MEMORY)
- self._re_column = _re_compile(
- r' '
- r'%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +'
- r'(?P<coltype>\w+)'
- r'(?:\((?P<arg>(?:\d+|\d+,\d+|'
- r'(?:\x27(?:\x27\x27|[^\x27])*\x27,?)+))\))?'
- r'(?: +(?P<unsigned>UNSIGNED))?'
- r'(?: +(?P<zerofill>ZEROFILL))?'
- r'(?: +CHARACTER SET +(?P<charset>[\w_]+))?'
- r'(?: +COLLATE +(?P<collate>[\w_]+))?'
- r'(?: +(?P<notnull>(?:NOT )?NULL))?'
- r'(?: +DEFAULT +(?P<default>'
- r'(?:NULL|\x27(?:\x27\x27|[^\x27])*\x27|\w+'
- r'(?: +ON UPDATE \w+)?)'
- r'))?'
- r'(?: +(?P<autoincr>AUTO_INCREMENT))?'
- r'(?: +COMMENT +(P<comment>(?:\x27\x27|[^\x27])+))?'
- r'(?: +COLUMN_FORMAT +(?P<colfmt>\w+))?'
- r'(?: +STORAGE +(?P<storage>\w+))?'
- r'(?: +(?P<extra>.*))?'
- r',?$'
- % quotes
- )
-
- # Fallback, try to parse as little as possible
- self._re_column_loose = _re_compile(
- r' '
- r'%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +'
- r'(?P<coltype>\w+)'
- r'(?:\((?P<arg>(?:\d+|\d+,\d+|\x27(?:\x27\x27|[^\x27])+\x27))\))?'
- r'.*?(?P<notnull>(?:NOT )NULL)?'
- % quotes
- )
-
- # (PRIMARY|UNIQUE|FULLTEXT|SPATIAL) INDEX `name` (USING (BTREE|HASH))?
- # (`col` (ASC|DESC)?, `col` (ASC|DESC)?)
- # KEY_BLOCK_SIZE size | WITH PARSER name
- self._re_key = _re_compile(
- r' '
- r'(?:(?P<type>\S+) )?KEY'
- r'(?: +%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s)?'
- r'(?: +USING +(?P<using_pre>\S+))?'
- r' +\((?P<columns>.+?)\)'
- r'(?: +USING +(?P<using_post>\S+))?'
- r'(?: +KEY_BLOCK_SIZE *[ =]? *(?P<keyblock>\S+))?'
- r'(?: +WITH PARSER +(?P<parser>\S+))?'
- r',?$'
- % quotes
- )
-
- # CONSTRAINT `name` FOREIGN KEY (`local_col`)
- # REFERENCES `remote` (`remote_col`)
- # MATCH FULL | MATCH PARTIAL | MATCH SIMPLE
- # ON DELETE CASCADE ON UPDATE RESTRICT
- #
- # unique constraints come back as KEYs
- kw = quotes.copy()
- kw['on'] = 'RESTRICT|CASCADE|SET NULL|NOACTION'
- self._re_constraint = _re_compile(
- r' '
- r'CONSTRAINT +'
- r'%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +'
- r'FOREIGN KEY +'
- r'\((?P<local>[^\)]+?)\) REFERENCES +'
- r'(?P<table>%(iq)s[^%(fq)s]+%(fq)s'
- r'(?:\.%(iq)s[^%(fq)s]+%(fq)s)?) +'
- r'\((?P<foreign>[^\)]+?)\)'
- r'(?: +(?P<match>MATCH \w+))?'
- r'(?: +ON DELETE (?P<ondelete>%(on)s))?'
- r'(?: +ON UPDATE (?P<onupdate>%(on)s))?'
- % kw
- )
-
- # PARTITION
- #
- # punt!
- self._re_partition = _re_compile(r'(?:.*)(?:SUB)?PARTITION(?:.*)')
-
- # Table-level options (COLLATE, ENGINE, etc.)
- # Do the string options first, since they have quoted
- # strings we need to get rid of.
- for option in _options_of_type_string:
- self._add_option_string(option)
-
- for option in ('ENGINE', 'TYPE', 'AUTO_INCREMENT',
- 'AVG_ROW_LENGTH', 'CHARACTER SET',
- 'DEFAULT CHARSET', 'CHECKSUM',
- 'COLLATE', 'DELAY_KEY_WRITE', 'INSERT_METHOD',
- 'MAX_ROWS', 'MIN_ROWS', 'PACK_KEYS', 'ROW_FORMAT',
- 'KEY_BLOCK_SIZE'):
- self._add_option_word(option)
-
- self._add_option_regex('UNION', r'\([^\)]+\)')
- self._add_option_regex('TABLESPACE', r'.*? STORAGE DISK')
- self._add_option_regex(
- 'RAID_TYPE',
- r'\w+\s+RAID_CHUNKS\s*\=\s*\w+RAID_CHUNKSIZE\s*=\s*\w+')
-
- _optional_equals = r'(?:\s*(?:=\s*)|\s+)'
-
- def _add_option_string(self, directive):
- regex = (r'(?P<directive>%s)%s'
- r"'(?P<val>(?:[^']|'')*?)'(?!')" %
- (re.escape(directive), self._optional_equals))
- self._pr_options.append(_pr_compile(
- regex, lambda v: v.replace("\\\\", "\\").replace("''", "'")
- ))
-
- def _add_option_word(self, directive):
- regex = (r'(?P<directive>%s)%s'
- r'(?P<val>\w+)' %
- (re.escape(directive), self._optional_equals))
- self._pr_options.append(_pr_compile(regex))
-
- def _add_option_regex(self, directive, regex):
- regex = (r'(?P<directive>%s)%s'
- r'(?P<val>%s)' %
- (re.escape(directive), self._optional_equals, regex))
- self._pr_options.append(_pr_compile(regex))
-
-_options_of_type_string = ('COMMENT', 'DATA DIRECTORY', 'INDEX DIRECTORY',
- 'PASSWORD', 'CONNECTION')
-
class _DecodingRowProxy(object):
"""Return unicode-decoded values based on type inspection.
@@ -3397,14 +1961,3 @@ class _DecodingRowProxy(object):
else:
return item
-
-def _pr_compile(regex, cleanup=None):
- """Prepare a 2-tuple of compiled regex and callable."""
-
- return (_re_compile(regex), cleanup)
-
-
-def _re_compile(regex):
- """Compile a string to regex, I and UNICODE."""
-
- return re.compile(regex, re.I | re.UNICODE)
diff --git a/lib/sqlalchemy/dialects/mysql/enumerated.py b/lib/sqlalchemy/dialects/mysql/enumerated.py
new file mode 100644
index 000000000..53de2b5fe
--- /dev/null
+++ b/lib/sqlalchemy/dialects/mysql/enumerated.py
@@ -0,0 +1,307 @@
+# mysql/enumerated.py
+# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+import re
+
+from .types import _StringType
+from ... import exc, sql, util
+from ... import types as sqltypes
+
+
+class _EnumeratedValues(_StringType):
+ def _init_values(self, values, kw):
+ self.quoting = kw.pop('quoting', 'auto')
+
+ if self.quoting == 'auto' and len(values):
+ # What quoting character are we using?
+ q = None
+ for e in values:
+ if len(e) == 0:
+ self.quoting = 'unquoted'
+ break
+ elif q is None:
+ q = e[0]
+
+ if len(e) == 1 or e[0] != q or e[-1] != q:
+ self.quoting = 'unquoted'
+ break
+ else:
+ self.quoting = 'quoted'
+
+ if self.quoting == 'quoted':
+ util.warn_deprecated(
+ 'Manually quoting %s value literals is deprecated. Supply '
+ 'unquoted values and use the quoting= option in cases of '
+ 'ambiguity.' % self.__class__.__name__)
+
+ values = self._strip_values(values)
+
+ self._enumerated_values = values
+ length = max([len(v) for v in values] + [0])
+ return values, length
+
+ @classmethod
+ def _strip_values(cls, values):
+ strip_values = []
+ for a in values:
+ if a[0:1] == '"' or a[0:1] == "'":
+ # strip enclosing quotes and unquote interior
+ a = a[1:-1].replace(a[0] * 2, a[0])
+ strip_values.append(a)
+ return strip_values
+
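In plain terms, _strip_values() removes one layer of enclosing quotes from each quoted value and collapses doubled interior quote characters. A small sketch of the intended behavior on quoted inputs:

    assert _EnumeratedValues._strip_values(
        ["'foo'", "'it''s'", '"bar"']
    ) == ["foo", "it's", "bar"]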
+
+class ENUM(sqltypes.Enum, _EnumeratedValues):
+ """MySQL ENUM type."""
+
+ __visit_name__ = 'ENUM'
+
+ def __init__(self, *enums, **kw):
+ """Construct an ENUM.
+
+ E.g.::
+
+ Column('myenum', ENUM("foo", "bar", "baz"))
+
+ :param enums: The range of valid values for this ENUM. Values will be
+ quoted when generating the schema according to the quoting flag (see
+ below).
+
+ :param strict: Defaults to False: ensure that a given value is in this
+ ENUM's range of permissible values when inserting or updating rows.
+ Note that MySQL will not raise a fatal error if you attempt to store
+        an out-of-range value; an alternate value will be stored instead.
+ (See MySQL ENUM documentation.)
+
+ :param charset: Optional, a column-level character set for this string
+ value. Takes precedence to 'ascii' or 'unicode' short-hand.
+
+ :param collation: Optional, a column-level collation for this string
+ value. Takes precedence to 'binary' short-hand.
+
+ :param ascii: Defaults to False: short-hand for the ``latin1``
+ character set, generates ASCII in schema.
+
+ :param unicode: Defaults to False: short-hand for the ``ucs2``
+ character set, generates UNICODE in schema.
+
+ :param binary: Defaults to False: short-hand, pick the binary
+ collation type that matches the column's character set. Generates
+ BINARY in schema. This does not affect the type of data stored,
+ only the collation of character data.
+
+ :param quoting: Defaults to 'auto': automatically determine enum value
+ quoting. If all enum values are surrounded by the same quoting
+ character, then use 'quoted' mode. Otherwise, use 'unquoted' mode.
+
+ 'quoted': values in enums are already quoted, they will be used
+ directly when generating the schema - this usage is deprecated.
+
+ 'unquoted': values in enums are not quoted, they will be escaped and
+ surrounded by single quotes when generating the schema.
+
+ Previous versions of this type always required manually quoted
+ values to be supplied; future versions will always quote the string
+ literals for you. This is a transitional option.
+
+ """
+ values, length = self._init_values(enums, kw)
+ self.strict = kw.pop('strict', False)
+ kw.pop('metadata', None)
+ kw.pop('schema', None)
+ kw.pop('name', None)
+ kw.pop('quote', None)
+ kw.pop('native_enum', None)
+ kw.pop('inherit_schema', None)
+ kw.pop('_create_events', None)
+ _StringType.__init__(self, length=length, **kw)
+ sqltypes.Enum.__init__(self, *values)
+
+ def __repr__(self):
+ return util.generic_repr(
+ self, to_inspect=[ENUM, _StringType, sqltypes.Enum])
+
+ def bind_processor(self, dialect):
+ super_convert = super(ENUM, self).bind_processor(dialect)
+
+ def process(value):
+ if self.strict and value is not None and value not in self.enums:
+ raise exc.InvalidRequestError('"%s" not a valid value for '
+ 'this enum' % value)
+ if super_convert:
+ return super_convert(value)
+ else:
+ return value
+ return process
+
+ def adapt(self, cls, **kw):
+ if issubclass(cls, ENUM):
+ kw['strict'] = self.strict
+ return sqltypes.Enum.adapt(self, cls, **kw)
+
+
+class SET(_EnumeratedValues):
+ """MySQL SET type."""
+
+ __visit_name__ = 'SET'
+
+ def __init__(self, *values, **kw):
+ """Construct a SET.
+
+ E.g.::
+
+ Column('myset', SET("foo", "bar", "baz"))
+
+
+ The list of potential values is required in the case that this
+ set will be used to generate DDL for a table, or if the
+ :paramref:`.SET.retrieve_as_bitwise` flag is set to True.
+
+ :param values: The range of valid values for this SET.
+
+ :param convert_unicode: Same flag as that of
+ :paramref:`.String.convert_unicode`.
+
+ :param collation: same as that of :paramref:`.String.collation`
+
+ :param charset: same as that of :paramref:`.VARCHAR.charset`.
+
+ :param ascii: same as that of :paramref:`.VARCHAR.ascii`.
+
+ :param unicode: same as that of :paramref:`.VARCHAR.unicode`.
+
+ :param binary: same as that of :paramref:`.VARCHAR.binary`.
+
+ :param quoting: Defaults to 'auto': automatically determine set value
+ quoting. If all values are surrounded by the same quoting
+ character, then use 'quoted' mode. Otherwise, use 'unquoted' mode.
+
+ 'quoted': values in enums are already quoted, they will be used
+ directly when generating the schema - this usage is deprecated.
+
+ 'unquoted': values in enums are not quoted, they will be escaped and
+ surrounded by single quotes when generating the schema.
+
+ Previous versions of this type always required manually quoted
+ values to be supplied; future versions will always quote the string
+ literals for you. This is a transitional option.
+
+ .. versionadded:: 0.9.0
+
+ :param retrieve_as_bitwise: if True, the data for the set type will be
+ persisted and selected using an integer value, where a set is coerced
+ into a bitwise mask for persistence. MySQL allows this mode which
+ has the advantage of being able to store values unambiguously,
+ such as the blank string ``''``. The datatype will appear
+ as the expression ``col + 0`` in a SELECT statement, so that the
+ value is coerced into an integer value in result sets.
+ This flag is required if one wishes
+ to persist a set that can store the blank string ``''`` as a value.
+
+ .. warning::
+
+ When using :paramref:`.mysql.SET.retrieve_as_bitwise`, it is
+ essential that the list of set values is expressed in the
+ **exact same order** as exists on the MySQL database.
+
+ .. versionadded:: 1.0.0
+
+
+ """
+ self.retrieve_as_bitwise = kw.pop('retrieve_as_bitwise', False)
+ values, length = self._init_values(values, kw)
+ self.values = tuple(values)
+ if not self.retrieve_as_bitwise and '' in values:
+ raise exc.ArgumentError(
+ "Can't use the blank value '' in a SET without "
+ "setting retrieve_as_bitwise=True")
+ if self.retrieve_as_bitwise:
+ self._bitmap = dict(
+ (value, 2 ** idx)
+ for idx, value in enumerate(self.values)
+ )
+ self._bitmap.update(
+ (2 ** idx, value)
+ for idx, value in enumerate(self.values)
+ )
+ kw.setdefault('length', length)
+ super(SET, self).__init__(**kw)
+
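With retrieve_as_bitwise=True the constructor builds a two-way bitmap, assigning each value a power-of-two flag in declaration order and mapping each flag back to its value, so a whole set can round-trip through one integer. Roughly (``_bitmap`` is an internal attribute, shown only for illustration):

    s = SET('read', 'write', 'admin', retrieve_as_bitwise=True)
    # 'read' -> 1, 'write' -> 2, 'admin' -> 4, plus the reverse mapping
    assert s._bitmap['write'] == 2
    assert s._bitmap[4] == 'admin'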
+ def column_expression(self, colexpr):
+ if self.retrieve_as_bitwise:
+ return sql.type_coerce(
+ sql.type_coerce(colexpr, sqltypes.Integer) + 0,
+ self
+ )
+ else:
+ return colexpr
+
+ def result_processor(self, dialect, coltype):
+ if self.retrieve_as_bitwise:
+ def process(value):
+ if value is not None:
+ value = int(value)
+
+ return set(
+ util.map_bits(self._bitmap.__getitem__, value)
+ )
+ else:
+ return None
+ else:
+ super_convert = super(SET, self).result_processor(dialect, coltype)
+
+ def process(value):
+ if isinstance(value, util.string_types):
+ # MySQLdb returns a string, let's parse
+ if super_convert:
+ value = super_convert(value)
+ return set(re.findall(r'[^,]+', value))
+ else:
+ # mysql-connector-python does a naive
+ # split(",") which throws in an empty string
+ if value is not None:
+ value.discard('')
+ return value
+ return process
+
+ def bind_processor(self, dialect):
+ super_convert = super(SET, self).bind_processor(dialect)
+ if self.retrieve_as_bitwise:
+ def process(value):
+ if value is None:
+ return None
+ elif isinstance(value, util.int_types + util.string_types):
+ if super_convert:
+ return super_convert(value)
+ else:
+ return value
+ else:
+ int_value = 0
+ for v in value:
+ int_value |= self._bitmap[v]
+ return int_value
+ else:
+
+ def process(value):
+ # accept strings and int (actually bitflag) values directly
+ if value is not None and not isinstance(
+ value, util.int_types + util.string_types):
+ value = ",".join(value)
+
+ if super_convert:
+ return super_convert(value)
+ else:
+ return value
+ return process
+
+ def adapt(self, impltype, **kw):
+ kw['retrieve_as_bitwise'] = self.retrieve_as_bitwise
+ return util.constructor_copy(
+ self, impltype,
+ *self.values,
+ **kw
+ )
diff --git a/lib/sqlalchemy/dialects/mysql/json.py b/lib/sqlalchemy/dialects/mysql/json.py
new file mode 100644
index 000000000..a30cdc841
--- /dev/null
+++ b/lib/sqlalchemy/dialects/mysql/json.py
@@ -0,0 +1,90 @@
+# mysql/json.py
+# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+from __future__ import absolute_import
+
+import json
+
+from ...sql import elements
+from ... import types as sqltypes
+from ... import util
+
+
+class JSON(sqltypes.JSON):
+ """MySQL JSON type.
+
+ MySQL supports JSON as of version 5.7. Note that MariaDB does **not**
+ support JSON at the time of this writing.
+
+ The :class:`.mysql.JSON` type supports persistence of JSON values
+ as well as the core index operations provided by :class:`.types.JSON`
+ datatype, by adapting the operations to render the ``JSON_EXTRACT``
+ function at the database level.
+
+ .. versionadded:: 1.1
+
+ """
+
+ @util.memoized_property
+ def _str_impl(self):
+ return sqltypes.String(convert_unicode=True)
+
+ def bind_processor(self, dialect):
+ string_process = self._str_impl.bind_processor(dialect)
+
+ json_serializer = dialect._json_serializer or json.dumps
+
+ def process(value):
+ if value is self.NULL:
+ value = None
+ elif isinstance(value, elements.Null) or (
+ value is None and self.none_as_null
+ ):
+ return None
+
+ serialized = json_serializer(value)
+ if string_process:
+ serialized = string_process(serialized)
+ return serialized
+
+ return process
+
+ def result_processor(self, dialect, coltype):
+ string_process = self._str_impl.result_processor(dialect, coltype)
+ json_deserializer = dialect._json_deserializer or json.loads
+
+ def process(value):
+ if value is None:
+ return None
+ if string_process:
+ value = string_process(value)
+ return json_deserializer(value)
+ return process
+
+
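A usage sketch for the type, assuming a hypothetical declarative model and that the dialect package exports JSON (as the accompanying __init__.py change suggests); values are serialized with json.dumps on the way in and json.loads on the way out unless a custom codec is configured on the dialect:

    from sqlalchemy import Column, Integer
    from sqlalchemy.dialects.mysql import JSON
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Document(Base):
        __tablename__ = 'document'   # hypothetical table
        id = Column(Integer, primary_key=True)
        payload = Column(JSON)       # stored in a MySQL 5.7 JSON column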
+class JSONIndexType(sqltypes.JSON.JSONIndexType):
+ def bind_processor(self, dialect):
+ def process(value):
+ if isinstance(value, int):
+ return "$[%s]" % value
+ else:
+ return '$."%s"' % value
+
+ return process
+
+
+class JSONPathType(sqltypes.JSON.JSONPathType):
+ def bind_processor(self, dialect):
+ def process(value):
+ return "$%s" % (
+ "".join([
+ "[%s]" % elem if isinstance(elem, int)
+ else '."%s"' % elem for elem in value
+ ])
+ )
+
+ return process
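The two processors above translate Python index values into MySQL JSON path strings: integers become array positions, anything else a quoted member name, and a path sequence is concatenated into a single '$...' expression. A quick standalone sketch of the expected renderings (helper names are illustrative):

    def render_index(value):
        # JSONIndexType: a single hop
        return "$[%s]" % value if isinstance(value, int) else '$."%s"' % value

    def render_path(path):
        # JSONPathType: a sequence of hops
        return "$" + "".join(
            "[%s]" % elem if isinstance(elem, int) else '."%s"' % elem
            for elem in path
        )

    assert render_index(5) == "$[5]"
    assert render_index("name") == '$."name"'
    assert render_path(("a", 1, "b")) == '$."a"[1]."b"'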
diff --git a/lib/sqlalchemy/dialects/mysql/reflection.py b/lib/sqlalchemy/dialects/mysql/reflection.py
new file mode 100644
index 000000000..cf1078252
--- /dev/null
+++ b/lib/sqlalchemy/dialects/mysql/reflection.py
@@ -0,0 +1,449 @@
+# mysql/reflection.py
+# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+import re
+from ... import log, util
+from ... import types as sqltypes
+from .enumerated import _EnumeratedValues, SET
+from .types import DATETIME, TIME, TIMESTAMP
+
+
+class ReflectedState(object):
+ """Stores raw information about a SHOW CREATE TABLE statement."""
+
+ def __init__(self):
+ self.columns = []
+ self.table_options = {}
+ self.table_name = None
+ self.keys = []
+ self.constraints = []
+
+
+@log.class_logger
+class MySQLTableDefinitionParser(object):
+ """Parses the results of a SHOW CREATE TABLE statement."""
+
+ def __init__(self, dialect, preparer):
+ self.dialect = dialect
+ self.preparer = preparer
+ self._prep_regexes()
+
+ def parse(self, show_create, charset):
+ state = ReflectedState()
+ state.charset = charset
+ for line in re.split(r'\r?\n', show_create):
+ if line.startswith(' ' + self.preparer.initial_quote):
+ self._parse_column(line, state)
+ # a regular table options line
+ elif line.startswith(') '):
+ self._parse_table_options(line, state)
+ # an ANSI-mode table options line
+ elif line == ')':
+ pass
+ elif line.startswith('CREATE '):
+ self._parse_table_name(line, state)
+ # Not present in real reflection, but may be if
+ # loading from a file.
+ elif not line:
+ pass
+ else:
+ type_, spec = self._parse_constraints(line)
+ if type_ is None:
+ util.warn("Unknown schema content: %r" % line)
+ elif type_ == 'key':
+ state.keys.append(spec)
+ elif type_ == 'constraint':
+ state.constraints.append(spec)
+ else:
+ pass
+ return state
+
+ def _parse_constraints(self, line):
+ """Parse a KEY or CONSTRAINT line.
+
+ :param line: A line of SHOW CREATE TABLE output
+ """
+
+ # KEY
+ m = self._re_key.match(line)
+ if m:
+ spec = m.groupdict()
+ # convert columns into name, length pairs
+ spec['columns'] = self._parse_keyexprs(spec['columns'])
+ return 'key', spec
+
+ # CONSTRAINT
+ m = self._re_constraint.match(line)
+ if m:
+ spec = m.groupdict()
+ spec['table'] = \
+ self.preparer.unformat_identifiers(spec['table'])
+ spec['local'] = [c[0]
+ for c in self._parse_keyexprs(spec['local'])]
+ spec['foreign'] = [c[0]
+ for c in self._parse_keyexprs(spec['foreign'])]
+ return 'constraint', spec
+
+ # PARTITION and SUBPARTITION
+ m = self._re_partition.match(line)
+ if m:
+ # Punt!
+ return 'partition', line
+
+ # No match.
+ return (None, line)
+
+ def _parse_table_name(self, line, state):
+ """Extract the table name.
+
+ :param line: The first line of SHOW CREATE TABLE
+ """
+
+ regex, cleanup = self._pr_name
+ m = regex.match(line)
+ if m:
+ state.table_name = cleanup(m.group('name'))
+
+ def _parse_table_options(self, line, state):
+ """Build a dictionary of all reflected table-level options.
+
+ :param line: The final line of SHOW CREATE TABLE output.
+ """
+
+ options = {}
+
+ if not line or line == ')':
+ pass
+
+ else:
+ rest_of_line = line[:]
+ for regex, cleanup in self._pr_options:
+ m = regex.search(rest_of_line)
+ if not m:
+ continue
+ directive, value = m.group('directive'), m.group('val')
+ if cleanup:
+ value = cleanup(value)
+ options[directive.lower()] = value
+ rest_of_line = regex.sub('', rest_of_line)
+
+ for nope in ('auto_increment', 'data directory', 'index directory'):
+ options.pop(nope, None)
+
+ for opt, val in options.items():
+ state.table_options['%s_%s' % (self.dialect.name, opt)] = val
+
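The final loop above namespaces each recognized directive with the dialect name. A tiny standalone sketch of that step, using hypothetical values a SHOW CREATE TABLE tail line might yield:

    options = {'engine': 'InnoDB', 'default charset': 'utf8', 'comment': 'users'}
    table_options = {}
    for opt, val in options.items():
        table_options['%s_%s' % ('mysql', opt)] = val

    assert table_options == {'mysql_engine': 'InnoDB',
                             'mysql_default charset': 'utf8',
                             'mysql_comment': 'users'}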
+ def _parse_column(self, line, state):
+ """Extract column details.
+
+ Falls back to a 'minimal support' variant if full parse fails.
+
+ :param line: Any column-bearing line from SHOW CREATE TABLE
+ """
+
+ spec = None
+ m = self._re_column.match(line)
+ if m:
+ spec = m.groupdict()
+ spec['full'] = True
+ else:
+ m = self._re_column_loose.match(line)
+ if m:
+ spec = m.groupdict()
+ spec['full'] = False
+ if not spec:
+ util.warn("Unknown column definition %r" % line)
+ return
+ if not spec['full']:
+ util.warn("Incomplete reflection of column definition %r" % line)
+
+ name, type_, args = spec['name'], spec['coltype'], spec['arg']
+
+ try:
+ col_type = self.dialect.ischema_names[type_]
+ except KeyError:
+ util.warn("Did not recognize type '%s' of column '%s'" %
+ (type_, name))
+ col_type = sqltypes.NullType
+
+ # Column type positional arguments eg. varchar(32)
+ if args is None or args == '':
+ type_args = []
+ elif args[0] == "'" and args[-1] == "'":
+ type_args = self._re_csv_str.findall(args)
+ else:
+ type_args = [int(v) for v in self._re_csv_int.findall(args)]
+
+ # Column type keyword options
+ type_kw = {}
+
+ if issubclass(col_type, (DATETIME, TIME, TIMESTAMP)):
+ if type_args:
+ type_kw['fsp'] = type_args.pop(0)
+
+ for kw in ('unsigned', 'zerofill'):
+ if spec.get(kw, False):
+ type_kw[kw] = True
+ for kw in ('charset', 'collate'):
+ if spec.get(kw, False):
+ type_kw[kw] = spec[kw]
+ if issubclass(col_type, _EnumeratedValues):
+ type_args = _EnumeratedValues._strip_values(type_args)
+
+ if issubclass(col_type, SET) and '' in type_args:
+ type_kw['retrieve_as_bitwise'] = True
+
+ type_instance = col_type(*type_args, **type_kw)
+
+ col_kw = {}
+
+ # NOT NULL
+ col_kw['nullable'] = True
+ # this can be "NULL" in the case of TIMESTAMP
+ if spec.get('notnull', False) == 'NOT NULL':
+ col_kw['nullable'] = False
+
+ # AUTO_INCREMENT
+ if spec.get('autoincr', False):
+ col_kw['autoincrement'] = True
+ elif issubclass(col_type, sqltypes.Integer):
+ col_kw['autoincrement'] = False
+
+ # DEFAULT
+ default = spec.get('default', None)
+
+ if default == 'NULL':
+ # eliminates the need to deal with this later.
+ default = None
+
+ col_d = dict(name=name, type=type_instance, default=default)
+ col_d.update(col_kw)
+ state.columns.append(col_d)
+
+ def _describe_to_create(self, table_name, columns):
+ """Re-format DESCRIBE output as a SHOW CREATE TABLE string.
+
+ DESCRIBE is a much simpler reflection and is sufficient for
+ reflecting views for runtime use. This method formats DDL
+ for columns only- keys are omitted.
+
+ :param columns: A sequence of DESCRIBE or SHOW COLUMNS 6-tuples.
+ SHOW FULL COLUMNS FROM rows must be rearranged for use with
+ this function.
+ """
+
+ buffer = []
+ for row in columns:
+ (name, col_type, nullable, default, extra) = \
+ [row[i] for i in (0, 1, 2, 4, 5)]
+
+ line = [' ']
+ line.append(self.preparer.quote_identifier(name))
+ line.append(col_type)
+ if not nullable:
+ line.append('NOT NULL')
+ if default:
+ if 'auto_increment' in default:
+ pass
+ elif (col_type.startswith('timestamp') and
+ default.startswith('C')):
+ line.append('DEFAULT')
+ line.append(default)
+ elif default == 'NULL':
+ line.append('DEFAULT')
+ line.append(default)
+ else:
+ line.append('DEFAULT')
+ line.append("'%s'" % default.replace("'", "''"))
+ if extra:
+ line.append(extra)
+
+ buffer.append(' '.join(line))
+
+ return ''.join([('CREATE TABLE %s (\n' %
+ self.preparer.quote_identifier(table_name)),
+ ',\n'.join(buffer),
+ '\n) '])
+
+ def _parse_keyexprs(self, identifiers):
+ """Unpack '"col"(2),"col" ASC'-ish strings into components."""
+
+ return self._re_keyexprs.findall(identifiers)
+
+ def _prep_regexes(self):
+ """Pre-compile regular expressions."""
+
+ self._re_columns = []
+ self._pr_options = []
+
+ _final = self.preparer.final_quote
+
+ quotes = dict(zip(('iq', 'fq', 'esc_fq'),
+ [re.escape(s) for s in
+ (self.preparer.initial_quote,
+ _final,
+ self.preparer._escape_identifier(_final))]))
+
+ self._pr_name = _pr_compile(
+ r'^CREATE (?:\w+ +)?TABLE +'
+ r'%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +\($' % quotes,
+ self.preparer._unescape_identifier)
+
+ # `col`,`col2`(32),`col3`(15) DESC
+ #
+ # Note: ASC and DESC aren't reflected, so we'll punt...
+ self._re_keyexprs = _re_compile(
+ r'(?:'
+ r'(?:%(iq)s((?:%(esc_fq)s|[^%(fq)s])+)%(fq)s)'
+ r'(?:\((\d+)\))?(?=\,|$))+' % quotes)
+
+ # 'foo' or 'foo','bar' or 'fo,o','ba''a''r'
+ self._re_csv_str = _re_compile(r'\x27(?:\x27\x27|[^\x27])*\x27')
+
+ # 123 or 123,456
+ self._re_csv_int = _re_compile(r'\d+')
+
+ # `colname` <type> [type opts]
+ # (NOT NULL | NULL)
+ # DEFAULT ('value' | CURRENT_TIMESTAMP...)
+ # COMMENT 'comment'
+ # COLUMN_FORMAT (FIXED|DYNAMIC|DEFAULT)
+ # STORAGE (DISK|MEMORY)
+ self._re_column = _re_compile(
+ r' '
+ r'%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +'
+ r'(?P<coltype>\w+)'
+ r'(?:\((?P<arg>(?:\d+|\d+,\d+|'
+ r'(?:\x27(?:\x27\x27|[^\x27])*\x27,?)+))\))?'
+ r'(?: +(?P<unsigned>UNSIGNED))?'
+ r'(?: +(?P<zerofill>ZEROFILL))?'
+ r'(?: +CHARACTER SET +(?P<charset>[\w_]+))?'
+ r'(?: +COLLATE +(?P<collate>[\w_]+))?'
+ r'(?: +(?P<notnull>(?:NOT )?NULL))?'
+ r'(?: +DEFAULT +(?P<default>'
+ r'(?:NULL|\x27(?:\x27\x27|[^\x27])*\x27|\w+'
+ r'(?: +ON UPDATE \w+)?)'
+ r'))?'
+ r'(?: +(?P<autoincr>AUTO_INCREMENT))?'
+            r'(?: +COMMENT +(?P<comment>(?:\x27\x27|[^\x27])+))?'
+ r'(?: +COLUMN_FORMAT +(?P<colfmt>\w+))?'
+ r'(?: +STORAGE +(?P<storage>\w+))?'
+ r'(?: +(?P<extra>.*))?'
+ r',?$'
+ % quotes
+ )
+
+ # Fallback, try to parse as little as possible
+ self._re_column_loose = _re_compile(
+ r' '
+ r'%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +'
+ r'(?P<coltype>\w+)'
+ r'(?:\((?P<arg>(?:\d+|\d+,\d+|\x27(?:\x27\x27|[^\x27])+\x27))\))?'
+ r'.*?(?P<notnull>(?:NOT )NULL)?'
+ % quotes
+ )
+
+ # (PRIMARY|UNIQUE|FULLTEXT|SPATIAL) INDEX `name` (USING (BTREE|HASH))?
+ # (`col` (ASC|DESC)?, `col` (ASC|DESC)?)
+ # KEY_BLOCK_SIZE size | WITH PARSER name
+ self._re_key = _re_compile(
+ r' '
+ r'(?:(?P<type>\S+) )?KEY'
+ r'(?: +%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s)?'
+ r'(?: +USING +(?P<using_pre>\S+))?'
+ r' +\((?P<columns>.+?)\)'
+ r'(?: +USING +(?P<using_post>\S+))?'
+ r'(?: +KEY_BLOCK_SIZE *[ =]? *(?P<keyblock>\S+))?'
+ r'(?: +WITH PARSER +(?P<parser>\S+))?'
+ r',?$'
+ % quotes
+ )
+
+ # CONSTRAINT `name` FOREIGN KEY (`local_col`)
+ # REFERENCES `remote` (`remote_col`)
+ # MATCH FULL | MATCH PARTIAL | MATCH SIMPLE
+ # ON DELETE CASCADE ON UPDATE RESTRICT
+ #
+ # unique constraints come back as KEYs
+ kw = quotes.copy()
+ kw['on'] = 'RESTRICT|CASCADE|SET NULL|NOACTION'
+ self._re_constraint = _re_compile(
+ r' '
+ r'CONSTRAINT +'
+ r'%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +'
+ r'FOREIGN KEY +'
+ r'\((?P<local>[^\)]+?)\) REFERENCES +'
+ r'(?P<table>%(iq)s[^%(fq)s]+%(fq)s'
+ r'(?:\.%(iq)s[^%(fq)s]+%(fq)s)?) +'
+ r'\((?P<foreign>[^\)]+?)\)'
+ r'(?: +(?P<match>MATCH \w+))?'
+ r'(?: +ON DELETE (?P<ondelete>%(on)s))?'
+ r'(?: +ON UPDATE (?P<onupdate>%(on)s))?'
+ % kw
+ )
+
+ # PARTITION
+ #
+ # punt!
+ self._re_partition = _re_compile(r'(?:.*)(?:SUB)?PARTITION(?:.*)')
+
+ # Table-level options (COLLATE, ENGINE, etc.)
+ # Do the string options first, since they have quoted
+ # strings we need to get rid of.
+ for option in _options_of_type_string:
+ self._add_option_string(option)
+
+ for option in ('ENGINE', 'TYPE', 'AUTO_INCREMENT',
+ 'AVG_ROW_LENGTH', 'CHARACTER SET',
+ 'DEFAULT CHARSET', 'CHECKSUM',
+ 'COLLATE', 'DELAY_KEY_WRITE', 'INSERT_METHOD',
+ 'MAX_ROWS', 'MIN_ROWS', 'PACK_KEYS', 'ROW_FORMAT',
+ 'KEY_BLOCK_SIZE'):
+ self._add_option_word(option)
+
+ self._add_option_regex('UNION', r'\([^\)]+\)')
+ self._add_option_regex('TABLESPACE', r'.*? STORAGE DISK')
+ self._add_option_regex(
+ 'RAID_TYPE',
+ r'\w+\s+RAID_CHUNKS\s*\=\s*\w+RAID_CHUNKSIZE\s*=\s*\w+')
+
+ _optional_equals = r'(?:\s*(?:=\s*)|\s+)'
+
+ def _add_option_string(self, directive):
+ regex = (r'(?P<directive>%s)%s'
+ r"'(?P<val>(?:[^']|'')*?)'(?!')" %
+ (re.escape(directive), self._optional_equals))
+ self._pr_options.append(_pr_compile(
+ regex, lambda v: v.replace("\\\\", "\\").replace("''", "'")
+ ))
+
+ def _add_option_word(self, directive):
+ regex = (r'(?P<directive>%s)%s'
+ r'(?P<val>\w+)' %
+ (re.escape(directive), self._optional_equals))
+ self._pr_options.append(_pr_compile(regex))
+
+ def _add_option_regex(self, directive, regex):
+ regex = (r'(?P<directive>%s)%s'
+ r'(?P<val>%s)' %
+ (re.escape(directive), self._optional_equals, regex))
+ self._pr_options.append(_pr_compile(regex))
+
+_options_of_type_string = ('COMMENT', 'DATA DIRECTORY', 'INDEX DIRECTORY',
+ 'PASSWORD', 'CONNECTION')
+
+
+def _pr_compile(regex, cleanup=None):
+ """Prepare a 2-tuple of compiled regex and callable."""
+
+ return (_re_compile(regex), cleanup)
+
+
+def _re_compile(regex):
+ """Compile a string to regex, I and UNICODE."""
+
+ return re.compile(regex, re.I | re.UNICODE)
diff --git a/lib/sqlalchemy/dialects/mysql/types.py b/lib/sqlalchemy/dialects/mysql/types.py
new file mode 100644
index 000000000..9512982f5
--- /dev/null
+++ b/lib/sqlalchemy/dialects/mysql/types.py
@@ -0,0 +1,766 @@
+# mysql/types.py
+# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+import datetime
+from ... import exc, util
+from ... import types as sqltypes
+
+
+class _NumericType(object):
+ """Base for MySQL numeric types.
+
+ This is the base both for NUMERIC as well as INTEGER, hence
+ it's a mixin.
+
+ """
+
+ def __init__(self, unsigned=False, zerofill=False, **kw):
+ self.unsigned = unsigned
+ self.zerofill = zerofill
+ super(_NumericType, self).__init__(**kw)
+
+ def __repr__(self):
+ return util.generic_repr(self,
+ to_inspect=[_NumericType, sqltypes.Numeric])
+
+
+class _FloatType(_NumericType, sqltypes.Float):
+ def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
+ if isinstance(self, (REAL, DOUBLE)) and \
+ (
+ (precision is None and scale is not None) or
+ (precision is not None and scale is None)
+ ):
+ raise exc.ArgumentError(
+ "You must specify both precision and scale or omit "
+ "both altogether.")
+ super(_FloatType, self).__init__(
+ precision=precision, asdecimal=asdecimal, **kw)
+ self.scale = scale
+
+ def __repr__(self):
+ return util.generic_repr(self, to_inspect=[_FloatType,
+ _NumericType,
+ sqltypes.Float])
+
+
+class _IntegerType(_NumericType, sqltypes.Integer):
+ def __init__(self, display_width=None, **kw):
+ self.display_width = display_width
+ super(_IntegerType, self).__init__(**kw)
+
+ def __repr__(self):
+ return util.generic_repr(self, to_inspect=[_IntegerType,
+ _NumericType,
+ sqltypes.Integer])
+
+
+class _StringType(sqltypes.String):
+ """Base for MySQL string types."""
+
+ def __init__(self, charset=None, collation=None,
+ ascii=False, binary=False, unicode=False,
+ national=False, **kw):
+ self.charset = charset
+
+ # allow collate= or collation=
+ kw.setdefault('collation', kw.pop('collate', collation))
+
+ self.ascii = ascii
+ self.unicode = unicode
+ self.binary = binary
+ self.national = national
+ super(_StringType, self).__init__(**kw)
+
+ def __repr__(self):
+ return util.generic_repr(self,
+ to_inspect=[_StringType, sqltypes.String])
+
+
+class _MatchType(sqltypes.Float, sqltypes.MatchType):
+ def __init__(self, **kw):
+ # TODO: float arguments?
+ sqltypes.Float.__init__(self)
+ sqltypes.MatchType.__init__(self)
+
+
+
+class NUMERIC(_NumericType, sqltypes.NUMERIC):
+ """MySQL NUMERIC type."""
+
+ __visit_name__ = 'NUMERIC'
+
+ def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
+ """Construct a NUMERIC.
+
+ :param precision: Total digits in this number. If scale and precision
+ are both None, values are stored to limits allowed by the server.
+
+ :param scale: The number of digits after the decimal point.
+
+ :param unsigned: a boolean, optional.
+
+ :param zerofill: Optional. If true, values will be stored as strings
+ left-padded with zeros. Note that this does not affect the values
+ returned by the underlying database API, which continue to be
+ numeric.
+
+ """
+ super(NUMERIC, self).__init__(precision=precision,
+ scale=scale, asdecimal=asdecimal, **kw)
+
+
+class DECIMAL(_NumericType, sqltypes.DECIMAL):
+ """MySQL DECIMAL type."""
+
+ __visit_name__ = 'DECIMAL'
+
+ def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
+ """Construct a DECIMAL.
+
+ :param precision: Total digits in this number. If scale and precision
+ are both None, values are stored to limits allowed by the server.
+
+ :param scale: The number of digits after the decimal point.
+
+ :param unsigned: a boolean, optional.
+
+ :param zerofill: Optional. If true, values will be stored as strings
+ left-padded with zeros. Note that this does not affect the values
+ returned by the underlying database API, which continue to be
+ numeric.
+
+ """
+ super(DECIMAL, self).__init__(precision=precision, scale=scale,
+ asdecimal=asdecimal, **kw)
+
+
+class DOUBLE(_FloatType):
+ """MySQL DOUBLE type."""
+
+ __visit_name__ = 'DOUBLE'
+
+ def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
+ """Construct a DOUBLE.
+
+ .. note::
+
+ The :class:`.DOUBLE` type by default converts from float
+ to Decimal, using a truncation that defaults to 10 digits.
+ Specify either ``scale=n`` or ``decimal_return_scale=n`` in order
+ to change this scale, or ``asdecimal=False`` to return values
+ directly as Python floating points.
+
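+ For example, a minimal sketch (the column name is illustrative only)::
+
+     from sqlalchemy import Column
+     from sqlalchemy.dialects.mysql import DOUBLE
+
+     # return plain Python floats rather than Decimal values
+     amount = Column('amount', DOUBLE(asdecimal=False))
+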
+ :param precision: Total digits in this number. If scale and precision
+ are both None, values are stored to limits allowed by the server.
+
+ :param scale: The number of digits after the decimal point.
+
+ :param unsigned: a boolean, optional.
+
+ :param zerofill: Optional. If true, values will be stored as strings
+ left-padded with zeros. Note that this does not affect the values
+ returned by the underlying database API, which continue to be
+ numeric.
+
+ """
+ super(DOUBLE, self).__init__(precision=precision, scale=scale,
+ asdecimal=asdecimal, **kw)
+
+
+class REAL(_FloatType, sqltypes.REAL):
+ """MySQL REAL type."""
+
+ __visit_name__ = 'REAL'
+
+ def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
+ """Construct a REAL.
+
+ .. note::
+
+ The :class:`.REAL` type by default converts from float
+ to Decimal, using a truncation that defaults to 10 digits.
+ Specify either ``scale=n`` or ``decimal_return_scale=n`` in order
+ to change this scale, or ``asdecimal=False`` to return values
+ directly as Python floating points.
+
+ :param precision: Total digits in this number. If scale and precision
+ are both None, values are stored to limits allowed by the server.
+
+ :param scale: The number of digits after the decimal point.
+
+ :param unsigned: a boolean, optional.
+
+ :param zerofill: Optional. If true, values will be stored as strings
+ left-padded with zeros. Note that this does not affect the values
+ returned by the underlying database API, which continue to be
+ numeric.
+
+ """
+ super(REAL, self).__init__(precision=precision, scale=scale,
+ asdecimal=asdecimal, **kw)
+
+
+class FLOAT(_FloatType, sqltypes.FLOAT):
+ """MySQL FLOAT type."""
+
+ __visit_name__ = 'FLOAT'
+
+ def __init__(self, precision=None, scale=None, asdecimal=False, **kw):
+ """Construct a FLOAT.
+
+ :param precision: Total digits in this number. If scale and precision
+ are both None, values are stored to limits allowed by the server.
+
+ :param scale: The number of digits after the decimal point.
+
+ :param unsigned: a boolean, optional.
+
+ :param zerofill: Optional. If true, values will be stored as strings
+ left-padded with zeros. Note that this does not affect the values
+ returned by the underlying database API, which continue to be
+ numeric.
+
+ """
+ super(FLOAT, self).__init__(precision=precision, scale=scale,
+ asdecimal=asdecimal, **kw)
+
+ def bind_processor(self, dialect):
+ return None
+
+
+class INTEGER(_IntegerType, sqltypes.INTEGER):
+ """MySQL INTEGER type."""
+
+ __visit_name__ = 'INTEGER'
+
+ def __init__(self, display_width=None, **kw):
+ """Construct an INTEGER.
+
+ :param display_width: Optional, maximum display width for this number.
+
+ :param unsigned: a boolean, optional.
+
+ :param zerofill: Optional. If true, values will be stored as strings
+ left-padded with zeros. Note that this does not affect the values
+ returned by the underlying database API, which continue to be
+ numeric.
+
+ """
+ super(INTEGER, self).__init__(display_width=display_width, **kw)
+
+
+class BIGINT(_IntegerType, sqltypes.BIGINT):
+ """MySQL BIGINTEGER type."""
+
+ __visit_name__ = 'BIGINT'
+
+ def __init__(self, display_width=None, **kw):
+ """Construct a BIGINTEGER.
+
+ :param display_width: Optional, maximum display width for this number.
+
+ :param unsigned: a boolean, optional.
+
+ :param zerofill: Optional. If true, values will be stored as strings
+ left-padded with zeros. Note that this does not affect the values
+ returned by the underlying database API, which continue to be
+ numeric.
+
+ """
+ super(BIGINT, self).__init__(display_width=display_width, **kw)
+
+
+class MEDIUMINT(_IntegerType):
+ """MySQL MEDIUMINTEGER type."""
+
+ __visit_name__ = 'MEDIUMINT'
+
+ def __init__(self, display_width=None, **kw):
+ """Construct a MEDIUMINTEGER
+
+ :param display_width: Optional, maximum display width for this number.
+
+ :param unsigned: a boolean, optional.
+
+ :param zerofill: Optional. If true, values will be stored as strings
+ left-padded with zeros. Note that this does not affect the values
+ returned by the underlying database API, which continue to be
+ numeric.
+
+ """
+ super(MEDIUMINT, self).__init__(display_width=display_width, **kw)
+
+
+class TINYINT(_IntegerType):
+ """MySQL TINYINT type."""
+
+ __visit_name__ = 'TINYINT'
+
+ def __init__(self, display_width=None, **kw):
+ """Construct a TINYINT.
+
+ :param display_width: Optional, maximum display width for this number.
+
+ :param unsigned: a boolean, optional.
+
+ :param zerofill: Optional. If true, values will be stored as strings
+ left-padded with zeros. Note that this does not affect the values
+ returned by the underlying database API, which continue to be
+ numeric.
+
+ """
+ super(TINYINT, self).__init__(display_width=display_width, **kw)
+
+
+class SMALLINT(_IntegerType, sqltypes.SMALLINT):
+ """MySQL SMALLINTEGER type."""
+
+ __visit_name__ = 'SMALLINT'
+
+ def __init__(self, display_width=None, **kw):
+ """Construct a SMALLINTEGER.
+
+ :param display_width: Optional, maximum display width for this number.
+
+ :param unsigned: a boolean, optional.
+
+ :param zerofill: Optional. If true, values will be stored as strings
+ left-padded with zeros. Note that this does not affect the values
+ returned by the underlying database API, which continue to be
+ numeric.
+
+ """
+ super(SMALLINT, self).__init__(display_width=display_width, **kw)
+
+
+class BIT(sqltypes.TypeEngine):
+ """MySQL BIT type.
+
+ This type is for MySQL 5.0.3 or greater for MyISAM, and 5.0.5 or greater
+ for MEMORY, InnoDB and BDB. For older versions, use a
+ MSTinyInteger() type.
+
+ """
+
+ __visit_name__ = 'BIT'
+
+ def __init__(self, length=None):
+ """Construct a BIT.
+
+ :param length: Optional, number of bits.
+
+ """
+ self.length = length
+
+ def result_processor(self, dialect, coltype):
+ """Convert a MySQL's 64 bit, variable length binary string to a long.
+
+ TODO: this is MySQL-db, pyodbc specific. OurSQL and mysqlconnector
+ already do this, so this logic should be moved to those dialects.
+
+ """
+
+ def process(value):
+ if value is not None:
+ v = 0
+ for i in value:
+ if not isinstance(i, int):
+ i = ord(i) # convert byte to int on Python 2
+ v = v << 8 | i
+ return v
+ return value
+ return process
+
+
+class TIME(sqltypes.TIME):
+ """MySQL TIME type. """
+
+ __visit_name__ = 'TIME'
+
+ def __init__(self, timezone=False, fsp=None):
+ """Construct a MySQL TIME type.
+
+ :param timezone: not used by the MySQL dialect.
+ :param fsp: fractional seconds precision value.
+ MySQL 5.6 supports storage of fractional seconds;
+ this parameter will be used when emitting DDL
+ for the TIME type.
+
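+ For example, a minimal sketch (the column name is illustrative only)::
+
+     from sqlalchemy import Column
+     from sqlalchemy.dialects.mysql import TIME
+
+     # renders "duration TIME(6)" in the CREATE TABLE statement
+     duration = Column('duration', TIME(fsp=6))
+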
+ .. note::
+
+ DBAPI driver support for fractional seconds may
+ be limited; current support includes
+ MySQL Connector/Python.
+
+ .. versionadded:: 0.8 The MySQL-specific TIME
+ type as well as fractional seconds support.
+
+ """
+ super(TIME, self).__init__(timezone=timezone)
+ self.fsp = fsp
+
+ def result_processor(self, dialect, coltype):
+ time = datetime.time
+
+ def process(value):
+ # convert from a timedelta value
+ if value is not None:
+ microseconds = value.microseconds
+ seconds = value.seconds
+ minutes = seconds // 60
+ return time(minutes // 60,
+ minutes % 60,
+ seconds - minutes * 60,
+ microsecond=microseconds)
+ else:
+ return None
+ return process
+
+
+class TIMESTAMP(sqltypes.TIMESTAMP):
+ """MySQL TIMESTAMP type.
+
+ """
+
+ __visit_name__ = 'TIMESTAMP'
+
+ def __init__(self, timezone=False, fsp=None):
+ """Construct a MySQL TIMESTAMP type.
+
+ :param timezone: not used by the MySQL dialect.
+ :param fsp: fractional seconds precision value.
+ MySQL 5.6.4 supports storage of fractional seconds;
+ this parameter will be used when emitting DDL
+ for the TIMESTAMP type.
+
+ .. note::
+
+ DBAPI driver support for fractional seconds may
+ be limited; current support includes
+ MySQL Connector/Python.
+
+ .. versionadded:: 0.8.5 Added MySQL-specific :class:`.mysql.TIMESTAMP`
+ with fractional seconds support.
+
+ """
+ super(TIMESTAMP, self).__init__(timezone=timezone)
+ self.fsp = fsp
+
+
+class DATETIME(sqltypes.DATETIME):
+ """MySQL DATETIME type.
+
+ """
+
+ __visit_name__ = 'DATETIME'
+
+ def __init__(self, timezone=False, fsp=None):
+ """Construct a MySQL DATETIME type.
+
+ :param timezone: not used by the MySQL dialect.
+ :param fsp: fractional seconds precision value.
+ MySQL 5.6.4 supports storage of fractional seconds;
+ this parameter will be used when emitting DDL
+ for the DATETIME type.
+
+ .. note::
+
+ DBAPI driver support for fractional seconds may
+ be limited; current support includes
+ MySQL Connector/Python.
+
+ .. versionadded:: 0.8.5 Added MySQL-specific :class:`.mysql.DATETIME`
+ with fractional seconds support.
+
+ """
+ super(DATETIME, self).__init__(timezone=timezone)
+ self.fsp = fsp
+
+
+class YEAR(sqltypes.TypeEngine):
+ """MySQL YEAR type, for single byte storage of years 1901-2155."""
+
+ __visit_name__ = 'YEAR'
+
+ def __init__(self, display_width=None):
+ self.display_width = display_width
+
+
+class TEXT(_StringType, sqltypes.TEXT):
+ """MySQL TEXT type, for text up to 2^16 characters."""
+
+ __visit_name__ = 'TEXT'
+
+ def __init__(self, length=None, **kw):
+ """Construct a TEXT.
+
+ :param length: Optional, if provided the server may optimize storage
+ by substituting the smallest TEXT type sufficient to store
+ ``length`` characters.
+
+ :param charset: Optional, a column-level character set for this string
+ value. Takes precedence over 'ascii' or 'unicode' short-hand.
+
+ :param collation: Optional, a column-level collation for this string
+ value. Takes precedence over 'binary' short-hand.
+
+ :param ascii: Defaults to False: short-hand for the ``latin1``
+ character set, generates ASCII in schema.
+
+ :param unicode: Defaults to False: short-hand for the ``ucs2``
+ character set, generates UNICODE in schema.
+
+ :param national: Optional. If true, use the server's configured
+ national character set.
+
+ :param binary: Defaults to False: short-hand, pick the binary
+ collation type that matches the column's character set. Generates
+ BINARY in schema. This does not affect the type of data stored,
+ only the collation of character data.
+
+ """
+ super(TEXT, self).__init__(length=length, **kw)
+
+
+class TINYTEXT(_StringType):
+ """MySQL TINYTEXT type, for text up to 2^8 characters."""
+
+ __visit_name__ = 'TINYTEXT'
+
+ def __init__(self, **kwargs):
+ """Construct a TINYTEXT.
+
+ :param charset: Optional, a column-level character set for this string
+ value. Takes precedence over 'ascii' or 'unicode' short-hand.
+
+ :param collation: Optional, a column-level collation for this string
+ value. Takes precedence over 'binary' short-hand.
+
+ :param ascii: Defaults to False: short-hand for the ``latin1``
+ character set, generates ASCII in schema.
+
+ :param unicode: Defaults to False: short-hand for the ``ucs2``
+ character set, generates UNICODE in schema.
+
+ :param national: Optional. If true, use the server's configured
+ national character set.
+
+ :param binary: Defaults to False: short-hand, pick the binary
+ collation type that matches the column's character set. Generates
+ BINARY in schema. This does not affect the type of data stored,
+ only the collation of character data.
+
+ """
+ super(TINYTEXT, self).__init__(**kwargs)
+
+
+class MEDIUMTEXT(_StringType):
+ """MySQL MEDIUMTEXT type, for text up to 2^24 characters."""
+
+ __visit_name__ = 'MEDIUMTEXT'
+
+ def __init__(self, **kwargs):
+ """Construct a MEDIUMTEXT.
+
+ :param charset: Optional, a column-level character set for this string
+ value. Takes precedence over 'ascii' or 'unicode' short-hand.
+
+ :param collation: Optional, a column-level collation for this string
+ value. Takes precedence over 'binary' short-hand.
+
+ :param ascii: Defaults to False: short-hand for the ``latin1``
+ character set, generates ASCII in schema.
+
+ :param unicode: Defaults to False: short-hand for the ``ucs2``
+ character set, generates UNICODE in schema.
+
+ :param national: Optional. If true, use the server's configured
+ national character set.
+
+ :param binary: Defaults to False: short-hand, pick the binary
+ collation type that matches the column's character set. Generates
+ BINARY in schema. This does not affect the type of data stored,
+ only the collation of character data.
+
+ """
+ super(MEDIUMTEXT, self).__init__(**kwargs)
+
+
+class LONGTEXT(_StringType):
+ """MySQL LONGTEXT type, for text up to 2^32 characters."""
+
+ __visit_name__ = 'LONGTEXT'
+
+ def __init__(self, **kwargs):
+ """Construct a LONGTEXT.
+
+ :param charset: Optional, a column-level character set for this string
+ value. Takes precedence over 'ascii' or 'unicode' short-hand.
+
+ :param collation: Optional, a column-level collation for this string
+ value. Takes precedence over 'binary' short-hand.
+
+ :param ascii: Defaults to False: short-hand for the ``latin1``
+ character set, generates ASCII in schema.
+
+ :param unicode: Defaults to False: short-hand for the ``ucs2``
+ character set, generates UNICODE in schema.
+
+ :param national: Optional. If true, use the server's configured
+ national character set.
+
+ :param binary: Defaults to False: short-hand, pick the binary
+ collation type that matches the column's character set. Generates
+ BINARY in schema. This does not affect the type of data stored,
+ only the collation of character data.
+
+ """
+ super(LONGTEXT, self).__init__(**kwargs)
+
+
+class VARCHAR(_StringType, sqltypes.VARCHAR):
+ """MySQL VARCHAR type, for variable-length character data."""
+
+ __visit_name__ = 'VARCHAR'
+
+ def __init__(self, length=None, **kwargs):
+ """Construct a VARCHAR.
+
+ :param charset: Optional, a column-level character set for this string
+ value. Takes precedence over 'ascii' or 'unicode' short-hand.
+
+ :param collation: Optional, a column-level collation for this string
+ value. Takes precedence over 'binary' short-hand.
+
+ :param ascii: Defaults to False: short-hand for the ``latin1``
+ character set, generates ASCII in schema.
+
+ :param unicode: Defaults to False: short-hand for the ``ucs2``
+ character set, generates UNICODE in schema.
+
+ :param national: Optional. If true, use the server's configured
+ national character set.
+
+ :param binary: Defaults to False: short-hand, pick the binary
+ collation type that matches the column's character set. Generates
+ BINARY in schema. This does not affect the type of data stored,
+ only the collation of character data.
+
+ """
+ super(VARCHAR, self).__init__(length=length, **kwargs)
+
+
+class CHAR(_StringType, sqltypes.CHAR):
+ """MySQL CHAR type, for fixed-length character data."""
+
+ __visit_name__ = 'CHAR'
+
+ def __init__(self, length=None, **kwargs):
+ """Construct a CHAR.
+
+ :param length: Maximum data length, in characters.
+
+ :param binary: Optional, use the default binary collation for the
+ national character set. This does not affect the type of data
+ stored, use a BINARY type for binary data.
+
+ :param collation: Optional, request a particular collation. Must be
+ compatible with the national character set.
+
+ """
+ super(CHAR, self).__init__(length=length, **kwargs)
+
+ @classmethod
+ def _adapt_string_for_cast(self, type_):
+ # copy the given string type into a CHAR
+ # for the purposes of rendering a CAST expression
+ type_ = sqltypes.to_instance(type_)
+ if isinstance(type_, sqltypes.CHAR):
+ return type_
+ elif isinstance(type_, _StringType):
+ return CHAR(
+ length=type_.length,
+ charset=type_.charset,
+ collation=type_.collation,
+ ascii=type_.ascii,
+ binary=type_.binary,
+ unicode=type_.unicode,
+ national=False # not supported in CAST
+ )
+ else:
+ return CHAR(length=type_.length)
+
+
+class NVARCHAR(_StringType, sqltypes.NVARCHAR):
+ """MySQL NVARCHAR type.
+
+ For variable-length character data in the server's configured national
+ character set.
+ """
+
+ __visit_name__ = 'NVARCHAR'
+
+ def __init__(self, length=None, **kwargs):
+ """Construct an NVARCHAR.
+
+ :param length: Maximum data length, in characters.
+
+ :param binary: Optional, use the default binary collation for the
+ national character set. This does not affect the type of data
+ stored, use a BINARY type for binary data.
+
+ :param collation: Optional, request a particular collation. Must be
+ compatible with the national character set.
+
+ """
+ kwargs['national'] = True
+ super(NVARCHAR, self).__init__(length=length, **kwargs)
+
+
+class NCHAR(_StringType, sqltypes.NCHAR):
+ """MySQL NCHAR type.
+
+ For fixed-length character data in the server's configured national
+ character set.
+ """
+
+ __visit_name__ = 'NCHAR'
+
+ def __init__(self, length=None, **kwargs):
+ """Construct an NCHAR.
+
+ :param length: Maximum data length, in characters.
+
+ :param binary: Optional, use the default binary collation for the
+ national character set. This does not affect the type of data
+ stored, use a BINARY type for binary data.
+
+ :param collation: Optional, request a particular collation. Must be
+ compatible with the national character set.
+
+ """
+ kwargs['national'] = True
+ super(NCHAR, self).__init__(length=length, **kwargs)
+
+
+class TINYBLOB(sqltypes._Binary):
+ """MySQL TINYBLOB type, for binary data up to 2^8 bytes."""
+
+ __visit_name__ = 'TINYBLOB'
+
+
+class MEDIUMBLOB(sqltypes._Binary):
+ """MySQL MEDIUMBLOB type, for binary data up to 2^24 bytes."""
+
+ __visit_name__ = 'MEDIUMBLOB'
+
+
+class LONGBLOB(sqltypes._Binary):
+ """MySQL LONGBLOB type, for binary data up to 2^32 bytes."""
+
+ __visit_name__ = 'LONGBLOB'
diff --git a/lib/sqlalchemy/dialects/oracle/zxjdbc.py b/lib/sqlalchemy/dialects/oracle/zxjdbc.py
index 3fbe921ca..8872daf81 100644
--- a/lib/sqlalchemy/dialects/oracle/zxjdbc.py
+++ b/lib/sqlalchemy/dialects/oracle/zxjdbc.py
@@ -10,8 +10,7 @@
:name: zxJDBC for Jython
:dbapi: zxjdbc
:connectstring: oracle+zxjdbc://user:pass@host/dbname
- :driverurl: http://www.oracle.com/technology/software/tech/java/\
-sqlj_jdbc/index.html.
+ :driverurl: http://www.oracle.com/technetwork/database/features/jdbc/index-091264.html
.. note:: Jython is not supported by current versions of SQLAlchemy. The
zxjdbc dialect should be considered as experimental.
diff --git a/lib/sqlalchemy/dialects/postgresql/array.py b/lib/sqlalchemy/dialects/postgresql/array.py
index b88f139de..f4316d318 100644
--- a/lib/sqlalchemy/dialects/postgresql/array.py
+++ b/lib/sqlalchemy/dialects/postgresql/array.py
@@ -84,12 +84,20 @@ class array(expression.Tuple):
super(array, self).__init__(*clauses, **kw)
self.type = ARRAY(self.type)
- def _bind_param(self, operator, obj):
- return array([
- expression.BindParameter(None, o, _compared_to_operator=operator,
- _compared_to_type=self.type, unique=True)
- for o in obj
- ])
+ def _bind_param(self, operator, obj, _assume_scalar=False, type_=None):
+ if _assume_scalar or operator is operators.getitem:
+ # if getitem->slice were called, Indexable produces
+ # a Slice object from that
+ assert isinstance(obj, int)
+ return expression.BindParameter(
+ None, obj, _compared_to_operator=operator,
+ type_=type_,
+ _compared_to_type=self.type, unique=True)
+
+ else:
+ return array([
+ self._bind_param(operator, o, _assume_scalar=True, type_=type_)
+ for o in obj])
def self_group(self, against=None):
if (against in (
@@ -106,15 +114,15 @@ CONTAINED_BY = operators.custom_op("<@", precedence=5)
OVERLAP = operators.custom_op("&&", precedence=5)
-class ARRAY(SchemaEventTarget, sqltypes.Array):
+class ARRAY(SchemaEventTarget, sqltypes.ARRAY):
"""Postgresql ARRAY type.
.. versionchanged:: 1.1 The :class:`.postgresql.ARRAY` type is now
- a subclass of the core :class:`.Array` type.
+ a subclass of the core :class:`.types.ARRAY` type.
The :class:`.postgresql.ARRAY` type is constructed in the same way
- as the core :class:`.Array` type; a member type is required, and a
+ as the core :class:`.types.ARRAY` type; a member type is required, and a
number of dimensions is recommended if the type is to be used for more
than one dimension::
@@ -125,9 +133,9 @@ class ARRAY(SchemaEventTarget, sqltypes.Array):
)
The :class:`.postgresql.ARRAY` type provides all operations defined on the
- core :class:`.Array` type, including support for "dimensions", indexed
- access, and simple matching such as :meth:`.Array.Comparator.any`
- and :meth:`.Array.Comparator.all`. :class:`.postgresql.ARRAY` class also
+ core :class:`.types.ARRAY` type, including support for "dimensions", indexed
+ access, and simple matching such as :meth:`.types.ARRAY.Comparator.any`
+ and :meth:`.types.ARRAY.Comparator.all`. :class:`.postgresql.ARRAY` class also
provides PostgreSQL-specific methods for containment operations, including
:meth:`.postgresql.ARRAY.Comparator.contains`
:meth:`.postgresql.ARRAY.Comparator.contained_by`,
@@ -144,20 +152,20 @@ class ARRAY(SchemaEventTarget, sqltypes.Array):
.. seealso::
- :class:`.types.Array` - base array type
+ :class:`.types.ARRAY` - base array type
:class:`.postgresql.array` - produces a literal array value.
"""
- class Comparator(sqltypes.Array.Comparator):
+ class Comparator(sqltypes.ARRAY.Comparator):
"""Define comparison operations for :class:`.ARRAY`.
Note that these operations are in addition to those provided
- by the base :class:`.types.Array.Comparator` class, including
- :meth:`.types.Array.Comparator.any` and
- :meth:`.types.Array.Comparator.all`.
+ by the base :class:`.types.ARRAY.Comparator` class, including
+ :meth:`.types.ARRAY.Comparator.any` and
+ :meth:`.types.ARRAY.Comparator.all`.
"""
diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py
index e9001f79a..692305319 100644
--- a/lib/sqlalchemy/dialects/postgresql/base.py
+++ b/lib/sqlalchemy/dialects/postgresql/base.py
@@ -579,7 +579,7 @@ use the following workaround type::
def handle_raw_string(value):
inner = re.match(r"^{(.*)}$", value).group(1)
- return inner.split(",")
+ return inner.split(",") if inner else []
def process(value):
if value is None:
@@ -1056,6 +1056,16 @@ class PGCompiler(compiler.SQLCompiler):
self.process(element.stop, **kw),
)
+ def visit_json_getitem_op_binary(self, binary, operator, **kw):
+ return self._generate_generic_binary(
+ binary, " -> ", **kw
+ )
+
+ def visit_json_path_getitem_op_binary(self, binary, operator, **kw):
+ return self._generate_generic_binary(
+ binary, " #> ", **kw
+ )
+
def visit_getitem_binary(self, binary, operator, **kw):
return "%s[%s]" % (
self.process(binary.left, **kw),
@@ -1471,8 +1481,11 @@ class PGIdentifierPreparer(compiler.IdentifierPreparer):
raise exc.CompileError("Postgresql ENUM type requires a name.")
name = self.quote(type_.name)
- if not self.omit_schema and use_schema and type_.schema is not None:
- name = self.quote_schema(type_.schema) + "." + name
+ effective_schema = self.schema_for_object(type_)
+
+ if not self.omit_schema and use_schema and \
+ effective_schema is not None:
+ name = self.quote_schema(effective_schema) + "." + name
return name
@@ -1565,10 +1578,15 @@ class PGExecutionContext(default.DefaultExecutionContext):
name = "%s_%s_seq" % (tab, col)
column._postgresql_seq_name = seq_name = name
- sch = column.table.schema
- if sch is not None:
+ if column.table is not None:
+ effective_schema = self.connection.schema_for_object(
+ column.table)
+ else:
+ effective_schema = None
+
+ if effective_schema is not None:
exc = "select nextval('\"%s\".\"%s\"')" % \
- (sch, seq_name)
+ (effective_schema, seq_name)
else:
exc = "select nextval('\"%s\"')" % \
(seq_name, )
diff --git a/lib/sqlalchemy/dialects/postgresql/ext.py b/lib/sqlalchemy/dialects/postgresql/ext.py
index 1a443c2d7..66c7ed0e5 100644
--- a/lib/sqlalchemy/dialects/postgresql/ext.py
+++ b/lib/sqlalchemy/dialects/postgresql/ext.py
@@ -159,7 +159,7 @@ static/sql-createtable.html#SQL-CREATETABLE-EXCLUDE
def array_agg(*arg, **kw):
"""Postgresql-specific form of :class:`.array_agg`, ensures
return type is :class:`.postgresql.ARRAY` and not
- the plain :class:`.types.Array`.
+ the plain :class:`.types.ARRAY`.
.. versionadded:: 1.1
diff --git a/lib/sqlalchemy/dialects/postgresql/hstore.py b/lib/sqlalchemy/dialects/postgresql/hstore.py
index b7b0fc007..d2d20386a 100644
--- a/lib/sqlalchemy/dialects/postgresql/hstore.py
+++ b/lib/sqlalchemy/dialects/postgresql/hstore.py
@@ -12,34 +12,33 @@ from .array import ARRAY
from ... import types as sqltypes
from ...sql import functions as sqlfunc
from ...sql import operators
-from ...sql.operators import custom_op
from ... import util
__all__ = ('HSTORE', 'hstore')
-INDEX = custom_op(
- "->", precedence=5, natural_self_precedent=True
+GETITEM = operators.custom_op(
+ "->", precedence=15, natural_self_precedent=True,
)
HAS_KEY = operators.custom_op(
- "?", precedence=5, natural_self_precedent=True
+ "?", precedence=15, natural_self_precedent=True
)
HAS_ALL = operators.custom_op(
- "?&", precedence=5, natural_self_precedent=True
+ "?&", precedence=15, natural_self_precedent=True
)
HAS_ANY = operators.custom_op(
- "?|", precedence=5, natural_self_precedent=True
+ "?|", precedence=15, natural_self_precedent=True
)
CONTAINS = operators.custom_op(
- "@>", precedence=5, natural_self_precedent=True
+ "@>", precedence=15, natural_self_precedent=True
)
CONTAINED_BY = operators.custom_op(
- "<@", precedence=5, natural_self_precedent=True
+ "<@", precedence=15, natural_self_precedent=True
)
@@ -166,7 +165,7 @@ class HSTORE(sqltypes.Indexable, sqltypes.Concatenable, sqltypes.TypeEngine):
CONTAINED_BY, other, result_type=sqltypes.Boolean)
def _setup_getitem(self, index):
- return INDEX, index, self.type.text_type
+ return GETITEM, index, self.type.text_type
def defined(self, key):
"""Boolean expression. Test for presence of a non-NULL value for
diff --git a/lib/sqlalchemy/dialects/postgresql/json.py b/lib/sqlalchemy/dialects/postgresql/json.py
index 8a50270f5..6ff9fd88e 100644
--- a/lib/sqlalchemy/dialects/postgresql/json.py
+++ b/lib/sqlalchemy/dialects/postgresql/json.py
@@ -6,10 +6,10 @@
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from __future__ import absolute_import
-import collections
import json
+import collections
-from .base import ischema_names
+from .base import ischema_names, colspecs
from ... import types as sqltypes
from ...sql import operators
from ...sql import elements
@@ -17,70 +17,68 @@ from ... import util
__all__ = ('JSON', 'JSONB')
-
-# json : returns json
-INDEX = operators.custom_op(
- "->", precedence=5, natural_self_precedent=True
-)
-
-# path operator: returns json
-PATHIDX = operators.custom_op(
- "#>", precedence=5, natural_self_precedent=True
-)
-
-# json + astext: returns text
ASTEXT = operators.custom_op(
- "->>", precedence=5, natural_self_precedent=True
+ "->>", precedence=15, natural_self_precedent=True,
)
-# path operator + astext: returns text
-ASTEXT_PATHIDX = operators.custom_op(
- "#>>", precedence=5, natural_self_precedent=True
+JSONPATH_ASTEXT = operators.custom_op(
+ "#>>", precedence=15, natural_self_precedent=True,
)
+
HAS_KEY = operators.custom_op(
- "?", precedence=5, natural_self_precedent=True
+ "?", precedence=15, natural_self_precedent=True
)
HAS_ALL = operators.custom_op(
- "?&", precedence=5, natural_self_precedent=True
+ "?&", precedence=15, natural_self_precedent=True
)
HAS_ANY = operators.custom_op(
- "?|", precedence=5, natural_self_precedent=True
+ "?|", precedence=15, natural_self_precedent=True
)
CONTAINS = operators.custom_op(
- "@>", precedence=5, natural_self_precedent=True
+ "@>", precedence=15, natural_self_precedent=True
)
CONTAINED_BY = operators.custom_op(
- "<@", precedence=5, natural_self_precedent=True
+ "<@", precedence=15, natural_self_precedent=True
)
-class JSON(sqltypes.Indexable, sqltypes.TypeEngine):
- """Represent the Postgresql JSON type.
+class JSONPathType(sqltypes.JSON.JSONPathType):
+ def bind_processor(self, dialect):
+ def process(value):
+ assert isinstance(value, collections.Sequence)
+ tokens = [util.text_type(elem) for elem in value]
+ return "{%s}" % (", ".join(tokens))
- The :class:`.JSON` type stores arbitrary JSON format data, e.g.::
+ return process
- data_table = Table('data_table', metadata,
- Column('id', Integer, primary_key=True),
- Column('data', JSON)
- )
+colspecs[sqltypes.JSON.JSONPathType] = JSONPathType
- with engine.connect() as conn:
- conn.execute(
- data_table.insert(),
- data = {"key1": "value1", "key2": "value2"}
- )
- :class:`.JSON` provides several operations:
+class JSON(sqltypes.JSON):
+ """Represent the Postgresql JSON type.
+
+ This type is a specialization of the Core-level :class:`.types.JSON`
+ type. Be sure to read the documentation for :class:`.types.JSON` for
+ important tips regarding treatment of NULL values and ORM use.
+
+ .. versionchanged:: 1.1 :class:`.postgresql.JSON` is now a Postgresql-
+ specific specialization of the new :class:`.types.JSON` type.
+
+ The operators provided by the Postgresql version of :class:`.JSON`
+ include:
* Index operations (the ``->`` operator)::
data_table.c.data['some key']
+ data_table.c.data[5]
+
+
* Index operations returning text (the ``->>`` operator)::
data_table.c.data['some key'].astext == 'some value'
@@ -92,11 +90,11 @@ class JSON(sqltypes.Indexable, sqltypes.TypeEngine):
* Path index operations (the ``#>`` operator)::
- data_table.c.data[('key_1', 'key_2', ..., 'key_n')]
+ data_table.c.data[('key_1', 'key_2', 5, ..., 'key_n')]
* Path index operations returning text (the ``#>>`` operator)::
- data_table.c.data[('key_1', 'key_2', ..., 'key_n')].astext == \
+ data_table.c.data[('key_1', 'key_2', 5, ..., 'key_n')].astext == \
'some value'
.. versionchanged:: 1.1 The :meth:`.ColumnElement.cast` operator on
@@ -108,36 +106,6 @@ class JSON(sqltypes.Indexable, sqltypes.TypeEngine):
:class:`.JSON` by default, so that further JSON-oriented instructions
may be called upon the result type.
- The :class:`.JSON` type, when used with the SQLAlchemy ORM, does not
- detect in-place mutations to the structure. In order to detect these, the
- :mod:`sqlalchemy.ext.mutable` extension must be used. This extension will
- allow "in-place" changes to the datastructure to produce events which
- will be detected by the unit of work. See the example at :class:`.HSTORE`
- for a simple example involving a dictionary.
-
- When working with NULL values, the :class:`.JSON` type recommends the
- use of two specific constants in order to differentiate between a column
- that evaluates to SQL NULL, e.g. no value, vs. the JSON-encoded string
- of ``"null"``. To insert or select against a value that is SQL NULL,
- use the constant :func:`.null`::
-
- conn.execute(table.insert(), json_value=null())
-
- To insert or select against a value that is JSON ``"null"``, use the
- constant :attr:`.JSON.NULL`::
-
- conn.execute(table.insert(), json_value=JSON.NULL)
-
- The :class:`.JSON` type supports a flag
- :paramref:`.JSON.none_as_null` which when set to True will result
- in the Python constant ``None`` evaluating to the value of SQL
- NULL, and when set to False results in the Python constant
- ``None`` evaluating to the value of JSON ``"null"``. The Python
- value ``None`` may be used in conjunction with either
- :attr:`.JSON.NULL` and :func:`.null` in order to indicate NULL
- values, but care must be taken as to the value of the
- :paramref:`.JSON.none_as_null` in these cases.
-
Custom serializers and deserializers are specified at the dialect level,
that is using :func:`.create_engine`. The reason for this is that when
using psycopg2, the DBAPI only allows serializers at the per-cursor
@@ -151,43 +119,16 @@ class JSON(sqltypes.Indexable, sqltypes.TypeEngine):
When using the psycopg2 dialect, the json_deserializer is registered
against the database using ``psycopg2.extras.register_default_json``.
- .. versionadded:: 0.9
-
.. seealso::
+ :class:`.types.JSON` - Core level JSON type
+
:class:`.JSONB`
"""
- __visit_name__ = 'JSON'
-
- hashable = False
astext_type = sqltypes.Text()
- NULL = util.symbol('JSON_NULL')
- """Describe the json value of NULL.
-
- This value is used to force the JSON value of ``"null"`` to be
- used as the value. A value of Python ``None`` will be recognized
- either as SQL NULL or JSON ``"null"``, based on the setting
- of the :paramref:`.JSON.none_as_null` flag; the :attr:`.JSON.NULL`
- constant can be used to always resolve to JSON ``"null"`` regardless
- of this setting. This is in contrast to the :func:`.sql.null` construct,
- which always resolves to SQL NULL. E.g.::
-
- from sqlalchemy import null
- from sqlalchemy.dialects.postgresql import JSON
-
- obj1 = MyObject(json_value=null()) # will *always* insert SQL NULL
- obj2 = MyObject(json_value=JSON.NULL) # will *always* insert JSON string "null"
-
- session.add_all([obj1, obj2])
- session.commit()
-
- .. versionadded:: 1.1
-
- """
-
def __init__(self, none_as_null=False, astext_type=None):
"""Construct a :class:`.JSON` type.
@@ -210,15 +151,14 @@ class JSON(sqltypes.Indexable, sqltypes.TypeEngine):
:attr:`.JSON.Comparator.astext`
accessor on indexed attributes. Defaults to :class:`.types.Text`.
- .. versionadded:: 1.1.0
+ .. versionadded:: 1.1
"""
- self.none_as_null = none_as_null
+ super(JSON, self).__init__(none_as_null=none_as_null)
if astext_type is not None:
self.astext_type = astext_type
- class Comparator(
- sqltypes.Indexable.Comparator, sqltypes.Concatenable.Comparator):
+ class Comparator(sqltypes.JSON.Comparator):
"""Define comparison operations for :class:`.JSON`."""
@property
@@ -235,69 +175,19 @@ class JSON(sqltypes.Indexable, sqltypes.TypeEngine):
:meth:`.ColumnElement.cast`
"""
- against = self.expr.operator
- if against is PATHIDX:
- against = ASTEXT_PATHIDX
- else:
- against = ASTEXT
- return self.expr.left.operate(
- against, self.expr.right, result_type=self.type.astext_type)
-
- def _setup_getitem(self, index):
- if not isinstance(index, util.string_types):
- assert isinstance(index, collections.Sequence)
- tokens = [util.text_type(elem) for elem in index]
- index = "{%s}" % (", ".join(tokens))
- operator = PATHIDX
+ if isinstance(self.expr.right.type, sqltypes.JSON.JSONPathType):
+ return self.expr.left.operate(
+ JSONPATH_ASTEXT,
+ self.expr.right, result_type=self.type.astext_type)
else:
- operator = INDEX
-
- return operator, index, self.type
+ return self.expr.left.operate(
+ ASTEXT, self.expr.right, result_type=self.type.astext_type)
comparator_factory = Comparator
- @property
- def should_evaluate_none(self):
- return not self.none_as_null
-
- def bind_processor(self, dialect):
- json_serializer = dialect._json_serializer or json.dumps
- if util.py2k:
- encoding = dialect.encoding
- else:
- encoding = None
-
- def process(value):
- if value is self.NULL:
- value = None
- elif isinstance(value, elements.Null) or (
- value is None and self.none_as_null
- ):
- return None
- if encoding:
- return json_serializer(value).encode(encoding)
- else:
- return json_serializer(value)
-
- return process
-
- def result_processor(self, dialect, coltype):
- json_deserializer = dialect._json_deserializer or json.loads
- if util.py2k:
- encoding = dialect.encoding
- else:
- encoding = None
-
- def process(value):
- if value is None:
- return None
- if encoding:
- value = value.decode(encoding)
- return json_deserializer(value)
- return process
-
+colspecs[sqltypes.JSON] = JSON
ischema_names['json'] = JSON
diff --git a/lib/sqlalchemy/dialects/postgresql/pg8000.py b/lib/sqlalchemy/dialects/postgresql/pg8000.py
index c71f689a3..2c745e6f7 100644
--- a/lib/sqlalchemy/dialects/postgresql/pg8000.py
+++ b/lib/sqlalchemy/dialects/postgresql/pg8000.py
@@ -155,6 +155,7 @@ class PGDialect_pg8000(PGDialect):
sqltypes.Numeric: _PGNumericNoBind,
sqltypes.Float: _PGNumeric,
JSON: _PGJSON,
+ sqltypes.JSON: _PGJSON
}
)
diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
index d33554922..82fcc9054 100644
--- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py
+++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
@@ -534,6 +534,7 @@ class PGDialect_psycopg2(PGDialect):
sqltypes.Enum: _PGEnum, # needs force_unicode
HSTORE: _PGHStore,
JSON: _PGJSON,
+ sqltypes.JSON: _PGJSON,
JSONB: _PGJSONB,
UUID: _PGUUID
}
diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py
index a1786d16c..0e048aeff 100644
--- a/lib/sqlalchemy/dialects/sqlite/base.py
+++ b/lib/sqlalchemy/dialects/sqlite/base.py
@@ -352,19 +352,29 @@ The index will be rendered at create time as::
.. versionadded:: 0.9.9
+.. _sqlite_dotted_column_names:
+
Dotted Column Names
-------------------
Using table or column names that explicitly have periods in them is
**not recommended**. While this is generally a bad idea for relational
databases in general, as the dot is a syntactically significant character,
-the SQLite driver has a bug which requires that SQLAlchemy filter out these
-dots in result sets.
+the SQLite driver up until version **3.10.0** of SQLite has a bug which
+requires that SQLAlchemy filter out these dots in result sets.
+
+.. versionchanged:: 1.1
+
+ The following SQLite issue has been resolved as of version 3.10.0
+ of SQLite. SQLAlchemy as of **1.1** automatically disables its internal
+ workarounds based on detection of this version.
The bug, entirely outside of SQLAlchemy, can be illustrated thusly::
import sqlite3
+ assert sqlite3.sqlite_version_info < (3, 10, 0), "bug is fixed in this version"
+
conn = sqlite3.connect(":memory:")
cursor = conn.cursor()
@@ -997,9 +1007,13 @@ class SQLiteIdentifierPreparer(compiler.IdentifierPreparer):
class SQLiteExecutionContext(default.DefaultExecutionContext):
@util.memoized_property
def _preserve_raw_colnames(self):
- return self.execution_options.get("sqlite_raw_colnames", False)
+ return not self.dialect._broken_dotted_colnames or \
+ self.execution_options.get("sqlite_raw_colnames", False)
def _translate_colname(self, colname):
+ # TODO: detect SQLite version 3.10.0 or greater;
+ # see [ticket:3633]
+
# adjust for dotted column names. SQLite
# in the case of UNION may store col names as
# "tablename.colname", or if using an attached database,
@@ -1019,7 +1033,6 @@ class SQLiteDialect(default.DefaultDialect):
supports_empty_insert = False
supports_cast = True
supports_multivalues_insert = True
- supports_right_nested_joins = False
default_paramstyle = 'qmark'
execution_ctx_cls = SQLiteExecutionContext
@@ -1044,6 +1057,7 @@ class SQLiteDialect(default.DefaultDialect):
]
_broken_fk_pragma_quotes = False
+ _broken_dotted_colnames = False
def __init__(self, isolation_level=None, native_datetime=False, **kwargs):
default.DefaultDialect.__init__(self, **kwargs)
@@ -1056,6 +1070,11 @@ class SQLiteDialect(default.DefaultDialect):
self.native_datetime = native_datetime
if self.dbapi is not None:
+ self.supports_right_nested_joins = (
+ self.dbapi.sqlite_version_info >= (3, 7, 16))
+ self._broken_dotted_colnames = (
+ self.dbapi.sqlite_version_info < (3, 10, 0)
+ )
self.supports_default_values = (
self.dbapi.sqlite_version_info >= (3, 3, 8))
self.supports_cast = (
diff --git a/lib/sqlalchemy/dialects/sybase/base.py b/lib/sqlalchemy/dialects/sybase/base.py
index b3f8e307a..187521831 100644
--- a/lib/sqlalchemy/dialects/sybase/base.py
+++ b/lib/sqlalchemy/dialects/sybase/base.py
@@ -336,11 +336,7 @@ class SybaseSQLCompiler(compiler.SQLCompiler):
s += "TOP %s " % (limit,)
offset = select._offset
if offset:
- if not limit:
- # FIXME: sybase doesn't allow an offset without a limit
- # so use a huge value for TOP here
- s += "TOP 1000000 "
- s += "START AT %s " % (offset + 1,)
+ raise NotImplementedError("Sybase ASE does not support OFFSET")
return s
def get_from_hint_text(self, table, text):
diff --git a/lib/sqlalchemy/engine/__init__.py b/lib/sqlalchemy/engine/__init__.py
index 0b0d50329..02c35d6a9 100644
--- a/lib/sqlalchemy/engine/__init__.py
+++ b/lib/sqlalchemy/engine/__init__.py
@@ -53,6 +53,7 @@ url.py
from .interfaces import (
Connectable,
+ CreateEnginePlugin,
Dialect,
ExecutionContext,
ExceptionContext,
@@ -390,7 +391,7 @@ def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
"""Create a new Engine instance using a configuration dictionary.
The dictionary is typically produced from a config file.
-
+
The keys of interest to ``engine_from_config()`` should be prefixed, e.g.
``sqlalchemy.url``, ``sqlalchemy.echo``, etc. The 'prefix' argument
indicates the prefix to be searched for. Each matching key (after the
diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py
index eaa435d45..0b928566d 100644
--- a/lib/sqlalchemy/engine/base.py
+++ b/lib/sqlalchemy/engine/base.py
@@ -14,6 +14,7 @@ from __future__ import with_statement
import sys
from .. import exc, util, log, interfaces
from ..sql import util as sql_util
+from ..sql import schema
from .interfaces import Connectable, ExceptionContext
from .util import _distill_params
import contextlib
@@ -44,6 +45,22 @@ class Connection(Connectable):
"""
+ schema_for_object = schema._schema_getter(None)
+ """Return the ".schema" attribute for an object.
+
+ Used for :class:`.Table`, :class:`.Sequence` and similar objects,
+ and takes into account
+ the :paramref:`.Connection.execution_options.schema_translate_map`
+ parameter.
+
+ .. versionadded:: 1.1
+
+ .. seealso::
+
+ :ref:`schema_translating`
+
+ """
+
def __init__(self, engine, connection=None, close_with_result=False,
_branch_from=None, _execution_options=None,
_dispatch=None,
@@ -67,6 +84,7 @@ class Connection(Connectable):
self.should_close_with_result = False
self.dispatch = _dispatch
self._has_events = _branch_from._has_events
+ self.schema_for_object = _branch_from.schema_for_object
else:
self.__connection = connection \
if connection is not None else engine.raw_connection()
@@ -277,6 +295,19 @@ class Connection(Connectable):
of many DBAPIs. The flag is currently understood only by the
psycopg2 dialect.
+ :param schema_translate_map: Available on: Connection, Engine.
+ A dictionary mapping schema names to schema names, that will be
+ applied to the :paramref:`.Table.schema` element of each
+ :class:`.Table` encountered when SQL or DDL expression elements
+ are compiled into strings; the resulting schema name will be
+ converted based on presence in the map of the original name.
+
+ .. versionadded:: 1.1
+
+ .. seealso::
+
+ :ref:`schema_translating`
+
"""
c = self._clone()
c._execution_options = c._execution_options.union(opt)
@@ -959,7 +990,10 @@ class Connection(Connectable):
dialect = self.dialect
- compiled = ddl.compile(dialect=dialect)
+ compiled = ddl.compile(
+ dialect=dialect,
+ schema_translate_map=self.schema_for_object
+ if not self.schema_for_object.is_default else None)
ret = self._execute_context(
dialect,
dialect.execution_ctx_cls._init_ddl,
@@ -990,17 +1024,26 @@ class Connection(Connectable):
dialect = self.dialect
if 'compiled_cache' in self._execution_options:
- key = dialect, elem, tuple(sorted(keys)), len(distilled_params) > 1
+ key = (
+ dialect, elem, tuple(sorted(keys)),
+ self.schema_for_object.hash_key,
+ len(distilled_params) > 1
+ )
compiled_sql = self._execution_options['compiled_cache'].get(key)
if compiled_sql is None:
compiled_sql = elem.compile(
dialect=dialect, column_keys=keys,
- inline=len(distilled_params) > 1)
+ inline=len(distilled_params) > 1,
+ schema_translate_map=self.schema_for_object
+ if not self.schema_for_object.is_default else None
+ )
self._execution_options['compiled_cache'][key] = compiled_sql
else:
compiled_sql = elem.compile(
dialect=dialect, column_keys=keys,
- inline=len(distilled_params) > 1)
+ inline=len(distilled_params) > 1,
+ schema_translate_map=self.schema_for_object
+ if not self.schema_for_object.is_default else None)
ret = self._execute_context(
dialect,
@@ -1155,7 +1198,7 @@ class Connection(Connectable):
if context.compiled:
context.post_exec()
- if context.is_crud:
+ if context.is_crud or context.is_text:
result = context._setup_crud_result_proxy()
else:
result = context.get_result_proxy()
@@ -1686,6 +1729,22 @@ class Engine(Connectable, log.Identified):
_has_events = False
_connection_cls = Connection
+ schema_for_object = schema._schema_getter(None)
+ """Return the ".schema" attribute for an object.
+
+ Used for :class:`.Table`, :class:`.Sequence` and similar objects,
+ and takes into account
+ the :paramref:`.Connection.execution_options.schema_translate_map`
+ parameter.
+
+ .. versionadded:: 1.1
+
+ .. seealso::
+
+ :ref:`schema_translating`
+
+ """
+
def __init__(self, pool, dialect, url,
logging_name=None, echo=None, proxy=None,
execution_options=None
diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py
index 9a7b80bfd..9f845e79d 100644
--- a/lib/sqlalchemy/engine/default.py
+++ b/lib/sqlalchemy/engine/default.py
@@ -16,7 +16,7 @@ as the base class for their own corresponding classes.
import re
import random
from . import reflection, interfaces, result
-from ..sql import compiler, expression
+from ..sql import compiler, expression, schema
from .. import types as sqltypes
from .. import exc, util, pool, processors
import codecs
@@ -398,10 +398,22 @@ class DefaultDialect(interfaces.Dialect):
if not branch:
self._set_connection_isolation(connection, isolation_level)
+ if 'schema_translate_map' in opts:
+ getter = schema._schema_getter(opts['schema_translate_map'])
+ engine.schema_for_object = getter
+
+ @event.listens_for(engine, "engine_connect")
+ def set_schema_translate_map(connection, branch):
+ connection.schema_for_object = getter
+
def set_connection_execution_options(self, connection, opts):
if 'isolation_level' in opts:
self._set_connection_isolation(connection, opts['isolation_level'])
+ if 'schema_translate_map' in opts:
+ getter = schema._schema_getter(opts['schema_translate_map'])
+ connection.schema_for_object = getter
+
def _set_connection_isolation(self, connection, level):
if connection.in_transaction():
util.warn(
@@ -462,11 +474,29 @@ class DefaultDialect(interfaces.Dialect):
self.set_isolation_level(dbapi_conn, self.default_isolation_level)
+class StrCompileDialect(DefaultDialect):
+
+ statement_compiler = compiler.StrSQLCompiler
+ ddl_compiler = compiler.DDLCompiler
+ type_compiler = compiler.StrSQLTypeCompiler
+ preparer = compiler.IdentifierPreparer
+
+ supports_sequences = True
+ sequences_optional = True
+ preexecute_autoincrement_sequences = False
+ implicit_returning = False
+
+ supports_native_boolean = True
+
+ supports_simple_order_by_label = True
+
+
class DefaultExecutionContext(interfaces.ExecutionContext):
isinsert = False
isupdate = False
isdelete = False
is_crud = False
+ is_text = False
isddl = False
executemany = False
compiled = None
@@ -531,7 +561,8 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
connection._execution_options)
self.result_column_struct = (
- compiled._result_columns, compiled._ordered_columns)
+ compiled._result_columns, compiled._ordered_columns,
+ compiled._textual_ordered_columns)
self.unicode_statement = util.text_type(compiled)
if not dialect.supports_unicode_statements:
@@ -543,6 +574,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
self.isinsert = compiled.isinsert
self.isupdate = compiled.isupdate
self.isdelete = compiled.isdelete
+ self.is_text = compiled.isplaintext
if not parameters:
self.compiled_parameters = [compiled.construct_params()]
@@ -622,6 +654,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
self.root_connection = connection
self._dbapi_connection = dbapi_connection
self.dialect = connection.dialect
+ self.is_text = True
# plain text statement
self.execution_options = connection._execution_options
diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py
index 3bad765df..c84823d1e 100644
--- a/lib/sqlalchemy/engine/interfaces.py
+++ b/lib/sqlalchemy/engine/interfaces.py
@@ -7,7 +7,7 @@
"""Define core interfaces used by the engine system."""
-from .. import util, event
+from .. import util
# backwards compat
from ..sql.compiler import Compiled, TypeCompiler
@@ -781,6 +781,111 @@ class Dialect(object):
pass
+class CreateEnginePlugin(object):
+ """A set of hooks intended to augment the construction of an
+ :class:`.Engine` object based on entrypoint names in a URL.
+
+ The purpose of :class:`.CreateEnginePlugin` is to allow third-party
+ systems to apply engine, pool and dialect level event listeners without
+ the need for the target application to be modified; instead, the plugin
+ names can be added to the database URL. Target applications for
+ :class:`.CreateEnginePlugin` include:
+
+ * connection and SQL performance tools, e.g. which use events to track
+ number of checkouts and/or time spent with statements
+
+ * connectivity plugins such as proxies
+
+ Plugins are registered using entry points in a similar way as that
+ of dialects::
+
+ entry_points={
+ 'sqlalchemy.plugins': [
+ 'myplugin = myapp.plugins:MyPlugin'
+            ]
+        }
+
+ A plugin that uses the above names would be invoked from a database
+ URL as in::
+
+ from sqlalchemy import create_engine
+
+ engine = create_engine(
+ "mysql+pymysql://scott:tiger@localhost/test?plugin=myplugin")
+
+ The ``plugin`` argument supports multiple instances, so that a URL
+ may specify multiple plugins; they are loaded in the order stated
+ in the URL::
+
+ engine = create_engine(
+ "mysql+pymysql://scott:tiger@localhost/"
+        "test?plugin=plugin_one&plugin=plugin_two&plugin=plugin_three")
+
+ A plugin can receive additional arguments from the URL string as
+ well as from the keyword arguments passed to :func:`.create_engine`.
+ The :class:`.URL` object and the keyword dictionary are passed to the
+ constructor so that these arguments can be extracted from the url's
+ :attr:`.URL.query` collection as well as from the dictionary::
+
+ class MyPlugin(CreateEnginePlugin):
+ def __init__(self, url, kwargs):
+ self.my_argument_one = url.query.pop('my_argument_one')
+ self.my_argument_two = url.query.pop('my_argument_two')
+ self.my_argument_three = kwargs.pop('my_argument_three', None)
+
+ Arguments like those illustrated above would be consumed from the
+ following::
+
+ from sqlalchemy import create_engine
+
+ engine = create_engine(
+ "mysql+pymysql://scott:tiger@localhost/"
+ "test?plugin=myplugin&my_argument_one=foo&my_argument_two=bar",
+ my_argument_three='bat')
+
+ The URL and dictionary are used for subsequent setup of the engine
+ as they are, so the plugin can modify their arguments in-place.
+ Arguments that are only understood by the plugin should be popped
+ or otherwise removed so that they aren't interpreted as erroneous
+ arguments afterwards.
+
+ When the engine creation process completes and produces the
+ :class:`.Engine` object, it is again passed to the plugin via the
+ :meth:`.CreateEnginePlugin.engine_created` hook. In this hook, additional
+ changes can be made to the engine, most typically involving setup of
+ events (e.g. those defined in :ref:`core_event_toplevel`).
+
+ .. versionadded:: 1.1
+
+ """
+ def __init__(self, url, kwargs):
+        """Construct a new :class:`.CreateEnginePlugin`.
+
+ The plugin object is instantiated individually for each call
+ to :func:`.create_engine`. A single :class:`.Engine` will be
+ passed to the :meth:`.CreateEnginePlugin.engine_created` method
+ corresponding to this URL.
+
+ :param url: the :class:`.URL` object. The plugin should inspect
+ what it needs here as well as remove its custom arguments from the
+ :attr:`.URL.query` collection. The URL can be modified in-place
+ in any other way as well.
+        :param kwargs: The keyword arguments passed to :func:`.create_engine`.
+ The plugin can read and modify this dictionary in-place, to affect
+ the ultimate arguments used to create the engine. It should
+ remove its custom arguments from the dictionary as well.
+
+ """
+ self.url = url
+
+ def engine_created(self, engine):
+ """Receive the :class:`.Engine` object when it is fully constructed.
+
+ The plugin may make additional changes to the engine, such as
+ registering engine or connection pool events.
+
+ """
+
+
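As an illustration of the hooks documented above, a hedged sketch of a plugin that counts connection pool checkouts; the entrypoint name ``logsample`` and the ``logsample_label`` URL argument are hypothetical::

    from sqlalchemy import event
    from sqlalchemy.engine import CreateEnginePlugin

    class CheckoutCounterPlugin(CreateEnginePlugin):
        """Enabled via '?plugin=logsample' in the database URL."""

        def __init__(self, url, kwargs):
            # consume plugin-specific arguments so they aren't passed
            # along to the dialect
            self.label = url.query.pop("logsample_label", "default")
            self.checkouts = 0

        def engine_created(self, engine):
            @event.listens_for(engine, "checkout")
            def on_checkout(dbapi_conn, rec, proxy):
                self.checkouts += 1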
class ExecutionContext(object):
"""A messenger object for a Dialect that corresponds to a single
execution.
diff --git a/lib/sqlalchemy/engine/reflection.py b/lib/sqlalchemy/engine/reflection.py
index 59eed51ec..6880660ce 100644
--- a/lib/sqlalchemy/engine/reflection.py
+++ b/lib/sqlalchemy/engine/reflection.py
@@ -529,7 +529,8 @@ class Inspector(object):
"""
dialect = self.bind.dialect
- schema = table.schema
+ schema = self.bind.schema_for_object(table)
+
table_name = table.name
# get table-level arguments that are specifically
diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py
index 7d1425c28..cc4ac74cd 100644
--- a/lib/sqlalchemy/engine/result.py
+++ b/lib/sqlalchemy/engine/result.py
@@ -84,8 +84,8 @@ except ImportError:
raise
if index is None:
raise exc.InvalidRequestError(
- "Ambiguous column name '%s' in result set! "
- "try 'use_labels' option on select statement." % key)
+ "Ambiguous column name '%s' in "
+ "result set column descriptions" % obj)
if processor is not None:
return processor(self._row[index])
else:
@@ -186,97 +186,29 @@ class ResultMetaData(object):
"""Handle cursor.description, applying additional info from an execution
context."""
- def __init__(self, parent, metadata):
+ __slots__ = (
+ '_keymap', 'case_sensitive', 'matched_on_name',
+ '_processors', 'keys', '_orig_processors')
+
+ def __init__(self, parent, cursor_description):
context = parent.context
dialect = context.dialect
- typemap = dialect.dbapi_type_map
- translate_colname = context._translate_colname
- self.case_sensitive = case_sensitive = dialect.case_sensitive
+ self.case_sensitive = dialect.case_sensitive
+ self.matched_on_name = False
if context.result_column_struct:
- result_columns, cols_are_ordered = context.result_column_struct
+ result_columns, cols_are_ordered, textual_ordered = \
+ context.result_column_struct
num_ctx_cols = len(result_columns)
else:
- num_ctx_cols = None
-
- if num_ctx_cols and \
- cols_are_ordered and \
- num_ctx_cols == len(metadata):
- # case 1 - SQL expression statement, number of columns
- # in result matches number of cols in compiled. This is the
- # vast majority case for SQL expression constructs. In this
- # case we don't bother trying to parse or match up to
- # the colnames in the result description.
- raw = [
- (
- idx,
- key,
- name.lower() if not case_sensitive else name,
- context.get_result_processor(
- type_, key, metadata[idx][1]
- ),
- obj,
- None
- ) for idx, (key, name, obj, type_)
- in enumerate(result_columns)
- ]
- self.keys = [
- elem[0] for elem in result_columns
- ]
- else:
- # case 2 - raw string, or number of columns in result does
- # not match number of cols in compiled. The raw string case
- # is very common. The latter can happen
- # when text() is used with only a partial typemap, or
- # in the extremely unlikely cases where the compiled construct
- # has a single element with multiple col expressions in it
- # (e.g. has commas embedded) or there's some kind of statement
- # that is adding extra columns.
- # In all these cases we fall back to the "named" approach
- # that SQLAlchemy has used up through 0.9.
-
- if num_ctx_cols:
- result_map = self._create_result_map(
- result_columns, case_sensitive)
-
- raw = []
- self.keys = []
- untranslated = None
- for idx, rec in enumerate(metadata):
- colname = rec[0]
- coltype = rec[1]
-
- if dialect.description_encoding:
- colname = dialect._description_decoder(colname)
-
- if translate_colname:
- colname, untranslated = translate_colname(colname)
-
- if dialect.requires_name_normalize:
- colname = dialect.normalize_name(colname)
-
- self.keys.append(colname)
- if not case_sensitive:
- colname = colname.lower()
-
- if num_ctx_cols:
- try:
- ctx_rec = result_map[colname]
- except KeyError:
- mapped_type = typemap.get(coltype, sqltypes.NULLTYPE)
- obj = None
- else:
- obj = ctx_rec[1]
- mapped_type = ctx_rec[2]
- else:
- mapped_type = typemap.get(coltype, sqltypes.NULLTYPE)
- obj = None
- processor = context.get_result_processor(
- mapped_type, colname, coltype)
+ result_columns = cols_are_ordered = \
+ num_ctx_cols = textual_ordered = False
- raw.append(
- (idx, colname, colname, processor, obj, untranslated)
- )
+ # merge cursor.description with the column info
+ # present in the compiled structure, if any
+ raw = self._merge_cursor_description(
+ context, cursor_description, result_columns,
+ num_ctx_cols, cols_are_ordered, textual_ordered)
# keymap indexes by integer index...
self._keymap = dict([
@@ -288,12 +220,16 @@ class ResultMetaData(object):
# views like __iter__ and slices
self._processors = [elem[3] for elem in raw]
+ # keymap by primary string...
+ by_key = dict([
+ (elem[2], (elem[3], elem[4], elem[0]))
+ for elem in raw
+ ])
+
+ # for compiled SQL constructs, copy additional lookup keys into
+ # the key lookup map, such as Column objects, labels,
+ # column keys and other names
if num_ctx_cols:
- # keymap by primary string...
- by_key = dict([
- (elem[2], (elem[3], elem[4], elem[0]))
- for elem in raw
- ])
# if by-primary-string dictionary smaller (or bigger?!) than
# number of columns, assume we have dupes, rewrite
@@ -304,30 +240,250 @@ class ResultMetaData(object):
for rec in raw:
key = rec[1]
if key in seen:
- by_key[key] = (None, by_key[key][1], None)
+ # this is an "ambiguous" element, replacing
+ # the full record in the map
+ by_key[key] = (None, key, None)
seen.add(key)
- # update keymap with secondary "object"-based keys
+ # copy secondary elements from compiled columns
+ # into self._keymap, write in the potentially "ambiguous"
+ # element
+ self._keymap.update([
+ (obj_elem, by_key[elem[2]])
+ for elem in raw if elem[4]
+ for obj_elem in elem[4]
+ ])
+
+ # if we did a pure positional match, then reset the
+ # original "expression element" back to the "unambiguous"
+ # entry. This is a new behavior in 1.1 which impacts
+ # TextAsFrom but also straight compiled SQL constructs.
+ if not self.matched_on_name:
+ self._keymap.update([
+ (elem[4][0], (elem[3], elem[4], elem[0]))
+ for elem in raw if elem[4]
+ ])
+ else:
+ # no dupes - copy secondary elements from compiled
+ # columns into self._keymap
+ self._keymap.update([
+ (obj_elem, (elem[3], elem[4], elem[0]))
+ for elem in raw if elem[4]
+ for obj_elem in elem[4]
+ ])
+
+ # update keymap with primary string names taking
+ # precedence
+ self._keymap.update(by_key)
+
+ # update keymap with "translated" names (sqlite-only thing)
+ if not num_ctx_cols and context._translate_colname:
self._keymap.update([
- (obj_elem, by_key[elem[2]])
- for elem in raw if elem[4]
- for obj_elem in elem[4]
+ (elem[5], self._keymap[elem[2]])
+ for elem in raw if elem[5]
])
- # update keymap with primary string names taking
- # precedence
- self._keymap.update(by_key)
+ def _merge_cursor_description(
+ self, context, cursor_description, result_columns,
+ num_ctx_cols, cols_are_ordered, textual_ordered):
+ """Merge a cursor.description with compiled result column information.
+
+ There are at least four separate strategies used here, selected
+ depending on the type of SQL construct used to start with.
+
+ The most common case is that of the compiled SQL expression construct,
+ which generated the column names present in the raw SQL string and
+ which has the identical number of columns as were reported by
+ cursor.description. In this case, we assume a 1-1 positional mapping
+ between the entries in cursor.description and the compiled object.
+ This is also the most performant case as we disregard extracting /
+ decoding the column names present in cursor.description since we
+ already have the desired name we generated in the compiled SQL
+ construct.
+
+ The next common case is that of the completely raw string SQL,
+ such as passed to connection.execute(). In this case we have no
+ compiled construct to work with, so we extract and decode the
+ names from cursor.description and index those as the primary
+ result row target keys.
+
+ The remaining fairly common case is that of the textual SQL
+ that includes at least partial column information; this is when
+        we use a :class:`.TextAsFrom` construct. This construct may have
+ unordered or ordered column information. In the ordered case, we
+ merge the cursor.description and the compiled construct's information
+ positionally, and warn if there are additional description names
+        present; however, we still decode the names in cursor.description
+ as we don't have a guarantee that the names in the columns match
+ on these. In the unordered case, we match names in cursor.description
+ to that of the compiled construct based on name matching.
+ In both of these cases, the cursor.description names and the column
+ expression objects and names are indexed as result row target keys.
+
+ The final case is much less common, where we have a compiled
+ non-textual SQL expression construct, but the number of columns
+ in cursor.description doesn't match what's in the compiled
+ construct. We make the guess here that there might be textual
+ column expressions in the compiled construct that themselves include
+ a comma in them causing them to split. We do the same name-matching
+ as with textual non-ordered columns.
+
+ The name-matched system of merging is the same as that used by
+        SQLAlchemy for all cases up through the 0.9 series. Positional
+ matching for compiled SQL expressions was introduced in 1.0 as a
+ major performance feature, and positional matching for textual
+ :class:`.TextAsFrom` objects in 1.1. As name matching is no longer
+ a common case, it was acceptable to factor it into smaller generator-
+ oriented methods that are easier to understand, but incur slightly
+ more performance overhead.
+
+ """
+
+ case_sensitive = context.dialect.case_sensitive
+
+ if num_ctx_cols and \
+ cols_are_ordered and \
+ not textual_ordered and \
+ num_ctx_cols == len(cursor_description):
+ self.keys = [elem[0] for elem in result_columns]
+ # pure positional 1-1 case; doesn't need to read
+ # the names from cursor.description
+ return [
+ (
+ idx,
+ key,
+ name.lower() if not case_sensitive else name,
+ context.get_result_processor(
+ type_, key, cursor_description[idx][1]
+ ),
+ obj,
+ None
+ ) for idx, (key, name, obj, type_)
+ in enumerate(result_columns)
+ ]
else:
- self._keymap.update([
- (elem[2], (elem[3], elem[4], elem[0]))
- for elem in raw
- ])
- # update keymap with "translated" names (sqlite-only thing)
+ # name-based or text-positional cases, where we need
+ # to read cursor.description names
+ if textual_ordered:
+ # textual positional case
+ raw_iterator = self._merge_textual_cols_by_position(
+ context, cursor_description, result_columns)
+ elif num_ctx_cols:
+ # compiled SQL with a mismatch of description cols
+ # vs. compiled cols, or textual w/ unordered columns
+ raw_iterator = self._merge_cols_by_name(
+ context, cursor_description, result_columns)
+ else:
+ # no compiled SQL, just a raw string
+ raw_iterator = self._merge_cols_by_none(
+ context, cursor_description)
+
+ return [
+ (
+ idx, colname, colname,
+ context.get_result_processor(
+ mapped_type, colname, coltype),
+ obj, untranslated)
+
+ for idx, colname, mapped_type, coltype, obj, untranslated
+ in raw_iterator
+ ]
+
+ def _colnames_from_description(self, context, cursor_description):
+ """Extract column names and data types from a cursor.description.
+
+ Applies unicode decoding, column translation, "normalization",
+ and case sensitivity rules to the names based on the dialect.
+
+ """
+
+ dialect = context.dialect
+ case_sensitive = dialect.case_sensitive
+ translate_colname = context._translate_colname
+ description_decoder = dialect._description_decoder \
+ if dialect.description_encoding else None
+ normalize_name = dialect.normalize_name \
+ if dialect.requires_name_normalize else None
+ untranslated = None
+
+ self.keys = []
+
+ for idx, rec in enumerate(cursor_description):
+ colname = rec[0]
+ coltype = rec[1]
+
+ if description_decoder:
+ colname = description_decoder(colname)
+
if translate_colname:
- self._keymap.update([
- (elem[5], self._keymap[elem[2]])
- for elem in raw if elem[5]
- ])
+ colname, untranslated = translate_colname(colname)
+
+ if normalize_name:
+ colname = normalize_name(colname)
+
+ self.keys.append(colname)
+ if not case_sensitive:
+ colname = colname.lower()
+
+ yield idx, colname, untranslated, coltype
+
+ def _merge_textual_cols_by_position(
+ self, context, cursor_description, result_columns):
+ dialect = context.dialect
+ typemap = dialect.dbapi_type_map
+ num_ctx_cols = len(result_columns) if result_columns else None
+
+ if num_ctx_cols > len(cursor_description):
+ util.warn(
+ "Number of columns in textual SQL (%d) is "
+ "smaller than number of columns requested (%d)" % (
+                    len(cursor_description), num_ctx_cols
+ ))
+
+ seen = set()
+ for idx, colname, untranslated, coltype in \
+ self._colnames_from_description(context, cursor_description):
+ if idx < num_ctx_cols:
+ ctx_rec = result_columns[idx]
+ obj = ctx_rec[2]
+ mapped_type = ctx_rec[3]
+ if obj[0] in seen:
+ raise exc.InvalidRequestError(
+ "Duplicate column expression requested "
+ "in textual SQL: %r" % obj[0])
+ seen.add(obj[0])
+ else:
+ mapped_type = typemap.get(coltype, sqltypes.NULLTYPE)
+ obj = None
+
+ yield idx, colname, mapped_type, coltype, obj, untranslated
+
+ def _merge_cols_by_name(self, context, cursor_description, result_columns):
+ dialect = context.dialect
+ typemap = dialect.dbapi_type_map
+ case_sensitive = dialect.case_sensitive
+ result_map = self._create_result_map(result_columns, case_sensitive)
+
+ self.matched_on_name = True
+ for idx, colname, untranslated, coltype in \
+ self._colnames_from_description(context, cursor_description):
+ try:
+ ctx_rec = result_map[colname]
+ except KeyError:
+ mapped_type = typemap.get(coltype, sqltypes.NULLTYPE)
+ obj = None
+ else:
+ obj = ctx_rec[1]
+ mapped_type = ctx_rec[2]
+ yield idx, colname, mapped_type, coltype, obj, untranslated
+
+ def _merge_cols_by_none(self, context, cursor_description):
+ dialect = context.dialect
+ typemap = dialect.dbapi_type_map
+ for idx, colname, untranslated, coltype in \
+ self._colnames_from_description(context, cursor_description):
+ mapped_type = typemap.get(coltype, sqltypes.NULLTYPE)
+ yield idx, colname, mapped_type, coltype, None, untranslated
@classmethod
def _create_result_map(cls, result_columns, case_sensitive=True):
@@ -347,22 +503,6 @@ class ResultMetaData(object):
d[key] = rec
return d
- @util.pending_deprecation("0.8", "sqlite dialect uses "
- "_translate_colname() now")
- def _set_keymap_synonym(self, name, origname):
- """Set a synonym for the given name.
-
- Some dialects (SQLite at the moment) may use this to
- adjust the column names that are significant within a
- row.
-
- """
- rec = (processor, obj, i) = self._keymap[origname if
- self.case_sensitive
- else origname.lower()]
- if self._keymap.setdefault(name, rec) is not rec:
- self._keymap[name] = (processor, obj, None)
-
def _key_fallback(self, key, raiseerr=True):
map = self._keymap
result = None
@@ -427,8 +567,8 @@ class ResultMetaData(object):
if index is None:
raise exc.InvalidRequestError(
- "Ambiguous column name '%s' in result set! "
- "try 'use_labels' option on select statement." % key)
+ "Ambiguous column name '%s' in "
+ "result set column descriptions" % obj)
return operator.itemgetter(index)
@@ -441,6 +581,7 @@ class ResultMetaData(object):
),
'keys': self.keys,
"case_sensitive": self.case_sensitive,
+ "matched_on_name": self.matched_on_name
}
def __setstate__(self, state):
@@ -454,7 +595,7 @@ class ResultMetaData(object):
keymap[key] = (None, None, index)
self.keys = state['keys']
self.case_sensitive = state['case_sensitive']
- self._echo = False
+ self.matched_on_name = state['matched_on_name']
class ResultProxy(object):
@@ -511,20 +652,20 @@ class ResultProxy(object):
return has_key(key)
def _init_metadata(self):
- metadata = self._cursor_description()
- if metadata is not None:
+ cursor_description = self._cursor_description()
+ if cursor_description is not None:
if self.context.compiled and \
'compiled_cache' in self.context.execution_options:
if self.context.compiled._cached_metadata:
self._metadata = self.context.compiled._cached_metadata
else:
self._metadata = self.context.compiled._cached_metadata = \
- ResultMetaData(self, metadata)
+ ResultMetaData(self, cursor_description)
else:
- self._metadata = ResultMetaData(self, metadata)
+ self._metadata = ResultMetaData(self, cursor_description)
if self._echo:
self.context.engine.logger.debug(
- "Col %r", tuple(x[0] for x in metadata))
+ "Col %r", tuple(x[0] for x in cursor_description))
def keys(self):
"""Return the current set of string keys for rows."""
diff --git a/lib/sqlalchemy/engine/strategies.py b/lib/sqlalchemy/engine/strategies.py
index a539ee9f7..d8e2d4764 100644
--- a/lib/sqlalchemy/engine/strategies.py
+++ b/lib/sqlalchemy/engine/strategies.py
@@ -18,8 +18,9 @@ New strategies can be added via new ``EngineStrategy`` classes.
from operator import attrgetter
from sqlalchemy.engine import base, threadlocal, url
-from sqlalchemy import util, exc, event
+from sqlalchemy import util, event
from sqlalchemy import pool as poollib
+from sqlalchemy.sql import schema
strategies = {}
@@ -48,6 +49,10 @@ class DefaultEngineStrategy(EngineStrategy):
# create url.URL object
u = url.make_url(name_or_url)
+ plugins = u._instantiate_plugins(kwargs)
+
+ u.query.pop('plugin', None)
+
entrypoint = u._get_entrypoint()
dialect_cls = entrypoint.get_dialect_cls(u)
@@ -169,6 +174,9 @@ class DefaultEngineStrategy(EngineStrategy):
if entrypoint is not dialect_cls:
entrypoint.engine_created(engine)
+ for plugin in plugins:
+ plugin.engine_created(engine)
+
return engine
@@ -226,6 +234,8 @@ class MockEngineStrategy(EngineStrategy):
dialect = property(attrgetter('_dialect'))
name = property(lambda s: s._dialect.name)
+ schema_for_object = schema._schema_getter(None)
+
def contextual_connect(self, **kwargs):
return self
diff --git a/lib/sqlalchemy/engine/url.py b/lib/sqlalchemy/engine/url.py
index 32e3f8a6b..9a955948a 100644
--- a/lib/sqlalchemy/engine/url.py
+++ b/lib/sqlalchemy/engine/url.py
@@ -17,7 +17,7 @@ be used directly and is also accepted directly by ``create_engine()``.
import re
from .. import exc, util
from . import Dialect
-from ..dialects import registry
+from ..dialects import registry, plugins
class URL(object):
@@ -117,6 +117,14 @@ class URL(object):
else:
return self.drivername.split('+')[1]
+ def _instantiate_plugins(self, kwargs):
+ plugin_names = util.to_list(self.query.get('plugin', ()))
+
+ return [
+ plugins.load(plugin_name)(self, kwargs)
+ for plugin_name in plugin_names
+ ]
+
def _get_entrypoint(self):
"""Return the "entry point" dialect class.
diff --git a/lib/sqlalchemy/ext/automap.py b/lib/sqlalchemy/ext/automap.py
index 218ed64e1..616cd070d 100644
--- a/lib/sqlalchemy/ext/automap.py
+++ b/lib/sqlalchemy/ext/automap.py
@@ -112,7 +112,7 @@ explicit table declaration::
Base.classes.user_order
Specifying Classes Explicitly
-============================
+=============================
The :mod:`.sqlalchemy.ext.automap` extension allows classes to be defined
explicitly, in a way similar to that of the :class:`.DeferredReflection` class.
diff --git a/lib/sqlalchemy/ext/compiler.py b/lib/sqlalchemy/ext/compiler.py
index 9717e41c0..d4d2ed2ef 100644
--- a/lib/sqlalchemy/ext/compiler.py
+++ b/lib/sqlalchemy/ext/compiler.py
@@ -121,9 +121,19 @@ below where we generate a CHECK constraint that embeds a SQL expression::
def compile_my_constraint(constraint, ddlcompiler, **kw):
return "CONSTRAINT %s CHECK (%s)" % (
constraint.name,
- ddlcompiler.sql_compiler.process(constraint.expression)
+ ddlcompiler.sql_compiler.process(
+ constraint.expression, literal_binds=True)
)
+Above, we add an additional flag to the process step as called by
+:meth:`.SQLCompiler.process`, which is the ``literal_binds`` flag. This
+indicates that any SQL expression which refers to a :class:`.BindParameter`
+object or other "literal" object such as those which refer to strings or
+integers should be rendered **in-place**, rather than being referred to as
+a bound parameter; when emitting DDL, bound parameters are typically not
+supported.
+
+
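The same flag can also be used when stringifying a construct directly; a brief sketch with an ad-hoc ``users`` table::

    from sqlalchemy import table, column, select

    users = table("users", column("id"), column("name"))

    stmt = select([users.c.id]).where(users.c.name == "ed")

    # render the comparison value in-place instead of as a bound parameter
    print(stmt.compile(compile_kwargs={"literal_binds": True}))
    # SELECT users.id FROM users WHERE users.name = 'ed'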
.. _enabling_compiled_autocommit:
Enabling Autocommit on a Construct
diff --git a/lib/sqlalchemy/ext/declarative/api.py b/lib/sqlalchemy/ext/declarative/api.py
index dfc47ce95..5fe427bc2 100644
--- a/lib/sqlalchemy/ext/declarative/api.py
+++ b/lib/sqlalchemy/ext/declarative/api.py
@@ -397,6 +397,15 @@ class ConcreteBase(object):
'polymorphic_identity':'manager',
'concrete':True}
+ .. seealso::
+
+ :class:`.AbstractConcreteBase`
+
+ :ref:`concrete_inheritance`
+
+ :ref:`inheritance_concrete_helpers`
+
+
"""
@classmethod
@@ -495,6 +504,13 @@ class AbstractConcreteBase(ConcreteBase):
have been reworked to support relationships established directly
on the abstract base, without any special configurational steps.
+ .. seealso::
+
+ :class:`.ConcreteBase`
+
+ :ref:`concrete_inheritance`
+
+ :ref:`inheritance_concrete_helpers`
"""
diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py
index 95aa14a26..53afdcb28 100644
--- a/lib/sqlalchemy/orm/mapper.py
+++ b/lib/sqlalchemy/orm/mapper.py
@@ -112,6 +112,7 @@ class Mapper(InspectionAttr):
include_properties=None,
exclude_properties=None,
passive_updates=True,
+ passive_deletes=False,
confirm_deleted_rows=True,
eager_defaults=False,
legacy_is_orphan=False,
@@ -319,6 +320,40 @@ class Mapper(InspectionAttr):
ordering for entities. By default mappers have no pre-defined
ordering.
+ :param passive_deletes: Indicates DELETE behavior of foreign key
+ columns when a joined-table inheritance entity is being deleted.
+ Defaults to ``False`` for a base mapper; for an inheriting mapper,
+ defaults to ``False`` unless the value is set to ``True``
+ on the superclass mapper.
+
+ When ``True``, it is assumed that ON DELETE CASCADE is configured
+ on the foreign key relationships that link this mapper's table
+ to its superclass table, so that when the unit of work attempts
+ to delete the entity, it need only emit a DELETE statement for the
+ superclass table, and not this table.
+
+ When ``False``, a DELETE statement is emitted for this mapper's
+ table individually. If the primary key attributes local to this
+ table are unloaded, then a SELECT must be emitted in order to
+ validate these attributes; note that the primary key columns
+ of a joined-table subclass are not part of the "primary key" of
+ the object as a whole.
+
+ Note that a value of ``True`` is **always** forced onto the
+ subclass mappers; that is, it's not possible for a superclass
+ to specify passive_deletes without this taking effect for
+ all subclass mappers.
+
+ .. versionadded:: 1.1
+
+ .. seealso::
+
+ :ref:`passive_deletes` - description of similar feature as
+ used with :func:`.relationship`
+
+ :paramref:`.mapper.passive_updates` - supporting ON UPDATE
+ CASCADE for joined-table inheritance mappers
+
:param passive_updates: Indicates UPDATE behavior of foreign key
columns when a primary key column changes on a joined-table
inheritance mapping. Defaults to ``True``.
@@ -339,6 +374,9 @@ class Mapper(InspectionAttr):
:ref:`passive_updates` - description of a similar feature as
used with :func:`.relationship`
+ :paramref:`.mapper.passive_deletes` - supporting ON DELETE
+ CASCADE for joined-table inheritance mappers
+
:param polymorphic_on: Specifies the column, attribute, or
SQL expression used to determine the target class for an
incoming row, when inheriting classes are present.
@@ -559,6 +597,7 @@ class Mapper(InspectionAttr):
self._dependency_processors = []
self.validators = util.immutabledict()
self.passive_updates = passive_updates
+ self.passive_deletes = passive_deletes
self.legacy_is_orphan = legacy_is_orphan
self._clause_adapter = None
self._requires_row_aliasing = False
@@ -971,6 +1010,8 @@ class Mapper(InspectionAttr):
self.inherits._inheriting_mappers.append(self)
self.base_mapper = self.inherits.base_mapper
self.passive_updates = self.inherits.passive_updates
+ self.passive_deletes = self.inherits.passive_deletes or \
+ self.passive_deletes
self._all_tables = self.inherits._all_tables
if self.polymorphic_identity is not None:
@@ -982,7 +1023,7 @@ class Mapper(InspectionAttr):
(self.polymorphic_identity,
self.polymorphic_map[self.polymorphic_identity],
self, self.polymorphic_identity)
- )
+ )
self.polymorphic_map[self.polymorphic_identity] = self
else:
@@ -1591,7 +1632,12 @@ class Mapper(InspectionAttr):
if key in self._props and \
not isinstance(prop, properties.ColumnProperty) and \
- not isinstance(self._props[key], properties.ColumnProperty):
+ not isinstance(
+ self._props[key],
+ (
+ properties.ColumnProperty,
+ properties.ConcreteInheritedProperty)
+ ):
util.warn("Property %s on %s being replaced with new "
"property %s; the old property will be discarded" % (
self._props[key],
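A minimal sketch of the joined-table inheritance mapping that the ``passive_deletes`` mapper argument documented above is aimed at; class and table names are illustrative::

    from sqlalchemy import Column, Integer, String, ForeignKey
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Employee(Base):
        __tablename__ = 'employee'
        id = Column(Integer, primary_key=True)
        type = Column(String(20))
        __mapper_args__ = {
            'polymorphic_on': type,
            'polymorphic_identity': 'employee'}

    class Engineer(Employee):
        __tablename__ = 'engineer'

        # ON DELETE CASCADE lets the database remove this row when
        # the parent "employee" row is deleted
        id = Column(Integer,
                    ForeignKey('employee.id', ondelete='CASCADE'),
                    primary_key=True)
        __mapper_args__ = {
            'polymorphic_identity': 'engineer',
            'passive_deletes': True}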
diff --git a/lib/sqlalchemy/orm/persistence.py b/lib/sqlalchemy/orm/persistence.py
index e6a2c0634..30b39f600 100644
--- a/lib/sqlalchemy/orm/persistence.py
+++ b/lib/sqlalchemy/orm/persistence.py
@@ -241,6 +241,8 @@ def delete_obj(base_mapper, states, uowtransaction):
mapper = table_to_mapper[table]
if table not in mapper._pks_by_table:
continue
+ elif mapper.inherits and mapper.passive_deletes:
+ continue
delete = _collect_delete_commands(base_mapper, uowtransaction,
table, states_to_delete)
diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py
index e1b920bbb..6b808a701 100644
--- a/lib/sqlalchemy/orm/query.py
+++ b/lib/sqlalchemy/orm/query.py
@@ -2741,22 +2741,37 @@ class Query(object):
self.session._autoflush()
return self._execute_and_instances(context)
+ def __str__(self):
+ context = self._compile_context()
+ try:
+ bind = self._get_bind_args(
+ context, self.session.get_bind) if self.session else None
+ except sa_exc.UnboundExecutionError:
+ bind = None
+ return str(context.statement.compile(bind))
+
def _connection_from_session(self, **kw):
- conn = self.session.connection(
- **kw)
+ conn = self.session.connection(**kw)
if self._execution_options:
conn = conn.execution_options(**self._execution_options)
return conn
def _execute_and_instances(self, querycontext):
- conn = self._connection_from_session(
- mapper=self._bind_mapper(),
- clause=querycontext.statement,
+ conn = self._get_bind_args(
+ querycontext,
+ self._connection_from_session,
close_with_result=True)
result = conn.execute(querycontext.statement, self._params)
return loading.instances(querycontext.query, result, querycontext)
+ def _get_bind_args(self, querycontext, fn, **kw):
+ return fn(
+ mapper=self._bind_mapper(),
+ clause=querycontext.statement,
+ **kw
+ )
+
@property
def column_descriptions(self):
"""Return metadata about the columns which would be
@@ -3358,8 +3373,6 @@ class Query(object):
sql.True_._ifnone(context.whereclause),
single_crit)
- def __str__(self):
- return str(self._compile_context().statement)
from ..sql.selectable import ForUpdateArg
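The practical effect of the relocated ``__str__`` above is that stringifying a session-bound query now uses the session's bind for compilation; a short sketch, assuming a mapped ``User`` class and a MySQL-bound ``session``::

    q = session.query(User).filter(User.name == 'ed')

    # compiles against the session's engine, so dialect-specific
    # quoting and paramstyle are used; with no session or bind
    # available it falls back to a dialect-neutral string
    print(q)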
diff --git a/lib/sqlalchemy/orm/relationships.py b/lib/sqlalchemy/orm/relationships.py
index f822071c4..83856eebf 100644
--- a/lib/sqlalchemy/orm/relationships.py
+++ b/lib/sqlalchemy/orm/relationships.py
@@ -1817,15 +1817,16 @@ class RelationshipProperty(StrategizedProperty):
backref_key, kwargs = self.backref
mapper = self.mapper.primary_mapper()
- check = set(mapper.iterate_to_root()).\
- union(mapper.self_and_descendants)
- for m in check:
- if m.has_property(backref_key):
- raise sa_exc.ArgumentError(
- "Error creating backref "
- "'%s' on relationship '%s': property of that "
- "name exists on mapper '%s'" %
- (backref_key, self, m))
+ if not mapper.concrete:
+ check = set(mapper.iterate_to_root()).\
+ union(mapper.self_and_descendants)
+ for m in check:
+ if m.has_property(backref_key) and not m.concrete:
+ raise sa_exc.ArgumentError(
+ "Error creating backref "
+ "'%s' on relationship '%s': property of that "
+ "name exists on mapper '%s'" %
+ (backref_key, self, m))
# determine primaryjoin/secondaryjoin for the
# backref. Use the one we had, so that
diff --git a/lib/sqlalchemy/orm/scoping.py b/lib/sqlalchemy/orm/scoping.py
index b3f2fa5db..176523c3b 100644
--- a/lib/sqlalchemy/orm/scoping.py
+++ b/lib/sqlalchemy/orm/scoping.py
@@ -21,6 +21,12 @@ class scoped_session(object):
"""
+ session_factory = None
+    """The ``session_factory`` provided to ``__init__`` is stored in this
+ attribute and may be accessed at a later time. This can be useful when
+ a new non-scoped :class:`.Session` or :class:`.Connection` to the
+ database is needed."""
+
def __init__(self, session_factory, scopefunc=None):
"""Construct a new :class:`.scoped_session`.
@@ -38,6 +44,7 @@ class scoped_session(object):
"""
self.session_factory = session_factory
+
if scopefunc:
self.registry = ScopedRegistry(session_factory, scopefunc)
else:
@@ -45,12 +52,12 @@ class scoped_session(object):
def __call__(self, **kw):
"""Return the current :class:`.Session`, creating it
- using the session factory if not present.
+ using the :attr:`.scoped_session.session_factory` if not present.
:param \**kw: Keyword arguments will be passed to the
- session factory callable, if an existing :class:`.Session`
- is not present. If the :class:`.Session` is present and
- keyword arguments have been passed,
+ :attr:`.scoped_session.session_factory` callable, if an existing
+ :class:`.Session` is not present. If the :class:`.Session` is present
+ and keyword arguments have been passed,
:exc:`~sqlalchemy.exc.InvalidRequestError` is raised.
"""
diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py
index b60e47bb3..0252a65f9 100644
--- a/lib/sqlalchemy/orm/strategies.py
+++ b/lib/sqlalchemy/orm/strategies.py
@@ -238,7 +238,7 @@ class DeferredColumnLoader(LoaderStrategy):
(
loadopt and
self.group and
- loadopt.local_opts.get('undefer_group', False) == self.group
+ loadopt.local_opts.get('undefer_group_%s' % self.group, False)
)
or
(
diff --git a/lib/sqlalchemy/orm/strategy_options.py b/lib/sqlalchemy/orm/strategy_options.py
index 3467328e3..aa818258a 100644
--- a/lib/sqlalchemy/orm/strategy_options.py
+++ b/lib/sqlalchemy/orm/strategy_options.py
@@ -80,6 +80,8 @@ class Load(Generative, MapperOption):
def __init__(self, entity):
insp = inspect(entity)
self.path = insp._path_registry
+ # note that this .context is shared among all descendant
+ # Load objects
self.context = {}
self.local_opts = {}
@@ -88,6 +90,7 @@ class Load(Generative, MapperOption):
cloned.local_opts = {}
return cloned
+ is_opts_only = False
strategy = None
propagate_to_loaders = False
@@ -200,7 +203,7 @@ class Load(Generative, MapperOption):
self._set_path_strategy()
@_generative
- def set_column_strategy(self, attrs, strategy, opts=None):
+ def set_column_strategy(self, attrs, strategy, opts=None, opts_only=False):
strategy = self._coerce_strat(strategy)
for attr in attrs:
@@ -211,13 +214,34 @@ class Load(Generative, MapperOption):
cloned.propagate_to_loaders = True
if opts:
cloned.local_opts.update(opts)
+ if opts_only:
+ cloned.is_opts_only = True
cloned._set_path_strategy()
+ def _set_for_path(self, context, path, replace=True, merge_opts=False):
+ if merge_opts or not replace:
+ existing = path.get(self.context, "loader")
+
+ if existing:
+ if merge_opts:
+ existing.local_opts.update(self.local_opts)
+ else:
+ path.set(context, "loader", self)
+ else:
+ existing = path.get(self.context, "loader")
+ path.set(context, "loader", self)
+ if existing and existing.is_opts_only:
+ self.local_opts.update(existing.local_opts)
+
def _set_path_strategy(self):
if self.path.has_entity:
- self.path.parent.set(self.context, "loader", self)
+ effective_path = self.path.parent
else:
- self.path.set(self.context, "loader", self)
+ effective_path = self.path
+
+ self._set_for_path(
+ self.context, effective_path, replace=True,
+ merge_opts=self.is_opts_only)
def __getstate__(self):
d = self.__dict__.copy()
@@ -305,7 +329,7 @@ class _UnboundLoad(Load):
val._bind_loader(query, query._attributes, raiseerr)
@classmethod
- def _from_keys(self, meth, keys, chained, kw):
+ def _from_keys(cls, meth, keys, chained, kw):
opt = _UnboundLoad()
def _split_key(key):
@@ -390,6 +414,7 @@ class _UnboundLoad(Load):
loader = Load(path_element)
loader.context = context
loader.strategy = self.strategy
+ loader.is_opts_only = self.is_opts_only
path = loader.path
for token in start_path:
@@ -411,15 +436,15 @@ class _UnboundLoad(Load):
if effective_path.is_token:
for path in effective_path.generate_for_superclasses():
- if self._is_chain_link:
- path.setdefault(context, "loader", loader)
- else:
- path.set(context, "loader", loader)
+ loader._set_for_path(
+ context, path,
+ replace=not self._is_chain_link,
+ merge_opts=self.is_opts_only)
else:
- if self._is_chain_link:
- effective_path.setdefault(context, "loader", loader)
- else:
- effective_path.set(context, "loader", loader)
+ loader._set_for_path(
+ context, effective_path,
+ replace=not self._is_chain_link,
+ merge_opts=self.is_opts_only)
def _find_entity_prop_comparator(self, query, token, mapper, raiseerr):
if _is_aliased_class(mapper):
@@ -1028,7 +1053,8 @@ def undefer_group(loadopt, name):
return loadopt.set_column_strategy(
"*",
None,
- {"undefer_group": name}
+ {"undefer_group_%s" % name: True},
+ opts_only=True
)
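With the per-group option key above, ``undefer_group()`` entries are merged rather than overwritten, so more than one group can be undeferred in the same options chain; a hedged sketch assuming a ``Book`` class with deferred column groups ``'summary'`` and ``'excerpt'`` and an existing ``session``::

    from sqlalchemy.orm import undefer_group

    q = session.query(Book).options(
        undefer_group('summary'),
        undefer_group('excerpt'))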
diff --git a/lib/sqlalchemy/sql/__init__.py b/lib/sqlalchemy/sql/__init__.py
index fa2cf2399..f4ad3ec00 100644
--- a/lib/sqlalchemy/sql/__init__.py
+++ b/lib/sqlalchemy/sql/__init__.py
@@ -66,6 +66,7 @@ from .expression import (
union,
union_all,
update,
+ within_group
)
from .visitors import ClauseVisitor
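The newly exported ``within_group`` construct renders the WITHIN GROUP (ORDER BY ...) clause used by ordered-set aggregates; a brief sketch against an assumed table ``t`` with a numeric ``score`` column::

    from sqlalchemy import select, func, within_group

    stmt = select([
        within_group(func.percentile_cont(0.5), t.c.score.desc())
    ])
    # roughly: SELECT percentile_cont(:percentile_cont_1)
    #          WITHIN GROUP (ORDER BY t.score DESC) FROM t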
diff --git a/lib/sqlalchemy/sql/base.py b/lib/sqlalchemy/sql/base.py
index eed079238..48b9a8a2b 100644
--- a/lib/sqlalchemy/sql/base.py
+++ b/lib/sqlalchemy/sql/base.py
@@ -449,11 +449,10 @@ class ColumnCollection(util.OrderedProperties):
"""
- __slots__ = '_all_col_set', '_all_columns'
+ __slots__ = '_all_columns'
def __init__(self, *columns):
super(ColumnCollection, self).__init__()
- object.__setattr__(self, '_all_col_set', util.column_set())
object.__setattr__(self, '_all_columns', [])
for c in columns:
self.add(c)
@@ -482,14 +481,11 @@ class ColumnCollection(util.OrderedProperties):
other = self[column.name]
if other.name == other.key:
remove_col = other
- self._all_col_set.remove(other)
del self._data[other.key]
if column.key in self._data:
remove_col = self._data[column.key]
- self._all_col_set.remove(remove_col)
- self._all_col_set.add(column)
self._data[column.key] = column
if remove_col is not None:
self._all_columns[:] = [column if c is remove_col
@@ -534,7 +530,6 @@ class ColumnCollection(util.OrderedProperties):
# in a _make_proxy operation
util.memoized_property.reset(value, "proxy_set")
- self._all_col_set.add(value)
self._all_columns.append(value)
self._data[key] = value
@@ -543,22 +538,20 @@ class ColumnCollection(util.OrderedProperties):
def remove(self, column):
del self._data[column.key]
- self._all_col_set.remove(column)
self._all_columns[:] = [
c for c in self._all_columns if c is not column]
def update(self, iter):
cols = list(iter)
+ all_col_set = set(self._all_columns)
self._all_columns.extend(
- c for label, c in cols if c not in self._all_col_set)
- self._all_col_set.update(c for label, c in cols)
+ c for label, c in cols if c not in all_col_set)
self._data.update((label, c) for label, c in cols)
def extend(self, iter):
cols = list(iter)
- self._all_columns.extend(c for c in cols if c not in
- self._all_col_set)
- self._all_col_set.update(cols)
+ all_col_set = set(self._all_columns)
+ self._all_columns.extend(c for c in cols if c not in all_col_set)
self._data.update((c.key, c) for c in cols)
__hash__ = None
@@ -584,22 +577,18 @@ class ColumnCollection(util.OrderedProperties):
def __setstate__(self, state):
object.__setattr__(self, '_data', state['_data'])
object.__setattr__(self, '_all_columns', state['_all_columns'])
- object.__setattr__(
- self, '_all_col_set', util.column_set(state['_all_columns']))
def contains_column(self, col):
- # this has to be done via set() membership
- return col in self._all_col_set
+ existing = self._data.get(col.key)
+ return existing is not None and hash(existing) == hash(col)
def as_immutable(self):
- return ImmutableColumnCollection(
- self._data, self._all_col_set, self._all_columns)
+ return ImmutableColumnCollection(self._data, self._all_columns)
class ImmutableColumnCollection(util.ImmutableProperties, ColumnCollection):
- def __init__(self, data, colset, all_columns):
+ def __init__(self, data, all_columns):
util.ImmutableProperties.__init__(self, data)
- object.__setattr__(self, '_all_col_set', colset)
object.__setattr__(self, '_all_columns', all_columns)
extend = remove = util.ImmutableProperties._immutable
diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py
index 6766c99b7..492999d16 100644
--- a/lib/sqlalchemy/sql/compiler.py
+++ b/lib/sqlalchemy/sql/compiler.py
@@ -167,25 +167,39 @@ class Compiled(object):
_cached_metadata = None
def __init__(self, dialect, statement, bind=None,
+ schema_translate_map=None,
compile_kwargs=util.immutabledict()):
- """Construct a new ``Compiled`` object.
+ """Construct a new :class:`.Compiled` object.
- :param dialect: ``Dialect`` to compile against.
+ :param dialect: :class:`.Dialect` to compile against.
- :param statement: ``ClauseElement`` to be compiled.
+ :param statement: :class:`.ClauseElement` to be compiled.
:param bind: Optional Engine or Connection to compile this
statement against.
+ :param schema_translate_map: dictionary of schema names to be
+ translated when forming the resultant SQL
+
+ .. versionadded:: 1.1
+
+ .. seealso::
+
+ :ref:`schema_translating`
+
:param compile_kwargs: additional kwargs that will be
passed to the initial call to :meth:`.Compiled.process`.
- .. versionadded:: 0.8
"""
self.dialect = dialect
self.bind = bind
+ self.preparer = self.dialect.identifier_preparer
+ if schema_translate_map:
+ self.preparer = self.preparer._with_schema_translate(
+ schema_translate_map)
+
if statement is not None:
self.statement = statement
self.can_execute = statement.supports_execution
@@ -286,12 +300,11 @@ class _CompileLabel(visitors.Visitable):
def self_group(self, **kw):
return self
-class SQLCompiler(Compiled):
- """Default implementation of Compiled.
+class SQLCompiler(Compiled):
+ """Default implementation of :class:`.Compiled`.
- Compiles ClauseElements into SQL strings. Uses a similar visit
- paradigm as visitors.ClauseVisitor but implements its own traversal.
+ Compiles :class:`.ClauseElement` objects into SQL strings.
"""
@@ -305,6 +318,8 @@ class SQLCompiler(Compiled):
INSERT/UPDATE/DELETE
"""
+ isplaintext = False
+
returning = None
"""holds the "returning" collection of columns if
the statement is CRUD and defines returning columns
@@ -330,19 +345,34 @@ class SQLCompiler(Compiled):
driver/DB enforces this
"""
+ _textual_ordered_columns = False
+ """tell the result object that the column names as rendered are important,
+ but they are also "ordered" vs. what is in the compiled object here.
+ """
+
+ _ordered_columns = True
+ """
+ if False, means we can't be sure the list of entries
+ in _result_columns is actually the rendered order. Usually
+ True unless using an unordered TextAsFrom.
+ """
+
def __init__(self, dialect, statement, column_keys=None,
inline=False, **kwargs):
- """Construct a new ``DefaultCompiler`` object.
+ """Construct a new :class:`.SQLCompiler` object.
- dialect
- Dialect to be used
+ :param dialect: :class:`.Dialect` to be used
- statement
- ClauseElement to be compiled
+ :param statement: :class:`.ClauseElement` to be compiled
- column_keys
- a list of column names to be compiled into an INSERT or UPDATE
- statement.
+ :param column_keys: a list of column names to be compiled into an
+ INSERT or UPDATE statement.
+
+ :param inline: whether to generate INSERT statements as "inline", e.g.
+ not formatted to return any generated defaults
+
+ :param kwargs: additional keyword arguments to be consumed by the
+ superclass.
"""
self.column_keys = column_keys
@@ -368,11 +398,6 @@ class SQLCompiler(Compiled):
# column targeting
self._result_columns = []
- # if False, means we can't be sure the list of entries
- # in _result_columns is actually the rendered order. This
- # gets flipped when we use TextAsFrom, for example.
- self._ordered_columns = True
-
# true if the paramstyle is positional
self.positional = dialect.positional
if self.positional:
@@ -381,8 +406,6 @@ class SQLCompiler(Compiled):
self.ctes = None
- # an IdentifierPreparer that formats the quoting of identifiers
- self.preparer = dialect.identifier_preparer
self.label_length = dialect.label_length \
or dialect.max_identifier_length
@@ -649,8 +672,11 @@ class SQLCompiler(Compiled):
if table is None or not include_table or not table.named_with_column:
return name
else:
- if table.schema:
- schema_prefix = self.preparer.quote_schema(table.schema) + '.'
+ effective_schema = self.preparer.schema_for_object(table)
+
+ if effective_schema:
+ schema_prefix = self.preparer.quote_schema(
+ effective_schema) + '.'
else:
schema_prefix = ''
tablename = table.name
@@ -688,6 +714,9 @@ class SQLCompiler(Compiled):
else:
return self.bindparam_string(name, **kw)
+ if not self.stack:
+ self.isplaintext = True
+
# un-escape any \:params
return BIND_PARAMS_ESC.sub(
lambda m: m.group(1),
@@ -711,7 +740,8 @@ class SQLCompiler(Compiled):
) or entry.get('need_result_map_for_nested', False)
if populate_result_map:
- self._ordered_columns = False
+ self._ordered_columns = \
+ self._textual_ordered_columns = taf.positional
for c in taf.column_args:
self.process(c, within_columns_clause=True,
add_to_result_map=self._add_to_result_map)
@@ -873,22 +903,28 @@ class SQLCompiler(Compiled):
else:
return text
+ def _get_operator_dispatch(self, operator_, qualifier1, qualifier2):
+ attrname = "visit_%s_%s%s" % (
+ operator_.__name__, qualifier1,
+ "_" + qualifier2 if qualifier2 else "")
+ return getattr(self, attrname, None)
+
def visit_unary(self, unary, **kw):
if unary.operator:
if unary.modifier:
raise exc.CompileError(
"Unary expression does not support operator "
"and modifier simultaneously")
- disp = getattr(self, "visit_%s_unary_operator" %
- unary.operator.__name__, None)
+ disp = self._get_operator_dispatch(
+ unary.operator, "unary", "operator")
if disp:
return disp(unary, unary.operator, **kw)
else:
return self._generate_generic_unary_operator(
unary, OPERATORS[unary.operator], **kw)
elif unary.modifier:
- disp = getattr(self, "visit_%s_unary_modifier" %
- unary.modifier.__name__, None)
+ disp = self._get_operator_dispatch(
+ unary.modifier, "unary", "modifier")
if disp:
return disp(unary, unary.modifier, **kw)
else:
@@ -922,7 +958,7 @@ class SQLCompiler(Compiled):
kw['literal_binds'] = True
operator_ = override_operator or binary.operator
- disp = getattr(self, "visit_%s_binary" % operator_.__name__, None)
+ disp = self._get_operator_dispatch(operator_, "binary", None)
if disp:
return disp(binary, operator_, **kw)
else:
@@ -1298,7 +1334,7 @@ class SQLCompiler(Compiled):
add_to_result_map = lambda keyname, name, objects, type_: \
self._add_to_result_map(
keyname, name,
- objects + (column,), type_)
+ (column,) + objects, type_)
else:
col_expr = column
if populate_result_map:
@@ -1386,7 +1422,7 @@ class SQLCompiler(Compiled):
"""Rewrite any "a JOIN (b JOIN c)" expression as
"a JOIN (select * from b JOIN c) AS anon", to support
databases that can't parse a parenthesized join correctly
- (i.e. sqlite the main one).
+ (i.e. sqlite < 3.7.16).
"""
cloned = {}
@@ -1801,8 +1837,10 @@ class SQLCompiler(Compiled):
def visit_table(self, table, asfrom=False, iscrud=False, ashint=False,
fromhints=None, use_schema=True, **kwargs):
if asfrom or ashint:
- if use_schema and getattr(table, "schema", None):
- ret = self.preparer.quote_schema(table.schema) + \
+ effective_schema = self.preparer.schema_for_object(table)
+
+ if use_schema and effective_schema:
+ ret = self.preparer.quote_schema(effective_schema) + \
"." + self.preparer.quote(table.name)
else:
ret = self.preparer.quote(table.name)
@@ -2080,6 +2118,30 @@ class SQLCompiler(Compiled):
self.preparer.format_savepoint(savepoint_stmt)
+class StrSQLCompiler(SQLCompiler):
+    """A compiler subclass with a few non-standard SQL features allowed.
+
+ Used for stringification of SQL statements when a real dialect is not
+ available.
+
+ """
+
+ def visit_getitem_binary(self, binary, operator, **kw):
+ return "%s[%s]" % (
+ self.process(binary.left, **kw),
+ self.process(binary.right, **kw)
+ )
+
+ def returning_clause(self, stmt, returning_cols):
+
+ columns = [
+ self._label_select_column(None, c, True, False, {})
+ for c in elements._select_iterables(returning_cols)
+ ]
+
+ return 'RETURNING ' + ', '.join(columns)
+
+
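The ``StrSQLCompiler`` above (together with ``StrCompileDialect`` in ``default.py`` and its use in ``elements.py``) is what lets ``str()`` succeed on constructs that have no dialect-neutral SQL; a hedged sketch using an array-index expression::

    from sqlalchemy import table, column, select, Integer
    from sqlalchemy.dialects.postgresql import ARRAY

    data = table("data", column("vals", ARRAY(Integer)))

    stmt = select([data.c.vals[3]])

    # with no bound engine, str() compiles via StrCompileDialect, so the
    # non-standard "[...]" getitem renders instead of failing
    print(stmt)
    # roughly: SELECT data.vals[:vals_1] FROM data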
class DDLCompiler(Compiled):
@util.memoized_property
@@ -2090,10 +2152,6 @@ class DDLCompiler(Compiled):
def type_compiler(self):
return self.dialect.type_compiler
- @property
- def preparer(self):
- return self.dialect.identifier_preparer
-
def construct_params(self, params=None):
return None
@@ -2103,7 +2161,7 @@ class DDLCompiler(Compiled):
if isinstance(ddl.target, schema.Table):
context = context.copy()
- preparer = self.dialect.identifier_preparer
+ preparer = self.preparer
path = preparer.format_table_seq(ddl.target)
if len(path) == 1:
table, sch = path[0], ''
@@ -2129,7 +2187,7 @@ class DDLCompiler(Compiled):
def visit_create_table(self, create):
table = create.element
- preparer = self.dialect.identifier_preparer
+ preparer = self.preparer
text = "\nCREATE "
if table._prefixes:
@@ -2256,9 +2314,12 @@ class DDLCompiler(Compiled):
index, include_schema=True)
def _prepared_index_name(self, index, include_schema=False):
- if include_schema and index.table is not None and index.table.schema:
- schema = index.table.schema
- schema_name = self.preparer.quote_schema(schema)
+ if index.table is not None:
+ effective_schema = self.preparer.schema_for_object(index.table)
+ else:
+ effective_schema = None
+ if include_schema and effective_schema:
+ schema_name = self.preparer.quote_schema(effective_schema)
else:
schema_name = None
@@ -2386,7 +2447,7 @@ class DDLCompiler(Compiled):
return text
def visit_foreign_key_constraint(self, constraint):
- preparer = self.dialect.identifier_preparer
+ preparer = self.preparer
text = ""
if constraint.name is not None:
formatted_name = self.preparer.format_constraint(constraint)
@@ -2603,6 +2664,17 @@ class GenericTypeCompiler(TypeCompiler):
return type_.get_col_spec(**kw)
+class StrSQLTypeCompiler(GenericTypeCompiler):
+ def __getattr__(self, key):
+ if key.startswith("visit_"):
+ return self._visit_unknown
+ else:
+ raise AttributeError(key)
+
+ def _visit_unknown(self, type_, **kw):
+ return "%s" % type_.__class__.__name__
+
+
class IdentifierPreparer(object):
"""Handle quoting and case-folding of identifiers based on options."""
@@ -2613,6 +2685,8 @@ class IdentifierPreparer(object):
illegal_initial_characters = ILLEGAL_INITIAL_CHARACTERS
+ schema_for_object = schema._schema_getter(None)
+
def __init__(self, dialect, initial_quote='"',
final_quote=None, escape_quote='"', omit_schema=False):
"""Construct a new ``IdentifierPreparer`` object.
@@ -2637,6 +2711,12 @@ class IdentifierPreparer(object):
self.omit_schema = omit_schema
self._strings = {}
+ def _with_schema_translate(self, schema_translate_map):
+ prep = self.__class__.__new__(self.__class__)
+ prep.__dict__.update(self.__dict__)
+ prep.schema_for_object = schema._schema_getter(schema_translate_map)
+ return prep
+
def _escape_identifier(self, value):
"""Escape an identifier.
@@ -2709,9 +2789,12 @@ class IdentifierPreparer(object):
def format_sequence(self, sequence, use_schema=True):
name = self.quote(sequence.name)
+
+ effective_schema = self.schema_for_object(sequence)
+
if (not self.omit_schema and use_schema and
- sequence.schema is not None):
- name = self.quote_schema(sequence.schema) + "." + name
+ effective_schema is not None):
+ name = self.quote_schema(effective_schema) + "." + name
return name
def format_label(self, label, name=None):
@@ -2740,9 +2823,12 @@ class IdentifierPreparer(object):
if name is None:
name = table.name
result = self.quote(name)
+
+ effective_schema = self.schema_for_object(table)
+
if not self.omit_schema and use_schema \
- and getattr(table, "schema", None):
- result = self.quote_schema(table.schema) + "." + result
+ and effective_schema:
+ result = self.quote_schema(effective_schema) + "." + result
return result
def format_schema(self, name, quote=None):
@@ -2781,9 +2867,11 @@ class IdentifierPreparer(object):
# ('database', 'owner', etc.) could override this and return
# a longer sequence.
+ effective_schema = self.schema_for_object(table)
+
if not self.omit_schema and use_schema and \
- getattr(table, 'schema', None):
- return (self.quote_schema(table.schema),
+ effective_schema:
+ return (self.quote_schema(effective_schema),
self.format_table(table, use_schema=False))
else:
return (self.format_table(table, use_schema=False), )
diff --git a/lib/sqlalchemy/sql/ddl.py b/lib/sqlalchemy/sql/ddl.py
index 71018f132..7953b61b8 100644
--- a/lib/sqlalchemy/sql/ddl.py
+++ b/lib/sqlalchemy/sql/ddl.py
@@ -679,13 +679,16 @@ class SchemaGenerator(DDLBase):
def _can_create_table(self, table):
self.dialect.validate_identifier(table.name)
- if table.schema:
- self.dialect.validate_identifier(table.schema)
+ effective_schema = self.connection.schema_for_object(table)
+ if effective_schema:
+ self.dialect.validate_identifier(effective_schema)
return not self.checkfirst or \
not self.dialect.has_table(self.connection,
- table.name, schema=table.schema)
+ table.name, schema=effective_schema)
def _can_create_sequence(self, sequence):
+ effective_schema = self.connection.schema_for_object(sequence)
+
return self.dialect.supports_sequences and \
(
(not self.dialect.sequences_optional or
@@ -695,7 +698,7 @@ class SchemaGenerator(DDLBase):
not self.dialect.has_sequence(
self.connection,
sequence.name,
- schema=sequence.schema)
+ schema=effective_schema)
)
)
@@ -882,12 +885,14 @@ class SchemaDropper(DDLBase):
def _can_drop_table(self, table):
self.dialect.validate_identifier(table.name)
- if table.schema:
- self.dialect.validate_identifier(table.schema)
+ effective_schema = self.connection.schema_for_object(table)
+ if effective_schema:
+ self.dialect.validate_identifier(effective_schema)
return not self.checkfirst or self.dialect.has_table(
- self.connection, table.name, schema=table.schema)
+ self.connection, table.name, schema=effective_schema)
def _can_drop_sequence(self, sequence):
+ effective_schema = self.connection.schema_for_object(sequence)
return self.dialect.supports_sequences and \
((not self.dialect.sequences_optional or
not sequence.optional) and
@@ -895,7 +900,7 @@ class SchemaDropper(DDLBase):
self.dialect.has_sequence(
self.connection,
sequence.name,
- schema=sequence.schema))
+ schema=effective_schema))
)
def visit_index(self, index):
diff --git a/lib/sqlalchemy/sql/default_comparator.py b/lib/sqlalchemy/sql/default_comparator.py
index 68ea5624e..ddb57da77 100644
--- a/lib/sqlalchemy/sql/default_comparator.py
+++ b/lib/sqlalchemy/sql/default_comparator.py
@@ -164,27 +164,7 @@ def _in_impl(expr, op, seq_or_selectable, negate_op, **kw):
def _getitem_impl(expr, op, other, **kw):
if isinstance(expr.type, type_api.INDEXABLE):
- if isinstance(other, slice):
- if expr.type.zero_indexes:
- other = slice(
- other.start + 1,
- other.stop + 1,
- other.step
- )
- other = Slice(
- _literal_as_binds(
- other.start, name=expr.key, type_=type_api.INTEGERTYPE),
- _literal_as_binds(
- other.stop, name=expr.key, type_=type_api.INTEGERTYPE),
- _literal_as_binds(
- other.step, name=expr.key, type_=type_api.INTEGERTYPE)
- )
- else:
- if expr.type.zero_indexes:
- other += 1
-
- other = _literal_as_binds(
- other, name=expr.key, type_=type_api.INTEGERTYPE)
+ other = _check_literal(expr, op, other)
return _binary_operate(expr, op, other, **kw)
else:
_unsupported_impl(expr, op, other, **kw)
@@ -260,6 +240,8 @@ operator_lookup = {
"mod": (_binary_operate,),
"truediv": (_binary_operate,),
"custom_op": (_binary_operate,),
+ "json_path_getitem_op": (_binary_operate, ),
+ "json_getitem_op": (_binary_operate, ),
"concat_op": (_binary_operate,),
"lt": (_boolean_compare, operators.ge),
"le": (_boolean_compare, operators.gt),
@@ -295,7 +277,7 @@ operator_lookup = {
}
-def _check_literal(expr, operator, other):
+def _check_literal(expr, operator, other, bindparam_type=None):
if isinstance(other, (ColumnElement, TextClause)):
if isinstance(other, BindParameter) and \
other.type._isnull:
@@ -310,7 +292,7 @@ def _check_literal(expr, operator, other):
if isinstance(other, (SelectBase, Alias)):
return other.as_scalar()
elif not isinstance(other, Visitable):
- return expr._bind_param(operator, other)
+ return expr._bind_param(operator, other, type_=bindparam_type)
else:
return other
diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py
index 70046c66b..fe2fecce8 100644
--- a/lib/sqlalchemy/sql/elements.py
+++ b/lib/sqlalchemy/sql/elements.py
@@ -429,7 +429,7 @@ class ClauseElement(Visitable):
dialect = self.bind.dialect
bind = self.bind
else:
- dialect = default.DefaultDialect()
+ dialect = default.StrCompileDialect()
return self._compiler(dialect, bind=bind, **kw)
def _compiler(self, dialect, **kw):
@@ -682,9 +682,10 @@ class ColumnElement(operators.ColumnOperators, ClauseElement):
def reverse_operate(self, op, other, **kwargs):
return op(other, self.comparator, **kwargs)
- def _bind_param(self, operator, obj):
+ def _bind_param(self, operator, obj, type_=None):
return BindParameter(None, obj,
_compared_to_operator=operator,
+ type_=type_,
_compared_to_type=self.type, unique=True)
@property
@@ -1275,17 +1276,16 @@ class TextClause(Executable, ClauseElement):
for id, name in connection.execute(t):
print(id, name)
- The :func:`.text` construct is used internally in cases when
- a literal string is specified for part of a larger query, such as
- when a string is specified to the :meth:`.Select.where` method of
- :class:`.Select`. In those cases, the same
- bind parameter syntax is applied::
+ The :func:`.text` construct is used in cases when
+ a literal string SQL fragment is specified as part of a larger query,
+ such as for the WHERE clause of a SELECT statement::
- s = select([users.c.id, users.c.name]).where("id=:user_id")
+ s = select([users.c.id, users.c.name]).where(text("id=:user_id"))
result = connection.execute(s, user_id=12)
- Using :func:`.text` explicitly usually implies the construction
- of a full, standalone statement. As such, SQLAlchemy refers
+ :func:`.text` is also used for the construction
+ of a full, standalone statement using plain text.
+ As such, SQLAlchemy refers
to it as an :class:`.Executable` object, and it supports
the :meth:`Executable.execution_options` method. For example,
a :func:`.text` construct that should be subject to "autocommit"
@@ -1360,6 +1360,12 @@ class TextClause(Executable, ClauseElement):
.. deprecated:: 0.9.0 the :meth:`.TextClause.columns` method
supersedes the ``typemap`` argument to :func:`.text`.
+ .. seealso::
+
+ :ref:`sqlexpression_text` - in the Core tutorial
+
+ :ref:`orm_tutorial_literal_sql` - in the ORM tutorial
+
"""
stmt = TextClause(text, bind=bind)
if bindparams:
@@ -1485,9 +1491,17 @@ class TextClause(Executable, ClauseElement):
mytable.join(stmt, mytable.c.name == stmt.c.name)
).where(stmt.c.id > 5)
- Above, we used untyped :func:`.column` elements. These can also have
- types specified, which will impact how the column behaves in
- expressions as well as determining result set behavior::
+ Above, we pass a series of :func:`.column` elements to the
+ :meth:`.TextClause.columns` method positionally. These :func:`.column`
+ elements now become first class elements upon the :attr:`.TextAsFrom.c`
+ column collection, just like any other selectable.
+
+ The column expressions we pass to :meth:`.TextClause.columns` may
+ also be typed; when we do so, these :class:`.TypeEngine` objects become
+ the effective return type of the column, so that SQLAlchemy's
+ result-set-processing systems may be used on the return values.
+ This is often needed for types such as date or boolean types, as well
+ as for unicode processing on some dialect configurations::
stmt = text("SELECT id, name, timestamp FROM some_table")
stmt = stmt.columns(
@@ -1499,9 +1513,8 @@ class TextClause(Executable, ClauseElement):
for id, name, timestamp in connection.execute(stmt):
print(id, name, timestamp)
- Keyword arguments allow just the names and types of columns to be
- specified, where the :func:`.column` elements will be generated
- automatically::
+ As a shortcut to the above syntax, keyword arguments referring to
+ types alone may be used, if only type conversion is needed::
stmt = text("SELECT id, name, timestamp FROM some_table")
stmt = stmt.columns(
@@ -1513,6 +1526,31 @@ class TextClause(Executable, ClauseElement):
for id, name, timestamp in connection.execute(stmt):
print(id, name, timestamp)
+ The positional form of :meth:`.TextClause.columns` also provides
+ the unique feature of **positional column targeting**, which is
+ particularly useful when using the ORM with complex textual queries.
+ If we specify the columns from our model to :meth:`.TextClause.columns`,
+ the result set will match to those columns positionally, meaning the
+ name or origin of the column in the textual SQL doesn't matter::
+
+ stmt = text("SELECT users.id, addresses.id, users.id, "
+ "users.name, addresses.email_address AS email "
+ "FROM users JOIN addresses ON users.id=addresses.user_id "
+ "WHERE users.id = 1").columns(
+ User.id,
+ Address.id,
+ Address.user_id,
+ User.name,
+ Address.email_address
+ )
+
+ query = session.query(User).from_statement(stmt).options(
+ contains_eager(User.addresses))
+
+ .. versionadded:: 1.1 the :meth:`.TextClause.columns` method now
+ offers positional column targeting in the result set when
+ the column expressions are passed purely positionally.
+
The :meth:`.TextClause.columns` method provides a direct
route to calling :meth:`.FromClause.alias` as well as
:meth:`.SelectBase.cte` against a textual SELECT statement::
@@ -1526,15 +1564,22 @@ class TextClause(Executable, ClauseElement):
:meth:`.TextClause.columns` method. This method supersedes the
``typemap`` argument to :func:`.text`.
+
"""
- input_cols = [
+ positional_input_cols = [
ColumnClause(col.key, types.pop(col.key))
if col.key in types
else col
for col in cols
- ] + [ColumnClause(key, type_) for key, type_ in types.items()]
- return selectable.TextAsFrom(self, input_cols)
+ ]
+ keyed_input_cols = [
+ ColumnClause(key, type_) for key, type_ in types.items()]
+
+ return selectable.TextAsFrom(
+ self,
+ positional_input_cols + keyed_input_cols,
+ positional=bool(positional_input_cols) and not keyed_input_cols)
@property
def type(self):
@@ -1952,11 +1997,12 @@ class Tuple(ClauseList, ColumnElement):
def _select_iterable(self):
return (self, )
- def _bind_param(self, operator, obj):
+ def _bind_param(self, operator, obj, type_=None):
return Tuple(*[
BindParameter(None, o, _compared_to_operator=operator,
- _compared_to_type=type_, unique=True)
- for o, type_ in zip(obj, self._type_tuple)
+ _compared_to_type=compared_to_type, unique=True,
+ type_=type_)
+ for o, compared_to_type in zip(obj, self._type_tuple)
]).self_group()
@@ -3637,10 +3683,11 @@ class ColumnClause(Immutable, ColumnElement):
else:
return name
- def _bind_param(self, operator, obj):
+ def _bind_param(self, operator, obj, type_=None):
return BindParameter(self.key, obj,
_compared_to_operator=operator,
_compared_to_type=self.type,
+ type_=type_,
unique=True)
def _make_proxy(self, selectable, name=None, attach=True,
diff --git a/lib/sqlalchemy/sql/functions.py b/lib/sqlalchemy/sql/functions.py
index 6cfbd12b3..3c654bf67 100644
--- a/lib/sqlalchemy/sql/functions.py
+++ b/lib/sqlalchemy/sql/functions.py
@@ -256,16 +256,18 @@ class FunctionElement(Executable, ColumnElement, FromClause):
"""
return self.select().execute()
- def _bind_param(self, operator, obj):
+ def _bind_param(self, operator, obj, type_=None):
return BindParameter(None, obj, _compared_to_operator=operator,
- _compared_to_type=self.type, unique=True)
+ _compared_to_type=self.type, unique=True,
+ type_=type_)
def self_group(self, against=None):
# for the moment, we are parenthesizing all array-returning
# expressions against getitem. This may need to be made
# more portable if in the future we support other DBs
# besides postgresql.
- if against is operators.getitem:
+ if against is operators.getitem and \
+ isinstance(self.type, sqltypes.ARRAY):
return Grouping(self)
else:
return super(FunctionElement, self).self_group(against=against)
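A minimal sketch of the grouping behavior enabled by the ``isinstance(self.type, sqltypes.ARRAY)`` check above, assuming the PostgreSQL dialect (the only built-in backend with ARRAY support); this mirrors the ``array_cat`` compile test further below::

    from sqlalchemy import Integer, func
    from sqlalchemy.dialects import postgresql

    # an ARRAY-returning function; indexing it keeps the outer parenthesis
    expr = func.array_cat(
        postgresql.array([1, 2, 3]),
        postgresql.array([4, 5, 6]),
        type_=postgresql.ARRAY(Integer)
    )[3]
    # renders roughly as (array_cat(ARRAY[...], ARRAY[...]))[...];
    # a scalar-typed function result would no longer be grouped here
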
@@ -423,10 +425,11 @@ class Function(FunctionElement):
FunctionElement.__init__(self, *clauses, **kw)
- def _bind_param(self, operator, obj):
+ def _bind_param(self, operator, obj, type_=None):
return BindParameter(self.name, obj,
_compared_to_operator=operator,
_compared_to_type=self.type,
+ type_=type_,
unique=True)
@@ -659,7 +662,7 @@ class array_agg(GenericFunction):
"""support for the ARRAY_AGG function.
The ``func.array_agg(expr)`` construct returns an expression of
- type :class:`.Array`.
+ type :class:`.types.ARRAY`.
e.g.::
@@ -670,11 +673,11 @@ class array_agg(GenericFunction):
.. seealso::
:func:`.postgresql.array_agg` - PostgreSQL-specific version that
- returns :class:`.ARRAY`, which has PG-specific operators added.
+ returns :class:`.postgresql.ARRAY`, which has PG-specific operators added.
"""
- type = sqltypes.Array
+ type = sqltypes.ARRAY
def __init__(self, *args, **kwargs):
args = [_literal_as_binds(c) for c in args]
@@ -694,7 +697,7 @@ class OrderedSetAgg(GenericFunction):
func_clauses = self.clause_expr.element
order_by = sqlutil.unwrap_order_by(within_group.order_by)
if self.array_for_multi_clause and len(func_clauses.clauses) > 1:
- return sqltypes.Array(order_by[0].type)
+ return sqltypes.ARRAY(order_by[0].type)
else:
return order_by[0].type
@@ -719,7 +722,7 @@ class percentile_cont(OrderedSetAgg):
modifier to supply a sort expression to operate upon.
The return type of this function is the same as the sort expression,
- or if the arguments are an array, an :class:`.Array` of the sort
+ or if the arguments are an array, an :class:`.types.ARRAY` of the sort
expression's type.
.. versionadded:: 1.1
@@ -736,7 +739,7 @@ class percentile_disc(OrderedSetAgg):
modifier to supply a sort expression to operate upon.
The return type of this function is the same as the sort expression,
- or if the arguments are an array, an :class:`.Array` of the sort
+ or if the arguments are an array, an :class:`.types.ARRAY` of the sort
expression's type.
.. versionadded:: 1.1
diff --git a/lib/sqlalchemy/sql/operators.py b/lib/sqlalchemy/sql/operators.py
index da3576466..f4f90b664 100644
--- a/lib/sqlalchemy/sql/operators.py
+++ b/lib/sqlalchemy/sql/operators.py
@@ -12,7 +12,6 @@
from .. import util
-
from operator import (
and_, or_, inv, add, mul, sub, mod, truediv, lt, le, ne, gt, ge, eq, neg,
getitem, lshift, rshift
@@ -720,7 +719,6 @@ def istrue(a):
def isfalse(a):
raise NotImplementedError()
-
def is_(a, b):
return a.is_(b)
@@ -837,6 +835,14 @@ def nullslast_op(a):
return a.nullslast()
+def json_getitem_op(a, b):
+ raise NotImplementedError()
+
+
+def json_path_getitem_op(a, b):
+ raise NotImplementedError()
+
+
_commutative = set([eq, ne, add, mul])
_comparison = set([eq, ne, lt, gt, ge, le, between_op, like_op])
@@ -879,7 +885,8 @@ def mirror(op):
_associative = _commutative.union([concat_op, and_, or_])
-_natural_self_precedent = _associative.union([getitem])
+_natural_self_precedent = _associative.union([
+ getitem, json_getitem_op, json_path_getitem_op])
"""Operators where if we have (a op b) op c, we don't want to
parenthesize (a op b).
@@ -894,6 +901,8 @@ _PRECEDENCE = {
from_: 15,
any_op: 15,
all_op: 15,
+ json_getitem_op: 15,
+ json_path_getitem_op: 15,
getitem: 15,
mul: 8,
truediv: 8,
diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py
index b244d746c..0626cb2b4 100644
--- a/lib/sqlalchemy/sql/schema.py
+++ b/lib/sqlalchemy/sql/schema.py
@@ -30,20 +30,19 @@ as components in SQL expressions.
"""
from __future__ import absolute_import
-import inspect
from .. import exc, util, event, inspection
from .base import SchemaEventTarget, DialectKWArgs
+import operator
from . import visitors
from . import type_api
from .base import _bind_or_error, ColumnCollection
-from .elements import ClauseElement, ColumnClause, _truncated_label, \
+from .elements import ClauseElement, ColumnClause, \
_as_truncated, TextClause, _literal_as_text,\
- ColumnElement, _find_columns, quoted_name
+ ColumnElement, quoted_name
from .selectable import TableClause
import collections
import sqlalchemy
from . import ddl
-import types
RETAIN_SCHEMA = util.symbol('retain_schema')
@@ -3862,3 +3861,52 @@ class ThreadLocalMetaData(MetaData):
for e in self.__engines.values():
if hasattr(e, 'dispose'):
e.dispose()
+
+
+class _SchemaTranslateMap(object):
+ """Provide translation of schema names based on a mapping.
+
+ Also provides helpers for producing cache keys and optimized
+ access when no mapping is present.
+
+ Used by the :paramref:`.Connection.execution_options.schema_translate_map`
+ feature.
+
+ .. versionadded:: 1.1
+
+
+ """
+ __slots__ = 'map_', '__call__', 'hash_key', 'is_default'
+
+ _default_schema_getter = operator.attrgetter("schema")
+
+ def __init__(self, map_):
+ self.map_ = map_
+ if map_ is not None:
+ def schema_for_object(obj):
+ effective_schema = self._default_schema_getter(obj)
+ effective_schema = map_.get(effective_schema, effective_schema)
+ return effective_schema
+ self.__call__ = schema_for_object
+ self.hash_key = ";".join(
+ "%s=%s" % (k, map_[k])
+ for k in sorted(map_, key=str)
+ )
+ self.is_default = False
+ else:
+ self.hash_key = 0
+ self.__call__ = self._default_schema_getter
+ self.is_default = True
+
+ @classmethod
+ def _schema_getter(cls, map_):
+ if map_ is None:
+ return _default_schema_map
+ elif isinstance(map_, _SchemaTranslateMap):
+ return map_
+ else:
+ return _SchemaTranslateMap(map_)
+
+_default_schema_map = _SchemaTranslateMap(None)
+_schema_getter = _SchemaTranslateMap._schema_getter
+
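A minimal usage sketch of the schema translation feature this helper supports, assuming a PostgreSQL engine URL and a hypothetical tenant schema name::

    from sqlalchemy import create_engine, MetaData, Table, Column, Integer

    metadata = MetaData()
    # a Table with no explicit schema; the None key in the map rewrites it
    account = Table("account", metadata,
                    Column("id", Integer, primary_key=True))

    # engine URL and schema name are placeholders
    engine = create_engine("postgresql://scott:tiger@localhost/test")
    conn = engine.connect().execution_options(
        schema_translate_map={None: "tenant_schema"})

    metadata.create_all(conn)   # CREATE TABLE tenant_schema.account (...)
    conn.execute(account.insert(), id=1)
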
diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py
index 73341053d..1955fc934 100644
--- a/lib/sqlalchemy/sql/selectable.py
+++ b/lib/sqlalchemy/sql/selectable.py
@@ -3420,9 +3420,10 @@ class TextAsFrom(SelectBase):
_textual = True
- def __init__(self, text, columns):
+ def __init__(self, text, columns, positional=False):
self.element = text
self.column_args = columns
+ self.positional = positional
@property
def _bind(self):
diff --git a/lib/sqlalchemy/sql/sqltypes.py b/lib/sqlalchemy/sql/sqltypes.py
index 4abb9b15a..84bfca026 100644
--- a/lib/sqlalchemy/sql/sqltypes.py
+++ b/lib/sqlalchemy/sql/sqltypes.py
@@ -11,9 +11,13 @@
import datetime as dt
import codecs
+import collections
+import json
+from . import elements
from .type_api import TypeEngine, TypeDecorator, to_instance
-from .elements import quoted_name, TypeCoerce as type_coerce, _defer_name
+from .elements import quoted_name, TypeCoerce as type_coerce, _defer_name, \
+ Slice, _literal_as_binds
from .. import exc, util, processors
from .base import _bind_or_error, SchemaEventTarget
from . import operators
@@ -85,20 +89,16 @@ class Indexable(object):
"""
- zero_indexes = False
- """if True, Python zero-based indexes should be interpreted as one-based
- on the SQL expression side."""
-
class Comparator(TypeEngine.Comparator):
def _setup_getitem(self, index):
raise NotImplementedError()
def __getitem__(self, index):
- operator, adjusted_right_expr, result_type = \
+ adjusted_op, adjusted_right_expr, result_type = \
self._setup_getitem(index)
return self.operate(
- operator,
+ adjusted_op,
adjusted_right_expr,
result_type=result_type
)
@@ -909,9 +909,9 @@ class LargeBinary(_Binary):
"""A type for large binary byte data.
- The Binary type generates BLOB or BYTEA when tables are created,
- and also converts incoming values using the ``Binary`` callable
- provided by each DB-API.
+ The :class:`.LargeBinary` type corresponds to a large and/or unlengthed
+ binary type for the target platform, such as BLOB on MySQL and BYTEA for
+ PostgreSQL. It also handles the necessary conversions for the DBAPI.
"""
@@ -922,13 +922,8 @@ class LargeBinary(_Binary):
Construct a LargeBinary type.
:param length: optional, a length for the column for use in
- DDL statements, for those BLOB types that accept a length
- (i.e. MySQL). It does *not* produce a *lengthed* BINARY/VARBINARY
- type - use the BINARY/VARBINARY types specifically for those.
- May be safely omitted if no ``CREATE
- TABLE`` will be issued. Certain databases may require a
- *length* for use in DDL, and will raise an exception when
- the ``CREATE TABLE`` DDL is issued.
+ DDL statements, for those binary types that accept a length,
+ such as the MySQL BLOB type.
"""
_Binary.__init__(self, length=length)
@@ -1496,7 +1491,221 @@ class Interval(_DateAffinity, TypeDecorator):
return self.impl.coerce_compared_value(op, value)
-class Array(Indexable, Concatenable, TypeEngine):
+class JSON(Indexable, TypeEngine):
+ """Represent a SQL JSON type.
+
+ .. note:: :class:`.types.JSON` is provided as a facade for vendor-specific
+ JSON types. Since it supports JSON SQL operations, it only
+ works on backends that have an actual JSON type, currently
+ PostgreSQL as well as certain versions of MySQL.
+
+ :class:`.types.JSON` is part of the Core in support of the growing
+ popularity of native JSON datatypes.
+
+ The :class:`.types.JSON` type stores arbitrary JSON format data, e.g.::
+
+ data_table = Table('data_table', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', JSON)
+ )
+
+ with engine.connect() as conn:
+ conn.execute(
+ data_table.insert(),
+ data = {"key1": "value1", "key2": "value2"}
+ )
+
+ The base :class:`.types.JSON` provides the following operations:
+
+ * Keyed index operations::
+
+ data_table.c.data['some key']
+
+ * Integer index operations::
+
+ data_table.c.data[3]
+
+ * Path index operations::
+
+ data_table.c.data[('key_1', 'key_2', 5, ..., 'key_n')]
+
+ Additional operations are available from the dialect-specific versions
+ of :class:`.types.JSON`, such as :class:`.postgresql.JSON` and
+ :class:`.postgresql.JSONB`, each of which offers more operators than
+ just the basic type.
+
+ Index operations return an expression object whose type defaults to
+ :class:`.JSON`, so that further JSON-oriented instructions
+ may be called upon the result type.
+
+ The :class:`.JSON` type, when used with the SQLAlchemy ORM, does not
+ detect in-place mutations to the structure. In order to detect these, the
+ :mod:`sqlalchemy.ext.mutable` extension must be used. This extension will
+ allow "in-place" changes to the datastructure to produce events which
+ will be detected by the unit of work. See the example at :class:`.HSTORE`
+ for a simple example involving a dictionary.
+
+ When working with NULL values, the :class:`.JSON` type recommends the
+ use of two specific constants in order to differentiate between a column
+ that evaluates to SQL NULL, i.e. no value, vs. the JSON-encoded string
+ of ``"null"``. To insert or select against a value that is SQL NULL,
+ use the constant :func:`.null`::
+
+ from sqlalchemy import null
+ conn.execute(table.insert(), json_value=null())
+
+ To insert or select against a value that is JSON ``"null"``, use the
+ constant :attr:`.JSON.NULL`::
+
+ conn.execute(table.insert(), json_value=JSON.NULL)
+
+ The :class:`.JSON` type supports a flag
+ :paramref:`.JSON.none_as_null` which when set to True will result
+ in the Python constant ``None`` evaluating to the value of SQL
+ NULL, and when set to False results in the Python constant
+ ``None`` evaluating to the value of JSON ``"null"``. The Python
+ value ``None`` may be used in conjunction with either
+ :attr:`.JSON.NULL` or :func:`.null` in order to indicate NULL
+ values, but care must be taken as to the value of the
+ :paramref:`.JSON.none_as_null` flag in these cases.
+
+ .. seealso::
+
+ :class:`.postgresql.JSON`
+
+ :class:`.postgresql.JSONB`
+
+ :class:`.mysql.JSON`
+
+ .. versionadded:: 1.1
+
+
+ """
+ __visit_name__ = 'JSON'
+
+ hashable = False
+ NULL = util.symbol('JSON_NULL')
+ """Describe the json value of NULL.
+
+ This value is used to force the JSON value of ``"null"`` to be
+ used as the value. A value of Python ``None`` will be recognized
+ either as SQL NULL or JSON ``"null"``, based on the setting
+ of the :paramref:`.JSON.none_as_null` flag; the :attr:`.JSON.NULL`
+ constant can be used to always resolve to JSON ``"null"`` regardless
+ of this setting. This is in contrast to the :func:`.sql.null` construct,
+ which always resolves to SQL NULL. E.g.::
+
+ from sqlalchemy import null
+ from sqlalchemy.dialects.postgresql import JSON
+
+ obj1 = MyObject(json_value=null()) # will *always* insert SQL NULL
+ obj2 = MyObject(json_value=JSON.NULL) # will *always* insert JSON string "null"
+
+ session.add_all([obj1, obj2])
+ session.commit()
+
+ """
+
+ def __init__(self, none_as_null=False):
+ """Construct a :class:`.types.JSON` type.
+
+ :param none_as_null=False: if True, persist the value ``None`` as a
+ SQL NULL value, not the JSON encoding of ``null``. Note that
+ when this flag is False, the :func:`.null` construct can still
+ be used to persist a NULL value::
+
+ from sqlalchemy import null
+ conn.execute(table.insert(), data=null())
+
+ .. seealso::
+
+ :attr:`.types.JSON.NULL`
+
+ """
+ self.none_as_null = none_as_null
+
+ class JSONIndexType(TypeEngine):
+ """Placeholder for the datatype of a JSON index value.
+
+ This allows execution-time processing of JSON index values
+ for special syntaxes.
+
+ """
+
+ class JSONPathType(TypeEngine):
+ """Placeholder type for JSON path operations.
+
+ This allows execution-time processing of a path-based
+ index value into a specific SQL syntax.
+
+ """
+
+ class Comparator(Indexable.Comparator, Concatenable.Comparator):
+ """Define comparison operations for :class:`.types.JSON`."""
+
+ @util.dependencies('sqlalchemy.sql.default_comparator')
+ def _setup_getitem(self, default_comparator, index):
+ if not isinstance(index, util.string_types) and \
+ isinstance(index, collections.Sequence):
+ index = default_comparator._check_literal(
+ self.expr, operators.json_path_getitem_op,
+ index, bindparam_type=JSON.JSONPathType
+ )
+
+ operator = operators.json_path_getitem_op
+ else:
+ index = default_comparator._check_literal(
+ self.expr, operators.json_getitem_op,
+ index, bindparam_type=JSON.JSONIndexType
+ )
+ operator = operators.json_getitem_op
+
+ return operator, index, self.type
+
+ comparator_factory = Comparator
+
+ @property
+ def should_evaluate_none(self):
+ return not self.none_as_null
+
+ @util.memoized_property
+ def _str_impl(self):
+ return String(convert_unicode=True)
+
+ def bind_processor(self, dialect):
+ string_process = self._str_impl.bind_processor(dialect)
+
+ json_serializer = dialect._json_serializer or json.dumps
+
+ def process(value):
+ if value is self.NULL:
+ value = None
+ elif isinstance(value, elements.Null) or (
+ value is None and self.none_as_null
+ ):
+ return None
+
+ serialized = json_serializer(value)
+ if string_process:
+ serialized = string_process(serialized)
+ return serialized
+
+ return process
+
+ def result_processor(self, dialect, coltype):
+ string_process = self._str_impl.result_processor(dialect, coltype)
+ json_deserializer = dialect._json_deserializer or json.loads
+
+ def process(value):
+ if value is None:
+ return None
+ if string_process:
+ value = string_process(value)
+ return json_deserializer(value)
+ return process
+
+
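A minimal sketch of how the index operations described in the :class:`.types.JSON` docstring compose into a Core query; the table and key names here are hypothetical::

    from sqlalchemy import (
        Column, Integer, JSON, MetaData, String, Table, cast, select)

    metadata = MetaData()
    data_table = Table(
        "data_table", metadata,
        Column("id", Integer, primary_key=True),
        Column("data", JSON),
    )

    # a keyed index uses JSONIndexType; a tuple index uses JSONPathType
    keyed = data_table.c.data["some key"]
    path = data_table.c.data[("key_1", "key_2", 5)]

    # comparisons are typically made against the JSON-encoded text via CAST
    stmt = select([data_table.c.id]).where(cast(keyed, String) == '"value1"')
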
+class ARRAY(Indexable, Concatenable, TypeEngine):
"""Represent a SQL Array type.
.. note:: This type serves as the basis for all ARRAY operations.
@@ -1506,17 +1715,17 @@ class Array(Indexable, Concatenable, TypeEngine):
with PostgreSQL, as it provides additional operators specific
to that backend.
- :class:`.Array` is part of the Core in support of various SQL standard
+ :class:`.types.ARRAY` is part of the Core in support of various SQL standard
functions such as :class:`.array_agg` which explicitly involve arrays;
however, with the exception of the PostgreSQL backend and possibly
some third-party dialects, no other SQLAlchemy built-in dialect has
support for this type.
- An :class:`.Array` type is constructed given the "type"
+ An :class:`.types.ARRAY` type is constructed given the "type"
of element::
mytable = Table("mytable", metadata,
- Column("data", Array(Integer))
+ Column("data", ARRAY(Integer))
)
The above type represents an N-dimensional array,
@@ -1529,11 +1738,11 @@ class Array(Indexable, Concatenable, TypeEngine):
data=[1,2,3]
)
- The :class:`.Array` type can be constructed given a fixed number
+ The :class:`.types.ARRAY` type can be constructed given a fixed number
of dimensions::
mytable = Table("mytable", metadata,
- Column("data", Array(Integer, dimensions=2))
+ Column("data", ARRAY(Integer, dimensions=2))
)
Sending a number of dimensions is optional, but recommended if the
@@ -1555,10 +1764,10 @@ class Array(Indexable, Concatenable, TypeEngine):
>>> expr = table.c.column[5] # returns ARRAY(Integer, dimensions=1)
>>> expr = expr[6] # returns Integer
- For 1-dimensional arrays, an :class:`.Array` instance with no
+ For 1-dimensional arrays, an :class:`.types.ARRAY` instance with no
dimension parameter will generally assume single-dimensional behaviors.
- SQL expressions of type :class:`.Array` have support for "index" and
+ SQL expressions of type :class:`.types.ARRAY` have support for "index" and
"slice" behavior. The Python ``[]`` operator works normally here, given
integer indexes or slices. Arrays default to 1-based indexing.
The operator produces binary expression
@@ -1575,9 +1784,9 @@ class Array(Indexable, Concatenable, TypeEngine):
mytable.c.data[2:7]: [1, 2, 3]
})
- The :class:`.Array` type also provides for the operators
- :meth:`.Array.Comparator.any` and :meth:`.Array.Comparator.all`.
- The PostgreSQL-specific version of :class:`.Array` also provides additional
+ The :class:`.types.ARRAY` type also provides for the operators
+ :meth:`.types.ARRAY.Comparator.any` and :meth:`.types.ARRAY.Comparator.all`.
+ The PostgreSQL-specific version of :class:`.types.ARRAY` also provides additional
operators.
.. versionadded:: 1.1.0
@@ -1589,9 +1798,13 @@ class Array(Indexable, Concatenable, TypeEngine):
"""
__visit_name__ = 'ARRAY'
+ zero_indexes = False
+ """if True, Python zero-based indexes should be interpreted as one-based
+ on the SQL expression side."""
+
class Comparator(Indexable.Comparator, Concatenable.Comparator):
- """Define comparison operations for :class:`.Array`.
+ """Define comparison operations for :class:`.types.ARRAY`.
More operators are available on the dialect-specific form
of this type. See :class:`.postgresql.ARRAY.Comparator`.
@@ -1601,11 +1814,32 @@ class Array(Indexable, Concatenable, TypeEngine):
def _setup_getitem(self, index):
if isinstance(index, slice):
return_type = self.type
- elif self.type.dimensions is None or self.type.dimensions == 1:
- return_type = self.type.item_type
+ if self.type.zero_indexes:
+ index = slice(
+ index.start + 1,
+ index.stop + 1,
+ index.step
+ )
+ index = Slice(
+ _literal_as_binds(
+ index.start, name=self.expr.key,
+ type_=type_api.INTEGERTYPE),
+ _literal_as_binds(
+ index.stop, name=self.expr.key,
+ type_=type_api.INTEGERTYPE),
+ _literal_as_binds(
+ index.step, name=self.expr.key,
+ type_=type_api.INTEGERTYPE)
+ )
else:
- adapt_kw = {'dimensions': self.type.dimensions - 1}
- return_type = self.type.adapt(self.type.__class__, **adapt_kw)
+ if self.type.zero_indexes:
+ index += 1
+ if self.type.dimensions is None or self.type.dimensions == 1:
+ return_type = self.type.item_type
+ else:
+ adapt_kw = {'dimensions': self.type.dimensions - 1}
+ return_type = self.type.adapt(
+ self.type.__class__, **adapt_kw)
return operators.getitem, index, return_type
@@ -1635,7 +1869,7 @@ class Array(Indexable, Concatenable, TypeEngine):
:func:`.sql.expression.any_`
- :meth:`.Array.Comparator.all`
+ :meth:`.types.ARRAY.Comparator.all`
"""
operator = operator if operator else operators.eq
@@ -1670,7 +1904,7 @@ class Array(Indexable, Concatenable, TypeEngine):
:func:`.sql.expression.all_`
- :meth:`.Array.Comparator.any`
+ :meth:`.types.ARRAY.Comparator.any`
"""
operator = operator if operator else operators.eq
@@ -1683,18 +1917,18 @@ class Array(Indexable, Concatenable, TypeEngine):
def __init__(self, item_type, as_tuple=False, dimensions=None,
zero_indexes=False):
- """Construct an :class:`.Array`.
+ """Construct an :class:`.types.ARRAY`.
E.g.::
- Column('myarray', Array(Integer))
+ Column('myarray', ARRAY(Integer))
Arguments are:
:param item_type: The data type of items of this array. Note that
dimensionality is irrelevant here, so multi-dimensional arrays like
- ``INTEGER[][]``, are constructed as ``Array(Integer)``, not as
- ``Array(Array(Integer))`` or such.
+ ``INTEGER[][]``, are constructed as ``ARRAY(Integer)``, not as
+ ``ARRAY(ARRAY(Integer))`` or such.
:param as_tuple=False: Specify whether return results
should be converted to tuples from lists. This parameter is
@@ -1706,7 +1940,7 @@ class Array(Indexable, Concatenable, TypeEngine):
on the database, how it goes about interpreting Python and
result values, as well as how expression behavior in conjunction
with the "getitem" operator works. See the description at
- :class:`.Array` for additional detail.
+ :class:`.types.ARRAY` for additional detail.
:param zero_indexes=False: when True, index values will be converted
between Python zero-based and SQL one-based indexes, e.g.
@@ -1714,7 +1948,7 @@ class Array(Indexable, Concatenable, TypeEngine):
to the database.
"""
- if isinstance(item_type, Array):
+ if isinstance(item_type, ARRAY):
raise ValueError("Do not nest ARRAY types; ARRAY(basetype) "
"handles multi-dimensional arrays of basetype")
if isinstance(item_type, type):
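A minimal sketch of the ``zero_indexes=True`` behavior described above, assuming a backend with ARRAY support such as PostgreSQL::

    from sqlalchemy import Column, Integer, MetaData, Table, select
    from sqlalchemy.types import ARRAY

    metadata = MetaData()
    t = Table("mytable", metadata,
              Column("data", ARRAY(Integer, zero_indexes=True)))

    # the Python zero-based index 0 is sent to the database as 1
    stmt = select([t.c.data[0]])
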
diff --git a/lib/sqlalchemy/testing/__init__.py b/lib/sqlalchemy/testing/__init__.py
index bd6377eb7..d24f31321 100644
--- a/lib/sqlalchemy/testing/__init__.py
+++ b/lib/sqlalchemy/testing/__init__.py
@@ -22,7 +22,7 @@ from .assertions import emits_warning, emits_warning_on, uses_deprecated, \
eq_, ne_, le_, is_, is_not_, startswith_, assert_raises, \
assert_raises_message, AssertsCompiledSQL, ComparesTables, \
AssertsExecutionResults, expect_deprecated, expect_warnings, \
- in_, not_in_
+ in_, not_in_, eq_ignore_whitespace
from .util import run_as_contextmanager, rowset, fail, \
provide_metadata, adict, force_drop_names, \
diff --git a/lib/sqlalchemy/testing/assertions.py b/lib/sqlalchemy/testing/assertions.py
index 63667654d..8c962d7a3 100644
--- a/lib/sqlalchemy/testing/assertions.py
+++ b/lib/sqlalchemy/testing/assertions.py
@@ -245,6 +245,15 @@ def startswith_(a, fragment, msg=None):
a, fragment)
+def eq_ignore_whitespace(a, b, msg=None):
+ a = re.sub(r'^\s+?|\n', "", a)
+ a = re.sub(r' {2,}', " ", a)
+ b = re.sub(r'^\s+?|\n', "", b)
+ b = re.sub(r' {2,}', " ", b)
+
+ assert a == b, msg or "%r != %r" % (a, b)
+
+
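A minimal usage sketch for the new helper; the two strings below differ only in whitespace and so compare equal::

    eq_ignore_whitespace(
        "SELECT a,\n       b   FROM t",
        "SELECT a, b FROM t"
    )
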
def assert_raises(except_cls, callable_, *args, **kw):
try:
callable_(*args, **kw)
@@ -273,7 +282,8 @@ class AssertsCompiledSQL(object):
check_prefetch=None,
use_default_dialect=False,
allow_dialect_select=False,
- literal_binds=False):
+ literal_binds=False,
+ schema_translate_map=None):
if use_default_dialect:
dialect = default.DefaultDialect()
elif allow_dialect_select:
@@ -292,6 +302,9 @@ class AssertsCompiledSQL(object):
kw = {}
compile_kwargs = {}
+ if schema_translate_map:
+ kw['schema_translate_map'] = schema_translate_map
+
if params is not None:
kw['column_keys'] = list(params)
diff --git a/lib/sqlalchemy/testing/assertsql.py b/lib/sqlalchemy/testing/assertsql.py
index 39d078985..56c422cf1 100644
--- a/lib/sqlalchemy/testing/assertsql.py
+++ b/lib/sqlalchemy/testing/assertsql.py
@@ -87,13 +87,18 @@ class CompiledSQL(SQLMatchRule):
compare_dialect = self._compile_dialect(execute_observed)
if isinstance(context.compiled.statement, _DDLCompiles):
compiled = \
- context.compiled.statement.compile(dialect=compare_dialect)
+ context.compiled.statement.compile(
+ dialect=compare_dialect,
+ schema_translate_map=context.
+ execution_options.get('schema_translate_map'))
else:
compiled = (
context.compiled.statement.compile(
dialect=compare_dialect,
column_keys=context.compiled.column_keys,
- inline=context.compiled.inline)
+ inline=context.compiled.inline,
+ schema_translate_map=context.
+ execution_options.get('schema_translate_map'))
)
_received_statement = re.sub(r'[\n\t]', '', util.text_type(compiled))
parameters = execute_observed.parameters
diff --git a/lib/sqlalchemy/testing/plugin/pytestplugin.py b/lib/sqlalchemy/testing/plugin/pytestplugin.py
index 30d7aa73a..5bb6b966d 100644
--- a/lib/sqlalchemy/testing/plugin/pytestplugin.py
+++ b/lib/sqlalchemy/testing/plugin/pytestplugin.py
@@ -55,7 +55,7 @@ def pytest_sessionstart(session):
plugin_base.post_begin()
if has_xdist:
- _follower_count = itertools.count(1)
+ import uuid
def pytest_configure_node(node):
# the master for each node fills slaveinput dictionary
@@ -63,7 +63,7 @@ if has_xdist:
plugin_base.memoize_important_follower_config(node.slaveinput)
- node.slaveinput["follower_ident"] = "test_%s" % next(_follower_count)
+ node.slaveinput["follower_ident"] = "test_%s" % uuid.uuid4().hex[0:12]
from sqlalchemy.testing import provision
provision.create_follower_db(node.slaveinput["follower_ident"])
diff --git a/lib/sqlalchemy/testing/profiling.py b/lib/sqlalchemy/testing/profiling.py
index 357735656..a152d5e93 100644
--- a/lib/sqlalchemy/testing/profiling.py
+++ b/lib/sqlalchemy/testing/profiling.py
@@ -75,6 +75,11 @@ class ProfileStatsFile(object):
platform_tokens.append("pypy")
if win32:
platform_tokens.append("win")
+ platform_tokens.append(
+ "nativeunicode"
+ if config.db.dialect.convert_unicode
+ else "dbapiunicode"
+ )
_has_cext = config.requirements._has_cextensions()
platform_tokens.append(_has_cext and "cextensions" or "nocextensions")
return "_".join(platform_tokens)
diff --git a/lib/sqlalchemy/testing/requirements.py b/lib/sqlalchemy/testing/requirements.py
index 15bfad831..1b5d6e883 100644
--- a/lib/sqlalchemy/testing/requirements.py
+++ b/lib/sqlalchemy/testing/requirements.py
@@ -487,6 +487,19 @@ class SuiteRequirements(Requirements):
return exclusions.open()
@property
+ def json_type(self):
+ """target platform implements a native JSON type."""
+
+ return exclusions.closed()
+
+ @property
+ def json_array_indexes(self):
+ """"target platform supports numeric array indexes
+ within a JSON structure"""
+
+ return self.json_type
+
+ @property
def precision_numerics_general(self):
"""target backend has general support for moderately high-precision
numerics."""
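A minimal sketch of how a dialect's requirements class might opt in to the new exclusions; the class name and backend here are illustrative::

    from sqlalchemy.testing import exclusions
    from sqlalchemy.testing.requirements import SuiteRequirements

    class MyRequirements(SuiteRequirements):
        @property
        def json_type(self):
            # enable the JSON round-trip tests on this backend only
            return exclusions.only_on(["postgresql"])
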
diff --git a/lib/sqlalchemy/testing/suite/test_types.py b/lib/sqlalchemy/testing/suite/test_types.py
index 230aeb1e9..6231e0fb9 100644
--- a/lib/sqlalchemy/testing/suite/test_types.py
+++ b/lib/sqlalchemy/testing/suite/test_types.py
@@ -5,7 +5,7 @@ from ..assertions import eq_
from ..config import requirements
from sqlalchemy import Integer, Unicode, UnicodeText, select
from sqlalchemy import Date, DateTime, Time, MetaData, String, \
- Text, Numeric, Float, literal, Boolean
+ Text, Numeric, Float, literal, Boolean, cast, null, JSON, and_
from ..schema import Table, Column
from ... import testing
import decimal
@@ -586,7 +586,260 @@ class BooleanTest(_LiteralRoundTripFixture, fixtures.TablesTest):
)
-__all__ = ('UnicodeVarcharTest', 'UnicodeTextTest',
+class JSONTest(_LiteralRoundTripFixture, fixtures.TablesTest):
+ __requires__ = 'json_type',
+ __backend__ = True
+
+ datatype = JSON
+
+ data1 = {
+ "key1": "value1",
+ "key2": "value2"
+ }
+
+ data2 = {
+ "Key 'One'": "value1",
+ "key two": "value2",
+ "key three": "value ' three '"
+ }
+
+ data3 = {
+ "key1": [1, 2, 3],
+ "key2": ["one", "two", "three"],
+ "key3": [{"four": "five"}, {"six": "seven"}]
+ }
+
+ data4 = ["one", "two", "three"]
+
+ data5 = {
+ "nested": {
+ "elem1": [
+ {"a": "b", "c": "d"},
+ {"e": "f", "g": "h"}
+ ],
+ "elem2": {
+ "elem3": {"elem4": "elem5"}
+ }
+ }
+ }
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table('data_table', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('name', String(30), nullable=False),
+ Column('data', cls.datatype),
+ Column('nulldata', cls.datatype(none_as_null=True))
+ )
+
+ def test_round_trip_data1(self):
+ self._test_round_trip(self.data1)
+
+ def _test_round_trip(self, data_element):
+ data_table = self.tables.data_table
+
+ config.db.execute(
+ data_table.insert(),
+ {'name': 'row1', 'data': data_element}
+ )
+
+ row = config.db.execute(
+ select([
+ data_table.c.data,
+ ])
+ ).first()
+
+ eq_(row, (data_element, ))
+
+ def test_round_trip_none_as_sql_null(self):
+ col = self.tables.data_table.c['nulldata']
+
+ with config.db.connect() as conn:
+ conn.execute(
+ self.tables.data_table.insert(),
+ {"name": "r1", "data": None}
+ )
+
+ eq_(
+ conn.scalar(
+ select([self.tables.data_table.c.name]).
+ where(col.is_(null()))
+ ),
+ "r1"
+ )
+
+ eq_(
+ conn.scalar(
+ select([col])
+ ),
+ None
+ )
+
+ def test_round_trip_json_null_as_json_null(self):
+ col = self.tables.data_table.c['data']
+
+ with config.db.connect() as conn:
+ conn.execute(
+ self.tables.data_table.insert(),
+ {"name": "r1", "data": JSON.NULL}
+ )
+
+ eq_(
+ conn.scalar(
+ select([self.tables.data_table.c.name]).
+ where(cast(col, String) == 'null')
+ ),
+ "r1"
+ )
+
+ eq_(
+ conn.scalar(
+ select([col])
+ ),
+ None
+ )
+
+ def test_round_trip_none_as_json_null(self):
+ col = self.tables.data_table.c['data']
+
+ with config.db.connect() as conn:
+ conn.execute(
+ self.tables.data_table.insert(),
+ {"name": "r1", "data": None}
+ )
+
+ eq_(
+ conn.scalar(
+ select([self.tables.data_table.c.name]).
+ where(cast(col, String) == 'null')
+ ),
+ "r1"
+ )
+
+ eq_(
+ conn.scalar(
+ select([col])
+ ),
+ None
+ )
+
+ def _criteria_fixture(self):
+ config.db.execute(
+ self.tables.data_table.insert(),
+ [{"name": "r1", "data": self.data1},
+ {"name": "r2", "data": self.data2},
+ {"name": "r3", "data": self.data3},
+ {"name": "r4", "data": self.data4},
+ {"name": "r5", "data": self.data5}]
+ )
+
+ def _test_index_criteria(self, crit, expected):
+ self._criteria_fixture()
+ with config.db.connect() as conn:
+ eq_(
+ conn.scalar(
+ select([self.tables.data_table.c.name]).
+ where(crit)
+ ),
+ expected
+ )
+
+ def test_crit_spaces_in_key(self):
+ name = self.tables.data_table.c.name
+ col = self.tables.data_table.c['data']
+
+ # limit the rows here to avoid PG error
+ # "cannot extract field from a non-object", which is
+ # fixed in 9.4 but may exist in 9.3
+ self._test_index_criteria(
+ and_(
+ name.in_(["r1", "r2", "r3"]),
+ cast(col["key two"], String) == '"value2"'
+ ),
+ "r2"
+ )
+
+ @config.requirements.json_array_indexes
+ def test_crit_simple_int(self):
+ name = self.tables.data_table.c.name
+ col = self.tables.data_table.c['data']
+
+ # limit the rows here to avoid PG error
+ # "cannot extract array element from a non-array", which is
+ # fixed in 9.4 but may exist in 9.3
+ self._test_index_criteria(
+ and_(name == 'r4', cast(col[1], String) == '"two"'),
+ "r4"
+ )
+
+ def test_crit_mixed_path(self):
+ col = self.tables.data_table.c['data']
+ self._test_index_criteria(
+ cast(col[("key3", 1, "six")], String) == '"seven"',
+ "r3"
+ )
+
+ def test_crit_string_path(self):
+ col = self.tables.data_table.c['data']
+ self._test_index_criteria(
+ cast(col[("nested", "elem2", "elem3", "elem4")], String)
+ == '"elem5"',
+ "r5"
+ )
+
+ def test_unicode_round_trip(self):
+ s = select([
+ cast(
+ {
+ util.u('réveillé'): util.u('réveillé'),
+ "data": {"k1": util.u('drôle')}
+ },
+ self.datatype
+ )
+ ])
+ eq_(
+ config.db.scalar(s),
+ {
+ util.u('réveillé'): util.u('réveillé'),
+ "data": {"k1": util.u('drôle')}
+ },
+ )
+
+ def test_eval_none_flag_orm(self):
+ from sqlalchemy.ext.declarative import declarative_base
+ from sqlalchemy.orm import Session
+
+ Base = declarative_base()
+
+ class Data(Base):
+ __table__ = self.tables.data_table
+
+ s = Session(testing.db)
+
+ d1 = Data(name='d1', data=None, nulldata=None)
+ s.add(d1)
+ s.commit()
+
+ s.bulk_insert_mappings(
+ Data, [{"name": "d2", "data": None, "nulldata": None}]
+ )
+ eq_(
+ s.query(
+ cast(self.tables.data_table.c.data, String),
+ cast(self.tables.data_table.c.nulldata, String)
+ ).filter(self.tables.data_table.c.name == 'd1').first(),
+ ("null", None)
+ )
+ eq_(
+ s.query(
+ cast(self.tables.data_table.c.data, String),
+ cast(self.tables.data_table.c.nulldata, String)
+ ).filter(self.tables.data_table.c.name == 'd2').first(),
+ ("null", None)
+ )
+
+
+__all__ = ('UnicodeVarcharTest', 'UnicodeTextTest', 'JSONTest',
'DateTest', 'DateTimeTest', 'TextTest',
'NumericTest', 'IntegerTest',
'DateTimeHistoricTest', 'DateTimeCoercedToDateTimeTest',
diff --git a/lib/sqlalchemy/types.py b/lib/sqlalchemy/types.py
index d82e683d9..ac6d3b439 100644
--- a/lib/sqlalchemy/types.py
+++ b/lib/sqlalchemy/types.py
@@ -17,7 +17,7 @@ __all__ = ['TypeEngine', 'TypeDecorator', 'UserDefinedType',
'SmallInteger', 'BigInteger', 'Numeric', 'Float', 'DateTime',
'Date', 'Time', 'LargeBinary', 'Binary', 'Boolean', 'Unicode',
'Concatenable', 'UnicodeText', 'PickleType', 'Interval', 'Enum',
- 'Indexable', 'Array']
+ 'Indexable', 'ARRAY', 'JSON']
from .sql.type_api import (
adapt_type,
@@ -28,7 +28,7 @@ from .sql.type_api import (
UserDefinedType
)
from .sql.sqltypes import (
- Array,
+ ARRAY,
BIGINT,
BINARY,
BLOB,
@@ -53,6 +53,7 @@ from .sql.sqltypes import (
INTEGER,
Integer,
Interval,
+ JSON,
LargeBinary,
MatchType,
NCHAR,
diff --git a/lib/sqlalchemy/util/compat.py b/lib/sqlalchemy/util/compat.py
index 25c88c662..737b8a087 100644
--- a/lib/sqlalchemy/util/compat.py
+++ b/lib/sqlalchemy/util/compat.py
@@ -177,27 +177,27 @@ from operator import attrgetter as dottedgetter
if py3k:
def reraise(tp, value, tb=None, cause=None):
if cause is not None:
+ assert cause is not value, "Same cause emitted"
value.__cause__ = cause
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
- def raise_from_cause(exception, exc_info=None):
- if exc_info is None:
- exc_info = sys.exc_info()
- exc_type, exc_value, exc_tb = exc_info
- reraise(type(exception), exception, tb=exc_tb, cause=exc_value)
else:
+ # not as nice as that of Py3K, but at least preserves
+ # the code line where the issue occurred
exec("def reraise(tp, value, tb=None, cause=None):\n"
+ " if cause is not None:\n"
+ " assert cause is not value, 'Same cause emitted'\n"
" raise tp, value, tb\n")
- def raise_from_cause(exception, exc_info=None):
- # not as nice as that of Py3K, but at least preserves
- # the code line where the issue occurred
- if exc_info is None:
- exc_info = sys.exc_info()
- exc_type, exc_value, exc_tb = exc_info
- reraise(type(exception), exception, tb=exc_tb)
+
+def raise_from_cause(exception, exc_info=None):
+ if exc_info is None:
+ exc_info = sys.exc_info()
+ exc_type, exc_value, exc_tb = exc_info
+ cause = exc_value if exc_value is not exception else None
+ reraise(type(exception), exception, tb=exc_tb, cause=cause)
if py3k:
exec_ = getattr(builtins, 'exec')
diff --git a/regen_callcounts.tox.ini b/regen_callcounts.tox.ini
index e74ceef36..0106de97b 100644
--- a/regen_callcounts.tox.ini
+++ b/regen_callcounts.tox.ini
@@ -1,5 +1,5 @@
[tox]
-envlist = py{27,33,34}-sqla_{cext,nocext}-db_{sqlite,postgresql,mysql}
+envlist = py{27,34,35}-sqla_{cext,nocext}-db_{sqlite,postgresql,mysql}
[base]
basecommand=
@@ -8,17 +8,15 @@ basecommand=
[testenv]
deps=pytest
mock
- py{27,33,34}-sqla_{cext,nocext}-db_{postgresql}: psycopg2
- py{27}-sqla_{cext,nocext}-db_{mysql}: mysql-python
- py{33,34}-sqla_{cext,nocext}-db_{mysql}: pymysql
-
+ db_postgresql: .[postgresql]
+ db_mysql: .[mysql]
+ db_mysql: .[pymysql]
commands=
- py{27}-sqla_{cext,nocext}-db_{mysql}: {[base]basecommand} --db mysql {posargs}
- py{33,34}-sqla_{cext,nocext}-db_{mysql}: {[base]basecommand} --db pymysql {posargs}
- db_{postgresql}: {[base]basecommand} --db postgresql {posargs}
- db_{sqlite}: {[base]basecommand} --db sqlite {posargs}
+ db_{mysql}: {[base]basecommand} --db mysql --db pymysql
+ db_{postgresql}: {[base]basecommand} --db postgresql
+ db_{sqlite}: {[base]basecommand} --db sqlite
# -E : ignore PYTHON* environment variables (such as PYTHONPATH)
# -s : don't add user site directory to sys.path; also PYTHONNOUSERSITE
diff --git a/setup.cfg b/setup.cfg
index 2d203f1ed..3881f2b99 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -16,6 +16,7 @@ python_files=test/*test_*.py
sign = 1
identity = C4DAFEE1
+
[sqla_testing]
requirement_cls=test.requirements:DefaultRequirements
profile_file=test/profiles.txt
@@ -38,16 +39,18 @@ oracle_db_link = test_link
default=sqlite:///:memory:
sqlite=sqlite:///:memory:
sqlite_file=sqlite:///querytest.db
+
postgresql=postgresql://scott:tiger@127.0.0.1:5432/test
pg8000=postgresql+pg8000://scott:tiger@127.0.0.1:5432/test
-postgres=postgresql://scott:tiger@127.0.0.1:5432/test
-postgresql_jython=postgresql+zxjdbc://scott:tiger@127.0.0.1:5432/test
postgresql_psycopg2cffi=postgresql+psycopg2cffi://scott:tiger@127.0.0.1:5432/test
-mysql=mysql://scott:tiger@127.0.0.1:3306/test?charset=utf8&use_unicode=0
-mysqlconnector=mysql+mysqlconnector://scott:tiger@127.0.0.1:3306/test
-mssql=mssql+pyodbc://scott:tiger@ms_2008
-oursql=mysql+oursql://scott:tiger@127.0.0.1:3306/test
+
+mysql=mysql://scott:tiger@127.0.0.1:3306/test?charset=utf8
pymysql=mysql+pymysql://scott:tiger@127.0.0.1:3306/test?charset=utf8
+
+mssql=mssql+pyodbc://scott:tiger@ms_2008
+mssql_pymssql=mssql+pymssql://scott:tiger@ms_2008
+
oracle=oracle://scott:tiger@127.0.0.1:1521
oracle8=oracle://scott:tiger@127.0.0.1:1521/?use_ansi=0
+
firebird=firebird://sysdba:masterkey@localhost//Users/classic/foo.fdb
diff --git a/setup.py b/setup.py
index 5b97cb9fe..7fe61a4aa 100644
--- a/setup.py
+++ b/setup.py
@@ -152,6 +152,16 @@ def run_setup(with_cext):
"Operating System :: OS Independent",
],
distclass=Distribution,
+ extras_require={
+ 'mysql': ['mysqlclient'],
+ 'pymysql': ['pymysql'],
+ 'postgresql': ['psycopg2'],
+ 'postgresql_pg8000': ['pg8000'],
+ 'postgresql_psycopg2cffi': ['psycopg2cffi'],
+ 'oracle': ['cx_oracle'],
+ 'mssql_pyodbc': ['pyodbc'],
+ 'mssql_pymssql': ['pymssql']
+ },
**kwargs
)
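With the extras declared above, a DBAPI can be pulled in at install time, e.g. ``pip install .[postgresql]`` for psycopg2 or ``pip install .[pymysql]`` for PyMySQL; the available names follow the keys of ``extras_require``.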
diff --git a/test/aaa_profiling/test_resultset.py b/test/aaa_profiling/test_resultset.py
index a964adcae..9ffa21cb6 100644
--- a/test/aaa_profiling/test_resultset.py
+++ b/test/aaa_profiling/test_resultset.py
@@ -75,7 +75,10 @@ class ExecutionTest(fixtures.TestBase):
@profiling.function_call_count()
def go():
c.execute("select 1")
- go()
+ try:
+ go()
+ finally:
+ c.close()
def test_minimal_engine_execute(self, variance=0.10):
# create an engine without any instrumentation.
diff --git a/test/base/test_tutorials.py b/test/base/test_tutorials.py
index 73dcbb524..55a0b92d6 100644
--- a/test/base/test_tutorials.py
+++ b/test/base/test_tutorials.py
@@ -50,10 +50,11 @@ class DocTest(fixtures.TestBase):
self._teardown_create_table_patcher()
self._teardown_logger()
-
def _run_doctest_for_content(self, name, content):
optionflags = (
- doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE |
+ doctest.ELLIPSIS |
+ doctest.NORMALIZE_WHITESPACE |
+ doctest.IGNORE_EXCEPTION_DETAIL |
_get_allow_unicode_flag()
)
runner = doctest.DocTestRunner(
diff --git a/test/base/test_utils.py b/test/base/test_utils.py
index 4370d612b..6d162ff4d 100644
--- a/test/base/test_utils.py
+++ b/test/base/test_utils.py
@@ -1,4 +1,5 @@
import copy
+import sys
from sqlalchemy import util, sql, exc, testing
from sqlalchemy.testing import assert_raises, assert_raises_message, fixtures
@@ -2134,6 +2135,64 @@ class TestClassHierarchy(fixtures.TestBase):
eq_(set(util.class_hierarchy(A)), set((A, B, object)))
+class ReraiseTest(fixtures.TestBase):
+ @testing.requires.python3
+ def test_raise_from_cause_same_cause(self):
+ class MyException(Exception):
+ pass
+
+ def go():
+ try:
+ raise MyException("exc one")
+ except Exception as err:
+ util.raise_from_cause(err)
+
+ try:
+ go()
+ assert False
+ except MyException as err:
+ is_(err.__cause__, None)
+
+ def test_reraise_disallow_same_cause(self):
+ class MyException(Exception):
+ pass
+
+ def go():
+ try:
+ raise MyException("exc one")
+ except Exception as err:
+ type_, value, tb = sys.exc_info()
+ util.reraise(type_, err, tb, value)
+
+ assert_raises_message(
+ AssertionError,
+ "Same cause emitted",
+ go
+ )
+
+ def test_raise_from_cause(self):
+ class MyException(Exception):
+ pass
+
+ class MyOtherException(Exception):
+ pass
+
+ me = MyException("exc on")
+
+ def go():
+ try:
+ raise me
+ except Exception:
+ util.raise_from_cause(MyOtherException("exc two"))
+
+ try:
+ go()
+ assert False
+ except MyOtherException as moe:
+ if testing.requires.python3.enabled:
+ is_(moe.__cause__, me)
+
+
class TestClassProperty(fixtures.TestBase):
def test_simple(self):
diff --git a/test/dialect/mssql/test_compiler.py b/test/dialect/mssql/test_compiler.py
index 80be9f67d..d91c79db2 100644
--- a/test/dialect/mssql/test_compiler.py
+++ b/test/dialect/mssql/test_compiler.py
@@ -383,7 +383,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
for field in 'day', 'month', 'year':
self.assert_compile(
select([extract(field, t.c.col1)]),
- 'SELECT DATEPART("%s", t.col1) AS anon_1 FROM t' % field)
+ 'SELECT DATEPART(%s, t.col1) AS anon_1 FROM t' % field)
def test_update_returning(self):
table1 = table(
diff --git a/test/dialect/mssql/test_types.py b/test/dialect/mssql/test_types.py
index dad86c60a..100e4e0ed 100644
--- a/test/dialect/mssql/test_types.py
+++ b/test/dialect/mssql/test_types.py
@@ -64,6 +64,18 @@ class MSDateTypeTest(fixtures.TestBase):
result_processor, 'abc'
)
+ def test_extract(self):
+ from sqlalchemy import extract
+ fivedaysago = datetime.datetime.now() \
+ - datetime.timedelta(days=5)
+ for field, exp in ('year', fivedaysago.year), \
+ ('month', fivedaysago.month), ('day', fivedaysago.day):
+ r = testing.db.execute(
+ select([
+ extract(field, fivedaysago)])
+ ).scalar()
+ eq_(r, exp)
+
class TypeDDLTest(fixtures.TestBase):
diff --git a/test/dialect/mysql/test_compiler.py b/test/dialect/mysql/test_compiler.py
index 60af82bab..0571ce526 100644
--- a/test/dialect/mysql/test_compiler.py
+++ b/test/dialect/mysql/test_compiler.py
@@ -184,6 +184,12 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
schema.CreateTable(t2).compile, dialect=mysql.dialect()
)
+ def test_match(self):
+ matchtable = table('matchtable', column('title', String))
+ self.assert_compile(
+ matchtable.c.title.match('somstr'),
+ "MATCH (matchtable.title) AGAINST (%s IN BOOLEAN MODE)")
+
def test_for_update(self):
table1 = table('mytable',
column('myid'), column('name'), column('description'))
diff --git a/test/dialect/mysql/test_query.py b/test/dialect/mysql/test_query.py
index 85513167c..c6b7a1036 100644
--- a/test/dialect/mysql/test_query.py
+++ b/test/dialect/mysql/test_query.py
@@ -2,10 +2,11 @@
from sqlalchemy.testing import eq_, is_
from sqlalchemy import *
-from sqlalchemy.testing import fixtures, AssertsCompiledSQL
+from sqlalchemy.testing import fixtures
from sqlalchemy import testing
-class IdiosyncrasyTest(fixtures.TestBase, AssertsCompiledSQL):
+
+class IdiosyncrasyTest(fixtures.TestBase):
__only_on__ = 'mysql'
__backend__ = True
@@ -27,7 +28,7 @@ class IdiosyncrasyTest(fixtures.TestBase, AssertsCompiledSQL):
)
-class MatchTest(fixtures.TestBase, AssertsCompiledSQL):
+class MatchTest(fixtures.TestBase):
__only_on__ = 'mysql'
__backend__ = True
@@ -75,25 +76,6 @@ class MatchTest(fixtures.TestBase, AssertsCompiledSQL):
def teardown_class(cls):
metadata.drop_all()
- @testing.fails_on('mysql+mysqlconnector', 'uses pyformat')
- def test_expression_format(self):
- format = testing.db.dialect.paramstyle == 'format' and '%s' or '?'
- self.assert_compile(
- matchtable.c.title.match('somstr'),
- "MATCH (matchtable.title) AGAINST (%s IN BOOLEAN MODE)" % format)
-
- @testing.fails_on('mysql+mysqldb', 'uses format')
- @testing.fails_on('mysql+pymysql', 'uses format')
- @testing.fails_on('mysql+cymysql', 'uses format')
- @testing.fails_on('mysql+oursql', 'uses format')
- @testing.fails_on('mysql+pyodbc', 'uses format')
- @testing.fails_on('mysql+zxjdbc', 'uses format')
- def test_expression_pyformat(self):
- format = '%(title_1)s'
- self.assert_compile(
- matchtable.c.title.match('somstr'),
- "MATCH (matchtable.title) AGAINST (%s IN BOOLEAN MODE)" % format)
-
def test_simple_match(self):
results = (matchtable.select().
where(matchtable.c.title.match('python')).
@@ -176,7 +158,7 @@ class MatchTest(fixtures.TestBase, AssertsCompiledSQL):
eq_([1, 3, 5], [r.id for r in results])
-class AnyAllTest(fixtures.TablesTest, AssertsCompiledSQL):
+class AnyAllTest(fixtures.TablesTest):
__only_on__ = 'mysql'
__backend__ = True
diff --git a/test/dialect/mysql/test_reflection.py b/test/dialect/mysql/test_reflection.py
index b8cbea819..44880c36b 100644
--- a/test/dialect/mysql/test_reflection.py
+++ b/test/dialect/mysql/test_reflection.py
@@ -9,6 +9,7 @@ from sqlalchemy import event
from sqlalchemy import sql
from sqlalchemy import inspect
from sqlalchemy.dialects.mysql import base as mysql
+from sqlalchemy.dialects.mysql import reflection as _reflection
from sqlalchemy.testing import fixtures, AssertsExecutionResults
from sqlalchemy import testing
@@ -532,7 +533,7 @@ class ReflectionTest(fixtures.TestBase, AssertsExecutionResults):
class RawReflectionTest(fixtures.TestBase):
def setup(self):
dialect = mysql.dialect()
- self.parser = mysql.MySQLTableDefinitionParser(
+ self.parser = _reflection.MySQLTableDefinitionParser(
dialect, dialect.identifier_preparer)
def test_key_reflection(self):
diff --git a/test/dialect/mysql/test_types.py b/test/dialect/mysql/test_types.py
index 7c279ffbf..1fb152377 100644
--- a/test/dialect/mysql/test_types.py
+++ b/test/dialect/mysql/test_types.py
@@ -1,6 +1,6 @@
# coding: utf-8
-from sqlalchemy.testing import eq_, assert_raises, assert_raises_message
+from sqlalchemy.testing import eq_, assert_raises, assert_raises_message, is_
from sqlalchemy import *
from sqlalchemy import sql, exc, schema
from sqlalchemy.util import u
@@ -10,6 +10,7 @@ from sqlalchemy.testing import fixtures, AssertsCompiledSQL, AssertsExecutionRes
from sqlalchemy import testing
import datetime
import decimal
+from sqlalchemy import types as sqltypes
class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
@@ -602,6 +603,49 @@ class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
eq_(colspec(table.c.y5), 'y5 YEAR(4)')
+class JSONTest(fixtures.TestBase):
+ __requires__ = ('json_type', )
+ __only_on__ = 'mysql'
+ __backend__ = True
+
+ @testing.provide_metadata
+ def test_reflection(self):
+
+ Table(
+ 'mysql_json', self.metadata,
+ Column('foo', mysql.JSON)
+ )
+ self.metadata.create_all()
+
+ reflected = Table('mysql_json', MetaData(), autoload_with=testing.db)
+ is_(reflected.c.foo.type._type_affinity, sqltypes.JSON)
+ assert isinstance(reflected.c.foo.type, mysql.JSON)
+
+ @testing.provide_metadata
+ def test_rudimental_round_trip(self):
+ # note that test_suite has many more JSON round trip tests
+ # using the backend-agnostic JSON type
+
+ mysql_json = Table(
+ 'mysql_json', self.metadata,
+ Column('foo', mysql.JSON)
+ )
+ self.metadata.create_all()
+
+ value = {
+ 'json': {'foo': 'bar'},
+ 'recs': ['one', 'two']
+ }
+
+ with testing.db.connect() as conn:
+ conn.execute(mysql_json.insert(), foo=value)
+
+ eq_(
+ conn.scalar(select([mysql_json.c.foo])),
+ value
+ )
+
+
class EnumSetTest(
fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
diff --git a/test/dialect/postgresql/test_compiler.py b/test/dialect/postgresql/test_compiler.py
index 71d8fa3e5..87e48d3f2 100644
--- a/test/dialect/postgresql/test_compiler.py
+++ b/test/dialect/postgresql/test_compiler.py
@@ -169,6 +169,24 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
"VARCHAR(1), CHECK (somecolumn IN ('x', "
"'y', 'z')))")
+ def test_create_type_schema_translate(self):
+ e1 = Enum('x', 'y', 'z', name='somename')
+ e2 = Enum('x', 'y', 'z', name='somename', schema='someschema')
+ schema_translate_map = {None: "foo", "someschema": "bar"}
+
+ self.assert_compile(
+ postgresql.CreateEnumType(e1),
+ "CREATE TYPE foo.somename AS ENUM ('x', 'y', 'z')",
+ schema_translate_map=schema_translate_map
+ )
+
+ self.assert_compile(
+ postgresql.CreateEnumType(e2),
+ "CREATE TYPE bar.somename AS ENUM ('x', 'y', 'z')",
+ schema_translate_map=schema_translate_map
+ )
+
+
def test_create_table_with_tablespace(self):
m = MetaData()
tbl = Table(
diff --git a/test/dialect/postgresql/test_types.py b/test/dialect/postgresql/test_types.py
index 49a8cfabd..50b66f290 100644
--- a/test/dialect/postgresql/test_types.py
+++ b/test/dialect/postgresql/test_types.py
@@ -7,7 +7,7 @@ from sqlalchemy import testing
import datetime
from sqlalchemy import Table, MetaData, Column, Integer, Enum, Float, select, \
func, DateTime, Numeric, exc, String, cast, REAL, TypeDecorator, Unicode, \
- Text, null, text, column, Array, any_, all_
+ Text, null, text, column, ARRAY, any_, all_
from sqlalchemy.sql import operators
from sqlalchemy import types
import sqlalchemy as sa
@@ -819,7 +819,7 @@ class ArrayTest(AssertsCompiledSQL, fixtures.TestBase):
def test_array_index_map_dimensions(self):
col = column('x', postgresql.ARRAY(Integer, dimensions=3))
is_(
- col[5].type._type_affinity, Array
+ col[5].type._type_affinity, ARRAY
)
assert isinstance(
col[5].type, postgresql.ARRAY
@@ -828,7 +828,7 @@ class ArrayTest(AssertsCompiledSQL, fixtures.TestBase):
col[5].type.dimensions, 2
)
is_(
- col[5][6].type._type_affinity, Array
+ col[5][6].type._type_affinity, ARRAY
)
assert isinstance(
col[5][6].type, postgresql.ARRAY
@@ -859,8 +859,8 @@ class ArrayTest(AssertsCompiledSQL, fixtures.TestBase):
)
# type affinity is Array...
- is_(arrtable.c.intarr[1:3].type._type_affinity, Array)
- is_(arrtable.c.strarr[1:3].type._type_affinity, Array)
+ is_(arrtable.c.intarr[1:3].type._type_affinity, ARRAY)
+ is_(arrtable.c.strarr[1:3].type._type_affinity, ARRAY)
# but the slice returns the actual type
assert isinstance(arrtable.c.intarr[1:3].type, postgresql.ARRAY)
@@ -892,12 +892,12 @@ class ArrayTest(AssertsCompiledSQL, fixtures.TestBase):
type_=postgresql.ARRAY(Integer)
)[3],
"(array_cat(ARRAY[%(param_1)s, %(param_2)s, %(param_3)s], "
- "ARRAY[%(param_4)s, %(param_5)s, %(param_6)s]))[%(param_7)s]"
+ "ARRAY[%(param_4)s, %(param_5)s, %(param_6)s]))[%(array_cat_1)s]"
)
def test_array_agg_generic(self):
expr = func.array_agg(column('q', Integer))
- is_(expr.type.__class__, types.Array)
+ is_(expr.type.__class__, types.ARRAY)
is_(expr.type.item_type.__class__, Integer)
def test_array_agg_specific(self):
@@ -1079,13 +1079,13 @@ class ArrayRoundTripTest(fixtures.TablesTest, AssertsExecutionResults):
def test_array_comparison(self):
arrtable = self.tables.arrtable
- arrtable.insert().execute(intarr=[1, 2, 3],
+ arrtable.insert().execute(id=5, intarr=[1, 2, 3],
strarr=[util.u('abc'), util.u('def')])
results = select([arrtable.c.id]).\
where(arrtable.c.intarr < [4, 5, 6]).execute()\
.fetchall()
eq_(len(results), 1)
- eq_(results[0][0], 3)
+ eq_(results[0][0], 5)
def test_array_subtype_resultprocessor(self):
arrtable = self.tables.arrtable
@@ -1811,7 +1811,7 @@ class HStoreTest(AssertsCompiledSQL, fixtures.TestBase):
def test_where_getitem(self):
self._test_where(
self.hashcol['bar'] == None,
- "(test_table.hash -> %(hash_1)s) IS NULL"
+ "test_table.hash -> %(hash_1)s IS NULL"
)
def test_cols_get(self):
@@ -1894,7 +1894,7 @@ class HStoreTest(AssertsCompiledSQL, fixtures.TestBase):
def test_cols_concat_get(self):
self._test_cols(
(self.hashcol + self.hashcol)['foo'],
- "test_table.hash || test_table.hash -> %(param_1)s AS anon_1"
+ "(test_table.hash || test_table.hash) -> %(param_1)s AS anon_1"
)
def test_cols_keys(self):
@@ -1980,6 +1980,21 @@ class HStoreRoundTripTest(fixtures.TablesTest):
cols = insp.get_columns('data_table')
assert isinstance(cols[2]['type'], HSTORE)
+ def test_literal_round_trip(self):
+ # in particular, this tests that the array index
+        # operator applied to the function result is handled by PG; some
+        # array functions require outer parenthesization on the left and
+        # we may not be doing that here
+ expr = hstore(
+ postgresql.array(['1', '2']),
+ postgresql.array(['3', None]))['1']
+ eq_(
+ testing.db.scalar(
+ select([expr])
+ ),
+ "3"
+ )
+
@testing.requires.psycopg2_native_hstore
def test_insert_native(self):
engine = testing.db
@@ -2411,100 +2426,33 @@ class JSONTest(AssertsCompiledSQL, fixtures.TestBase):
) % expected
)
- def test_bind_serialize_default(self):
- dialect = postgresql.dialect()
- proc = self.test_table.c.test_column.type._cached_bind_processor(
- dialect)
- eq_(
- proc({"A": [1, 2, 3, True, False]}),
- '{"A": [1, 2, 3, true, false]}'
- )
-
- def test_bind_serialize_None(self):
- dialect = postgresql.dialect()
- proc = self.test_table.c.test_column.type._cached_bind_processor(
- dialect)
- eq_(
- proc(None),
- 'null'
- )
-
- def test_bind_serialize_none_as_null(self):
- dialect = postgresql.dialect()
- proc = JSON(none_as_null=True)._cached_bind_processor(
- dialect)
- eq_(
- proc(None),
- None
- )
- eq_(
- proc(null()),
- None
- )
-
- def test_bind_serialize_null(self):
- dialect = postgresql.dialect()
- proc = self.test_table.c.test_column.type._cached_bind_processor(
- dialect)
- eq_(
- proc(null()),
- None
- )
-
- def test_result_deserialize_default(self):
- dialect = postgresql.dialect()
- proc = self.test_table.c.test_column.type._cached_result_processor(
- dialect, None)
- eq_(
- proc('{"A": [1, 2, 3, true, false]}'),
- {"A": [1, 2, 3, True, False]}
- )
-
- def test_result_deserialize_null(self):
- dialect = postgresql.dialect()
- proc = self.test_table.c.test_column.type._cached_result_processor(
- dialect, None)
- eq_(
- proc('null'),
- None
- )
-
- def test_result_deserialize_None(self):
- dialect = postgresql.dialect()
- proc = self.test_table.c.test_column.type._cached_result_processor(
- dialect, None)
- eq_(
- proc(None),
- None
- )
-
# This test is a bit misleading -- in real life you will need to cast to
# do anything
def test_where_getitem(self):
self._test_where(
self.jsoncol['bar'] == None,
- "(test_table.test_column -> %(test_column_1)s) IS NULL"
+ "test_table.test_column -> %(test_column_1)s IS NULL"
)
def test_where_path(self):
self._test_where(
self.jsoncol[("foo", 1)] == None,
- "(test_table.test_column #> %(test_column_1)s) IS NULL"
+ "test_table.test_column #> %(test_column_1)s IS NULL"
)
def test_path_typing(self):
col = column('x', JSON())
is_(
- col['q'].type._type_affinity, JSON
+ col['q'].type._type_affinity, types.JSON
)
is_(
- col[('q', )].type._type_affinity, JSON
+ col[('q', )].type._type_affinity, types.JSON
)
is_(
- col['q']['p'].type._type_affinity, JSON
+ col['q']['p'].type._type_affinity, types.JSON
)
is_(
- col[('q', 'p')].type._type_affinity, JSON
+ col[('q', 'p')].type._type_affinity, types.JSON
)
def test_custom_astext_type(self):
@@ -2528,7 +2476,7 @@ class JSONTest(AssertsCompiledSQL, fixtures.TestBase):
def test_where_getitem_as_text(self):
self._test_where(
self.jsoncol['bar'].astext == None,
- "(test_table.test_column ->> %(test_column_1)s) IS NULL"
+ "test_table.test_column ->> %(test_column_1)s IS NULL"
)
def test_where_getitem_astext_cast(self):
@@ -2548,7 +2496,7 @@ class JSONTest(AssertsCompiledSQL, fixtures.TestBase):
def test_where_path_as_text(self):
self._test_where(
self.jsoncol[("foo", 1)].astext == None,
- "(test_table.test_column #>> %(test_column_1)s) IS NULL"
+ "test_table.test_column #>> %(test_column_1)s IS NULL"
)
def test_cols_get(self):
diff --git a/test/dialect/test_sybase.py b/test/dialect/test_sybase.py
index 1318a282b..d8f7d3aae 100644
--- a/test/dialect/test_sybase.py
+++ b/test/dialect/test_sybase.py
@@ -1,7 +1,8 @@
-from sqlalchemy import *
+from sqlalchemy import extract, select
from sqlalchemy import sql
from sqlalchemy.databases import sybase
-from sqlalchemy.testing import *
+from sqlalchemy.testing import assert_raises_message, \
+ fixtures, AssertsCompiledSQL
class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
@@ -17,12 +18,19 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
'milliseconds': 'millisecond',
'millisecond': 'millisecond',
'year': 'year',
- }
+ }
for field, subst in list(mapping.items()):
self.assert_compile(
select([extract(field, t.c.col1)]),
'SELECT DATEPART("%s", t.col1) AS anon_1 FROM t' % subst)
+ def test_offset_not_supported(self):
+ stmt = select([1]).offset(10)
+ assert_raises_message(
+ NotImplementedError,
+ "Sybase ASE does not support OFFSET",
+ stmt.compile, dialect=self.__dialect__
+ )
diff --git a/test/engine/test_execute.py b/test/engine/test_execute.py
index fbb1878dc..5ea5d3515 100644
--- a/test/engine/test_execute.py
+++ b/test/engine/test_execute.py
@@ -21,6 +21,8 @@ from sqlalchemy.testing import fixtures
from sqlalchemy.testing.mock import Mock, call, patch
from contextlib import contextmanager
from sqlalchemy.util import nested
+from sqlalchemy.testing.assertsql import CompiledSQL
+
users, metadata, users_autoinc = None, None, None
@@ -805,6 +807,40 @@ class CompiledCacheTest(fixtures.TestBase):
eq_(compile_mock.call_count, 1)
eq_(len(cache), 1)
+ @testing.requires.schemas
+ @testing.provide_metadata
+ def test_schema_translate_in_key(self):
+ Table(
+ 'x', self.metadata, Column('q', Integer))
+ Table(
+ 'x', self.metadata, Column('q', Integer),
+ schema=config.test_schema)
+ self.metadata.create_all()
+
+ m = MetaData()
+ t1 = Table('x', m, Column('q', Integer))
+ ins = t1.insert()
+ stmt = select([t1.c.q])
+
+ cache = {}
+ with config.db.connect().execution_options(
+ compiled_cache=cache,
+ ) as conn:
+ conn.execute(ins, {"q": 1})
+ eq_(conn.scalar(stmt), 1)
+
+ with config.db.connect().execution_options(
+ compiled_cache=cache,
+ schema_translate_map={None: config.test_schema}
+ ) as conn:
+ conn.execute(ins, {"q": 2})
+ eq_(conn.scalar(stmt), 2)
+
+ with config.db.connect().execution_options(
+ compiled_cache=cache,
+ ) as conn:
+ eq_(conn.scalar(stmt), 1)
+
class MockStrategyTest(fixtures.TestBase):
@@ -989,6 +1025,156 @@ class ResultProxyTest(fixtures.TestBase):
finally:
r.close()
+class SchemaTranslateTest(fixtures.TestBase, testing.AssertsExecutionResults):
+ __requires__ = 'schemas',
+ __backend__ = True
+
+ def test_create_table(self):
+ map_ = {
+ None: config.test_schema,
+ "foo": config.test_schema, "bar": None}
+
+ metadata = MetaData()
+ t1 = Table('t1', metadata, Column('x', Integer))
+ t2 = Table('t2', metadata, Column('x', Integer), schema="foo")
+ t3 = Table('t3', metadata, Column('x', Integer), schema="bar")
+
+ with self.sql_execution_asserter(config.db) as asserter:
+ with config.db.connect().execution_options(
+ schema_translate_map=map_) as conn:
+
+ t1.create(conn)
+ t2.create(conn)
+ t3.create(conn)
+
+ t3.drop(conn)
+ t2.drop(conn)
+ t1.drop(conn)
+
+ asserter.assert_(
+ CompiledSQL("CREATE TABLE %s.t1 (x INTEGER)" % config.test_schema),
+ CompiledSQL("CREATE TABLE %s.t2 (x INTEGER)" % config.test_schema),
+ CompiledSQL("CREATE TABLE t3 (x INTEGER)"),
+ CompiledSQL("DROP TABLE t3"),
+ CompiledSQL("DROP TABLE %s.t2" % config.test_schema),
+ CompiledSQL("DROP TABLE %s.t1" % config.test_schema)
+ )
+
+ def _fixture(self):
+ metadata = self.metadata
+ Table(
+ 't1', metadata, Column('x', Integer),
+ schema=config.test_schema)
+ Table(
+ 't2', metadata, Column('x', Integer),
+ schema=config.test_schema)
+ Table('t3', metadata, Column('x', Integer), schema=None)
+ metadata.create_all()
+
+ def test_ddl_hastable(self):
+
+ map_ = {
+ None: config.test_schema,
+ "foo": config.test_schema, "bar": None}
+
+ metadata = MetaData()
+ Table('t1', metadata, Column('x', Integer))
+ Table('t2', metadata, Column('x', Integer), schema="foo")
+ Table('t3', metadata, Column('x', Integer), schema="bar")
+
+ with config.db.connect().execution_options(
+ schema_translate_map=map_) as conn:
+ metadata.create_all(conn)
+
+ assert config.db.has_table('t1', schema=config.test_schema)
+ assert config.db.has_table('t2', schema=config.test_schema)
+ assert config.db.has_table('t3', schema=None)
+
+ with config.db.connect().execution_options(
+ schema_translate_map=map_) as conn:
+ metadata.drop_all(conn)
+
+ assert not config.db.has_table('t1', schema=config.test_schema)
+ assert not config.db.has_table('t2', schema=config.test_schema)
+ assert not config.db.has_table('t3', schema=None)
+
+ @testing.provide_metadata
+ def test_crud(self):
+ self._fixture()
+
+ map_ = {
+ None: config.test_schema,
+ "foo": config.test_schema, "bar": None}
+
+ metadata = MetaData()
+ t1 = Table('t1', metadata, Column('x', Integer))
+ t2 = Table('t2', metadata, Column('x', Integer), schema="foo")
+ t3 = Table('t3', metadata, Column('x', Integer), schema="bar")
+
+ with self.sql_execution_asserter(config.db) as asserter:
+ with config.db.connect().execution_options(
+ schema_translate_map=map_) as conn:
+
+ conn.execute(t1.insert(), {'x': 1})
+ conn.execute(t2.insert(), {'x': 1})
+ conn.execute(t3.insert(), {'x': 1})
+
+ conn.execute(t1.update().values(x=1).where(t1.c.x == 1))
+ conn.execute(t2.update().values(x=2).where(t2.c.x == 1))
+ conn.execute(t3.update().values(x=3).where(t3.c.x == 1))
+
+ eq_(conn.scalar(select([t1.c.x])), 1)
+ eq_(conn.scalar(select([t2.c.x])), 2)
+ eq_(conn.scalar(select([t3.c.x])), 3)
+
+ conn.execute(t1.delete())
+ conn.execute(t2.delete())
+ conn.execute(t3.delete())
+
+ asserter.assert_(
+ CompiledSQL(
+ "INSERT INTO %s.t1 (x) VALUES (:x)" % config.test_schema),
+ CompiledSQL(
+ "INSERT INTO %s.t2 (x) VALUES (:x)" % config.test_schema),
+ CompiledSQL(
+ "INSERT INTO t3 (x) VALUES (:x)"),
+ CompiledSQL(
+ "UPDATE %s.t1 SET x=:x WHERE %s.t1.x = :x_1" % (
+ config.test_schema, config.test_schema)),
+ CompiledSQL(
+ "UPDATE %s.t2 SET x=:x WHERE %s.t2.x = :x_1" % (
+ config.test_schema, config.test_schema)),
+ CompiledSQL("UPDATE t3 SET x=:x WHERE t3.x = :x_1"),
+ CompiledSQL("SELECT %s.t1.x FROM %s.t1" % (
+ config.test_schema, config.test_schema)),
+ CompiledSQL("SELECT %s.t2.x FROM %s.t2" % (
+ config.test_schema, config.test_schema)),
+ CompiledSQL("SELECT t3.x FROM t3"),
+ CompiledSQL("DELETE FROM %s.t1" % config.test_schema),
+ CompiledSQL("DELETE FROM %s.t2" % config.test_schema),
+ CompiledSQL("DELETE FROM t3")
+ )
+
+ @testing.provide_metadata
+ def test_via_engine(self):
+ self._fixture()
+
+ map_ = {
+ None: config.test_schema,
+ "foo": config.test_schema, "bar": None}
+
+ metadata = MetaData()
+ t2 = Table('t2', metadata, Column('x', Integer), schema="foo")
+
+ with self.sql_execution_asserter(config.db) as asserter:
+ eng = config.db.execution_options(schema_translate_map=map_)
+ conn = eng.connect()
+ conn.execute(select([t2.c.x]))
+ asserter.assert_(
+ CompiledSQL("SELECT %s.t2.x FROM %s.t2" % (
+ config.test_schema, config.test_schema)),
+ )
+
class ExecutionOptionsTest(fixtures.TestBase):
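The schema translation tests above all drive the feature through execution options; a minimal sketch of the intended usage, assuming a hypothetical database URL and an already-existing schema named test_schema (both illustrative, not part of the patch):

    from sqlalchemy import create_engine, MetaData, Table, Column, Integer, select

    engine = create_engine("postgresql://scott:tiger@localhost/test")  # hypothetical

    metadata = MetaData()
    t1 = Table('t1', metadata, Column('x', Integer))   # no schema of its own

    # redirect schema-less tables into "test_schema" for this connection only
    with engine.connect().execution_options(
            schema_translate_map={None: "test_schema"}) as conn:
        metadata.create_all(conn)             # CREATE TABLE test_schema.t1 (...)
        conn.execute(t1.insert(), {'x': 1})   # INSERT INTO test_schema.t1 ...
        print(conn.scalar(select([t1.c.x])))  # SELECT test_schema.t1.x ...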
diff --git a/test/engine/test_parseconnect.py b/test/engine/test_parseconnect.py
index 4601a6bda..0e1f6c3d2 100644
--- a/test/engine/test_parseconnect.py
+++ b/test/engine/test_parseconnect.py
@@ -6,8 +6,8 @@ import sqlalchemy as tsa
from sqlalchemy.testing import fixtures
from sqlalchemy import testing
from sqlalchemy.testing.mock import Mock, MagicMock, call
-from sqlalchemy import event
-from sqlalchemy import select
+from sqlalchemy.dialects import registry
+from sqlalchemy.dialects import plugins
dialect = None
@@ -172,7 +172,6 @@ class CreateEngineTest(fixtures.TestBase):
def test_engine_from_config_custom(self):
from sqlalchemy import util
- from sqlalchemy.dialects import registry
tokens = __name__.split(".")
class MyDialect(MockDialect):
@@ -325,21 +324,18 @@ class CreateEngineTest(fixtures.TestBase):
class TestRegNewDBAPI(fixtures.TestBase):
def test_register_base(self):
- from sqlalchemy.dialects import registry
registry.register("mockdialect", __name__, "MockDialect")
e = create_engine("mockdialect://")
assert isinstance(e.dialect, MockDialect)
def test_register_dotted(self):
- from sqlalchemy.dialects import registry
registry.register("mockdialect.foob", __name__, "MockDialect")
e = create_engine("mockdialect+foob://")
assert isinstance(e.dialect, MockDialect)
def test_register_legacy(self):
- from sqlalchemy.dialects import registry
tokens = __name__.split(".")
global dialect
@@ -351,7 +347,6 @@ class TestRegNewDBAPI(fixtures.TestBase):
assert isinstance(e.dialect, MockDialect)
def test_register_per_dbapi(self):
- from sqlalchemy.dialects import registry
registry.register("mysql.my_mock_dialect", __name__, "MockDialect")
e = create_engine("mysql+my_mock_dialect://")
@@ -367,7 +362,6 @@ class TestRegNewDBAPI(fixtures.TestBase):
WrapperFactory = Mock()
WrapperFactory.get_dialect_cls.side_effect = get_dialect_cls
- from sqlalchemy.dialects import registry
registry.register("wrapperdialect", __name__, "WrapperFactory")
from sqlalchemy.dialects import sqlite
@@ -384,6 +378,39 @@ class TestRegNewDBAPI(fixtures.TestBase):
]
)
+ @testing.requires.sqlite
+ def test_plugin_registration(self):
+ from sqlalchemy.dialects import sqlite
+
+ global MyEnginePlugin
+
+ def side_effect(url, kw):
+ eq_(kw, {"logging_name": "foob"})
+ kw['logging_name'] = 'bar'
+ return MyEnginePlugin
+
+ MyEnginePlugin = Mock(side_effect=side_effect)
+
+ plugins.register("engineplugin", __name__, "MyEnginePlugin")
+
+ e = create_engine(
+ "sqlite:///?plugin=engineplugin&foo=bar", logging_name='foob')
+ eq_(e.dialect.name, "sqlite")
+ eq_(e.logging_name, "bar")
+ assert isinstance(e.dialect, sqlite.dialect)
+
+ eq_(
+ MyEnginePlugin.mock_calls,
+ [
+ call(e.url, {}),
+ call.engine_created(e)
+ ]
+ )
+ eq_(
+ str(MyEnginePlugin.mock_calls[0][1][0]),
+ "sqlite:///?foo=bar"
+ )
+
class MockDialect(DefaultDialect):
@classmethod
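The plugin registration test above substitutes a Mock for a real plugin; the sketch below shows roughly what a concrete plugin could look like, assuming the CreateEnginePlugin base class introduced in this release. The class name and behavior are invented for illustration, and plugins.register() is used only to mirror the test — ordinarily a plugin would be registered through a setuptools entry point.

    from sqlalchemy import create_engine
    from sqlalchemy.engine import CreateEnginePlugin
    from sqlalchemy.dialects import plugins

    class LoggingNamePlugin(CreateEnginePlugin):
        """Illustrative plugin that adjusts create_engine() keyword arguments."""

        def __init__(self, url, kwargs):
            super(LoggingNamePlugin, self).__init__(url, kwargs)
            # mutate the keyword dictionary before the Engine is built
            kwargs.setdefault('logging_name', 'plugged')

        def engine_created(self, engine):
            # receives the fully constructed Engine
            print("engine created:", engine.logging_name)

    plugins.register("logname", __name__, "LoggingNamePlugin")
    e = create_engine("sqlite:///?plugin=logname")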
diff --git a/test/engine/test_reconnect.py b/test/engine/test_reconnect.py
index 39ebcc91b..0183df71b 100644
--- a/test/engine/test_reconnect.py
+++ b/test/engine/test_reconnect.py
@@ -424,6 +424,7 @@ class CursorErrTest(fixtures.TestBase):
url = Mock(
get_dialect=lambda: default.DefaultDialect,
_get_entrypoint=lambda: default.DefaultDialect,
+ _instantiate_plugins=lambda kwargs: (),
translate_connect_args=lambda: {}, query={},)
eng = testing_engine(
url, options=dict(module=dbapi, _initialize=initialize))
diff --git a/test/engine/test_reflection.py b/test/engine/test_reflection.py
index b7bf87d63..f9799fda0 100644
--- a/test/engine/test_reflection.py
+++ b/test/engine/test_reflection.py
@@ -1,16 +1,15 @@
-import operator
-
import unicodedata
import sqlalchemy as sa
-from sqlalchemy import schema, events, event, inspect
+from sqlalchemy import schema, inspect
from sqlalchemy import MetaData, Integer, String
-from sqlalchemy.testing import (ComparesTables, engines, AssertsCompiledSQL,
+from sqlalchemy.testing import (
+ ComparesTables, engines, AssertsCompiledSQL,
fixtures, skip)
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.testing import eq_, assert_raises, assert_raises_message
from sqlalchemy import testing
from sqlalchemy.util import ue
-
+from sqlalchemy.testing import config
metadata, users = None, None
@@ -1345,6 +1344,18 @@ class SchemaTest(fixtures.TestBase):
metadata.drop_all()
@testing.requires.schemas
+ @testing.provide_metadata
+ def test_schema_translation(self):
+ Table('foob', self.metadata, Column('q', Integer), schema=config.test_schema)
+ self.metadata.create_all()
+
+ m = MetaData()
+ map_ = {"foob": config.test_schema}
+ with config.db.connect().execution_options(schema_translate_map=map_) as conn:
+ t = Table('foob', m, schema="foob", autoload_with=conn)
+ eq_(t.schema, "foob")
+ eq_(t.c.keys(), ['q'])
+ @testing.requires.schemas
@testing.fails_on('sybase', 'FIXME: unknown')
def test_explicit_default_schema_metadata(self):
engine = testing.db
diff --git a/test/orm/inheritance/test_basic.py b/test/orm/inheritance/test_basic.py
index 911d4bc5c..5f03a613b 100644
--- a/test/orm/inheritance/test_basic.py
+++ b/test/orm/inheritance/test_basic.py
@@ -5,7 +5,7 @@ from sqlalchemy import exc as sa_exc, util, event
from sqlalchemy.orm import *
from sqlalchemy.orm.util import instance_str
from sqlalchemy.orm import exc as orm_exc, attributes
-from sqlalchemy.testing.assertsql import AllOf, CompiledSQL, Or
+from sqlalchemy.testing.assertsql import AllOf, CompiledSQL, RegexSQL, Or
from sqlalchemy.sql import table, column
from sqlalchemy import testing
from sqlalchemy.testing import engines
@@ -1149,6 +1149,242 @@ class FlushTest(fixtures.MappedTest):
assert user_roles.count().scalar() == 1
+class PassiveDeletesTest(fixtures.MappedTest):
+ __requires__ = ('foreign_keys',)
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ "a", metadata,
+ Column('id', Integer, primary_key=True),
+ Column('type', String(30))
+ )
+ Table(
+ "b", metadata,
+ Column(
+ 'id', Integer, ForeignKey('a.id', ondelete="CASCADE"),
+ primary_key=True),
+ Column('data', String(10))
+ )
+
+ Table(
+ "c", metadata,
+ Column('cid', Integer, primary_key=True),
+ Column('bid', ForeignKey('b.id', ondelete="CASCADE"))
+ )
+
+ @classmethod
+ def setup_classes(cls):
+ class A(cls.Basic):
+ pass
+
+ class B(A):
+ pass
+
+ class C(B):
+ pass
+
+ def _fixture(self, a_p=False, b_p=False, c_p=False):
+ A, B, C = self.classes("A", "B", "C")
+ a, b, c = self.tables("a", "b", "c")
+
+ mapper(
+ A, a, passive_deletes=a_p,
+ polymorphic_on=a.c.type, polymorphic_identity='a')
+ mapper(
+ B, b, inherits=A, passive_deletes=b_p, polymorphic_identity='b')
+ mapper(
+ C, c, inherits=B, passive_deletes=c_p, polymorphic_identity='c')
+
+ def test_none(self):
+ A, B, C = self.classes("A", "B", "C")
+ self._fixture()
+
+ s = Session()
+ a1, b1, c1 = A(id=1), B(id=2), C(cid=1, id=3)
+ s.add_all([a1, b1, c1])
+ s.commit()
+
+ # want to see if the 'C' table loads even though
+ # a and b are loaded
+ c1 = s.query(B).filter_by(id=3).first()
+ s.delete(c1)
+ with self.sql_execution_asserter(testing.db) as asserter:
+ s.flush()
+ asserter.assert_(
+ RegexSQL(
+ "SELECT .* "
+ "FROM c WHERE :param_1 = c.bid",
+ [{'param_1': 3}]
+ ),
+ CompiledSQL(
+ "DELETE FROM c WHERE c.cid = :cid",
+ [{'cid': 1}]
+ ),
+ CompiledSQL(
+ "DELETE FROM b WHERE b.id = :id",
+ [{'id': 3}]
+ ),
+ CompiledSQL(
+ "DELETE FROM a WHERE a.id = :id",
+ [{'id': 3}]
+ )
+ )
+
+ def test_c_only(self):
+ A, B, C = self.classes("A", "B", "C")
+ self._fixture(c_p=True)
+
+ s = Session()
+ a1, b1, c1 = A(id=1), B(id=2), C(cid=1, id=3)
+ s.add_all([a1, b1, c1])
+ s.commit()
+
+ s.delete(a1)
+
+ with self.sql_execution_asserter(testing.db) as asserter:
+ s.flush()
+ asserter.assert_(
+ CompiledSQL(
+ "SELECT a.id AS a_id, a.type AS a_type "
+ "FROM a WHERE a.id = :param_1",
+ [{'param_1': 1}]
+ ),
+ CompiledSQL(
+ "DELETE FROM a WHERE a.id = :id",
+ [{'id': 1}]
+ )
+ )
+
+ b1.id
+ s.delete(b1)
+ with self.sql_execution_asserter(testing.db) as asserter:
+ s.flush()
+ asserter.assert_(
+ CompiledSQL(
+ "DELETE FROM b WHERE b.id = :id",
+ [{'id': 2}]
+ ),
+ CompiledSQL(
+ "DELETE FROM a WHERE a.id = :id",
+ [{'id': 2}]
+ )
+ )
+
+ # want to see if the 'C' table loads even though
+ # a and b are loaded
+ c1 = s.query(A).filter_by(id=3).first()
+ s.delete(c1)
+ with self.sql_execution_asserter(testing.db) as asserter:
+ s.flush()
+ asserter.assert_(
+ CompiledSQL(
+ "DELETE FROM b WHERE b.id = :id",
+ [{'id': 3}]
+ ),
+ CompiledSQL(
+ "DELETE FROM a WHERE a.id = :id",
+ [{'id': 3}]
+ )
+ )
+
+ def test_b_only(self):
+ A, B, C = self.classes("A", "B", "C")
+ self._fixture(b_p=True)
+
+ s = Session()
+ a1, b1, c1 = A(id=1), B(id=2), C(cid=1, id=3)
+ s.add_all([a1, b1, c1])
+ s.commit()
+
+ s.delete(a1)
+
+ with self.sql_execution_asserter(testing.db) as asserter:
+ s.flush()
+ asserter.assert_(
+ CompiledSQL(
+ "SELECT a.id AS a_id, a.type AS a_type "
+ "FROM a WHERE a.id = :param_1",
+ [{'param_1': 1}]
+ ),
+ CompiledSQL(
+ "DELETE FROM a WHERE a.id = :id",
+ [{'id': 1}]
+ )
+ )
+
+ b1.id
+ s.delete(b1)
+ with self.sql_execution_asserter(testing.db) as asserter:
+ s.flush()
+ asserter.assert_(
+ CompiledSQL(
+ "DELETE FROM a WHERE a.id = :id",
+ [{'id': 2}]
+ )
+ )
+
+ c1.id
+ s.delete(c1)
+ with self.sql_execution_asserter(testing.db) as asserter:
+ s.flush()
+ asserter.assert_(
+ CompiledSQL(
+ "DELETE FROM a WHERE a.id = :id",
+ [{'id': 3}]
+ )
+ )
+
+ def test_a_only(self):
+ A, B, C = self.classes("A", "B", "C")
+ self._fixture(a_p=True)
+
+ s = Session()
+ a1, b1, c1 = A(id=1), B(id=2), C(cid=1, id=3)
+ s.add_all([a1, b1, c1])
+ s.commit()
+
+ s.delete(a1)
+
+ with self.sql_execution_asserter(testing.db) as asserter:
+ s.flush()
+ asserter.assert_(
+ CompiledSQL(
+ "SELECT a.id AS a_id, a.type AS a_type "
+ "FROM a WHERE a.id = :param_1",
+ [{'param_1': 1}]
+ ),
+ CompiledSQL(
+ "DELETE FROM a WHERE a.id = :id",
+ [{'id': 1}]
+ )
+ )
+
+ b1.id
+ s.delete(b1)
+ with self.sql_execution_asserter(testing.db) as asserter:
+ s.flush()
+ asserter.assert_(
+ CompiledSQL(
+ "DELETE FROM a WHERE a.id = :id",
+ [{'id': 2}]
+ )
+ )
+
+ # want to see if the 'C' table loads even though
+ # a and b are loaded
+ c1 = s.query(A).filter_by(id=3).first()
+ s.delete(c1)
+ with self.sql_execution_asserter(testing.db) as asserter:
+ s.flush()
+ asserter.assert_(
+ CompiledSQL(
+ "DELETE FROM a WHERE a.id = :id",
+ [{'id': 3}]
+ )
+ )
+
+
class OptimizedGetOnDeferredTest(fixtures.MappedTest):
"""test that the 'optimized get' path accommodates deferred columns."""
diff --git a/test/orm/inheritance/test_concrete.py b/test/orm/inheritance/test_concrete.py
index 573913f74..2539d4737 100644
--- a/test/orm/inheritance/test_concrete.py
+++ b/test/orm/inheritance/test_concrete.py
@@ -486,6 +486,45 @@ class PropertyInheritanceTest(fixtures.MappedTest):
assert dest1.many_b == [b1, b2]
assert sess.query(B).filter(B.bname == 'b1').one() is b1
+ def test_overlapping_backref_relationship(self):
+ A, B, b_table, a_table, Dest, dest_table = (
+ self.classes.A,
+ self.classes.B,
+ self.tables.b_table,
+ self.tables.a_table,
+ self.classes.Dest,
+ self.tables.dest_table)
+
+        # test issue #3630; ensure no error or warning is generated
+ mapper(A, a_table)
+ mapper(B, b_table, inherits=A, concrete=True)
+ mapper(Dest, dest_table, properties={
+ 'a': relationship(A, backref='dest'),
+ 'a1': relationship(B, backref='dest')
+ })
+ configure_mappers()
+
+ def test_overlapping_forwards_relationship(self):
+ A, B, b_table, a_table, Dest, dest_table = (
+ self.classes.A,
+ self.classes.B,
+ self.tables.b_table,
+ self.tables.a_table,
+ self.classes.Dest,
+ self.tables.dest_table)
+
+        # this is the opposite mapping to that of #3630; it never generated
+        # an error / warning
+ mapper(A, a_table, properties={
+ 'dest': relationship(Dest, backref='a')
+ })
+ mapper(B, b_table, inherits=A, concrete=True, properties={
+ 'dest': relationship(Dest, backref='a1')
+ })
+ mapper(Dest, dest_table)
+ configure_mappers()
+
+
def test_polymorphic_backref(self):
"""test multiple backrefs to the same polymorphically-loading
attribute."""
diff --git a/test/orm/test_deferred.py b/test/orm/test_deferred.py
index 29087fdb8..7f449c40a 100644
--- a/test/orm/test_deferred.py
+++ b/test/orm/test_deferred.py
@@ -320,6 +320,64 @@ class DeferredOptionsTest(AssertsCompiledSQL, _fixtures.FixtureTest):
"FROM orders ORDER BY orders.id",
{})])
+ def test_undefer_group_multi(self):
+ orders, Order = self.tables.orders, self.classes.Order
+
+ mapper(Order, orders, properties=util.OrderedDict([
+ ('userident', deferred(orders.c.user_id, group='primary')),
+ ('description', deferred(orders.c.description, group='primary')),
+ ('opened', deferred(orders.c.isopen, group='secondary'))
+ ]
+ ))
+
+ sess = create_session()
+ q = sess.query(Order).order_by(Order.id)
+ def go():
+ l = q.options(
+ undefer_group('primary'), undefer_group('secondary')).all()
+ o2 = l[2]
+ eq_(o2.opened, 1)
+ eq_(o2.userident, 7)
+ eq_(o2.description, 'order 3')
+
+ self.sql_eq_(go, [
+ ("SELECT orders.user_id AS orders_user_id, "
+ "orders.description AS orders_description, "
+ "orders.isopen AS orders_isopen, "
+ "orders.id AS orders_id, "
+ "orders.address_id AS orders_address_id "
+ "FROM orders ORDER BY orders.id",
+ {})])
+
+ def test_undefer_group_multi_pathed(self):
+ orders, Order = self.tables.orders, self.classes.Order
+
+ mapper(Order, orders, properties=util.OrderedDict([
+ ('userident', deferred(orders.c.user_id, group='primary')),
+ ('description', deferred(orders.c.description, group='primary')),
+ ('opened', deferred(orders.c.isopen, group='secondary'))
+ ]
+ ))
+
+ sess = create_session()
+ q = sess.query(Order).order_by(Order.id)
+ def go():
+ l = q.options(
+ Load(Order).undefer_group('primary').undefer_group('secondary')).all()
+ o2 = l[2]
+ eq_(o2.opened, 1)
+ eq_(o2.userident, 7)
+ eq_(o2.description, 'order 3')
+
+ self.sql_eq_(go, [
+ ("SELECT orders.user_id AS orders_user_id, "
+ "orders.description AS orders_description, "
+ "orders.isopen AS orders_isopen, "
+ "orders.id AS orders_id, "
+ "orders.address_id AS orders_address_id "
+ "FROM orders ORDER BY orders.id",
+ {})])
+
def test_undefer_star(self):
orders, Order = self.tables.orders, self.classes.Order
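The undefer_group tests added above verify that several groups can be undeferred in one query and that the option is chainable from a bound Load(); a small self-contained sketch of both spellings (the declarative mapping here is invented to mirror the Order fixture):

    from sqlalchemy import create_engine, Column, Integer, String
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import Session, deferred, undefer_group, Load

    Base = declarative_base()

    class Order(Base):
        __tablename__ = 'orders'
        id = Column(Integer, primary_key=True)
        # two deferred-column groups, mirroring the test fixture
        userident = deferred(Column(Integer), group='primary')
        description = deferred(Column(String(50)), group='primary')
        opened = deferred(Column(Integer), group='secondary')

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = Session(bind=engine)

    # multiple groups as separate unbound options ...
    q = session.query(Order).options(
        undefer_group('primary'), undefer_group('secondary'))

    # ... or chained from a single Load() bound to the entity
    q = session.query(Order).options(
        Load(Order).undefer_group('primary').undefer_group('secondary'))
    print(q.all())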
diff --git a/test/orm/test_options.py b/test/orm/test_options.py
index e1e26c62c..e7b750cf4 100644
--- a/test/orm/test_options.py
+++ b/test/orm/test_options.py
@@ -3,11 +3,14 @@ from sqlalchemy.orm import attributes, mapper, relationship, backref, \
configure_mappers, create_session, synonym, Session, class_mapper, \
aliased, column_property, joinedload_all, joinedload, Query,\
util as orm_util, Load, defer
+from sqlalchemy.orm.query import QueryContext
+from sqlalchemy.orm import strategy_options
import sqlalchemy as sa
from sqlalchemy import testing
-from sqlalchemy.testing.assertions import eq_, assert_raises, assert_raises_message
+from sqlalchemy.testing.assertions import eq_, assert_raises_message
from test.orm import _fixtures
+
class QueryTest(_fixtures.FixtureTest):
run_setup_mappers = 'once'
run_inserts = 'once'
@@ -17,6 +20,7 @@ class QueryTest(_fixtures.FixtureTest):
def setup_mappers(cls):
cls._setup_stock_mapping()
+
class PathTest(object):
def _make_path(self, path):
r = []
@@ -160,11 +164,11 @@ class LoadTest(PathTest, QueryTest):
)
+
+
class OptionsTest(PathTest, QueryTest):
def _option_fixture(self, *arg):
- from sqlalchemy.orm import strategy_options
-
return strategy_options._UnboundLoad._from_keys(
strategy_options._UnboundLoad.joinedload, arg, True, {})
@@ -768,3 +772,121 @@ class OptionsNoPropTest(_fixtures.FixtureTest):
create_session().query(column).options,
joinedload(eager_option))
+
+class LocalOptsTest(PathTest, QueryTest):
+ @classmethod
+ def setup_class(cls):
+ super(LocalOptsTest, cls).setup_class()
+
+ @strategy_options.loader_option()
+ def some_col_opt_only(loadopt, key, opts):
+ return loadopt.set_column_strategy(
+ (key, ),
+ None,
+ opts,
+ opts_only=True
+ )
+
+ @strategy_options.loader_option()
+ def some_col_opt_strategy(loadopt, key, opts):
+ return loadopt.set_column_strategy(
+ (key, ),
+ {"deferred": True, "instrument": True},
+ opts
+ )
+
+ cls.some_col_opt_only = some_col_opt_only
+ cls.some_col_opt_strategy = some_col_opt_strategy
+
+ def _assert_attrs(self, opts, expected):
+ User = self.classes.User
+
+ query = create_session().query(User)
+ attr = {}
+
+ for opt in opts:
+ if isinstance(opt, strategy_options._UnboundLoad):
+ for tb in opt._to_bind:
+ tb._bind_loader(query, attr, False)
+ else:
+ attr.update(opt.context)
+
+ key = (
+ 'loader',
+ tuple(inspect(User)._path_registry[User.name.property]))
+ eq_(
+ attr[key].local_opts,
+ expected
+ )
+
+ def test_single_opt_only(self):
+ opt = strategy_options._UnboundLoad().some_col_opt_only(
+ "name", {"foo": "bar"}
+ )
+ self._assert_attrs([opt], {"foo": "bar"})
+
+ def test_unbound_multiple_opt_only(self):
+ opts = [
+ strategy_options._UnboundLoad().some_col_opt_only(
+ "name", {"foo": "bar"}
+ ),
+ strategy_options._UnboundLoad().some_col_opt_only(
+ "name", {"bat": "hoho"}
+ )
+ ]
+ self._assert_attrs(opts, {"foo": "bar", "bat": "hoho"})
+
+ def test_bound_multiple_opt_only(self):
+ User = self.classes.User
+ opts = [
+ Load(User).some_col_opt_only(
+ "name", {"foo": "bar"}
+ ).some_col_opt_only(
+ "name", {"bat": "hoho"}
+ )
+ ]
+ self._assert_attrs(opts, {"foo": "bar", "bat": "hoho"})
+
+ def test_bound_strat_opt_recvs_from_optonly(self):
+ User = self.classes.User
+ opts = [
+ Load(User).some_col_opt_only(
+ "name", {"foo": "bar"}
+ ).some_col_opt_strategy(
+ "name", {"bat": "hoho"}
+ )
+ ]
+ self._assert_attrs(opts, {"foo": "bar", "bat": "hoho"})
+
+ def test_unbound_strat_opt_recvs_from_optonly(self):
+ opts = [
+ strategy_options._UnboundLoad().some_col_opt_only(
+ "name", {"foo": "bar"}
+ ),
+ strategy_options._UnboundLoad().some_col_opt_strategy(
+ "name", {"bat": "hoho"}
+ )
+ ]
+ self._assert_attrs(opts, {"foo": "bar", "bat": "hoho"})
+
+ def test_unbound_opt_only_adds_to_strat(self):
+ opts = [
+ strategy_options._UnboundLoad().some_col_opt_strategy(
+ "name", {"bat": "hoho"}
+ ),
+ strategy_options._UnboundLoad().some_col_opt_only(
+ "name", {"foo": "bar"}
+ ),
+ ]
+ self._assert_attrs(opts, {"foo": "bar", "bat": "hoho"})
+
+ def test_bound_opt_only_adds_to_strat(self):
+ User = self.classes.User
+ opts = [
+ Load(User).some_col_opt_strategy(
+ "name", {"bat": "hoho"}
+ ).some_col_opt_only(
+ "name", {"foo": "bar"}
+ ),
+ ]
+ self._assert_attrs(opts, {"foo": "bar", "bat": "hoho"})
diff --git a/test/orm/test_query.py b/test/orm/test_query.py
index d2f9e4a66..6445ffefd 100644
--- a/test/orm/test_query.py
+++ b/test/orm/test_query.py
@@ -1,7 +1,7 @@
from sqlalchemy import (
testing, null, exists, text, union, literal, literal_column, func, between,
Unicode, desc, and_, bindparam, select, distinct, or_, collate, insert,
- Integer, String, Boolean, exc as sa_exc, util, cast)
+ Integer, String, Boolean, exc as sa_exc, util, cast, MetaData)
from sqlalchemy.sql import operators, expression
from sqlalchemy import column, table
from sqlalchemy.engine import default
@@ -13,7 +13,8 @@ from sqlalchemy.testing.assertsql import CompiledSQL
from sqlalchemy.testing.schema import Table, Column
import sqlalchemy as sa
from sqlalchemy.testing.assertions import (
- eq_, assert_raises, assert_raises_message, expect_warnings)
+ eq_, assert_raises, assert_raises_message, expect_warnings,
+ eq_ignore_whitespace)
from sqlalchemy.testing import fixtures, AssertsCompiledSQL, assert_warnings
from test.orm import _fixtures
from sqlalchemy.orm.util import join, with_parent
@@ -210,6 +211,69 @@ class RowTupleTest(QueryTest):
)
+class BindSensitiveStringifyTest(fixtures.TestBase):
+ def _fixture(self, bind_to=None):
+        # building a totally separate metadata / mapping here
+        # because we need to control whether the MetaData is bound or not
+
+ class User(object):
+ pass
+
+ m = MetaData(bind=bind_to)
+ user_table = Table(
+ 'users', m,
+ Column('id', Integer, primary_key=True),
+ Column('name', String(50)))
+
+ mapper(User, user_table)
+ return User
+
+ def _dialect_fixture(self):
+ class MyDialect(default.DefaultDialect):
+ default_paramstyle = 'qmark'
+
+ from sqlalchemy.engine import base
+ return base.Engine(mock.Mock(), MyDialect(), mock.Mock())
+
+ def _test(
+ self, bound_metadata, bound_session,
+ session_present, expect_bound):
+ if bound_metadata or bound_session:
+ eng = self._dialect_fixture()
+ else:
+ eng = None
+
+ User = self._fixture(bind_to=eng if bound_metadata else None)
+
+ s = Session(eng if bound_session else None)
+ q = s.query(User).filter(User.id == 7)
+ if not session_present:
+ q = q.with_session(None)
+
+ eq_ignore_whitespace(
+ str(q),
+ "SELECT users.id AS users_id, users.name AS users_name "
+ "FROM users WHERE users.id = ?" if expect_bound else
+ "SELECT users.id AS users_id, users.name AS users_name "
+ "FROM users WHERE users.id = :id_1"
+ )
+
+ def test_query_unbound_metadata_bound_session(self):
+ self._test(False, True, True, True)
+
+ def test_query_bound_metadata_unbound_session(self):
+ self._test(True, False, True, True)
+
+ def test_query_unbound_metadata_no_session(self):
+ self._test(False, False, False, False)
+
+ def test_query_unbound_metadata_unbound_session(self):
+ self._test(False, False, True, False)
+
+ def test_query_bound_metadata_bound_session(self):
+ self._test(True, True, True, True)
+
+
class RawSelectTest(QueryTest, AssertsCompiledSQL):
__dialect__ = 'default'
diff --git a/test/profiles.txt b/test/profiles.txt
index f6b682be1..519259e75 100644
--- a/test/profiles.txt
+++ b/test/profiles.txt
@@ -1,4 +1,4 @@
-# /Users/classic/dev/sqlalchemy/test/profiles.txt
+# /home/classic/dev/sqlalchemy/test/profiles.txt
# This file is written out on a per-environment basis.
# For each test in aaa_profiling, the corresponding function and
# environment is located within this file. If it doesn't exist,
@@ -13,546 +13,530 @@
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_insert
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.6_sqlite_pysqlite_nocextensions 74
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqldb_cextensions 74
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqldb_nocextensions 74
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_cextensions 74
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_nocextensions 74
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_cextensions 74
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_nocextensions 74
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_mysql_pymysql_cextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_mysql_pymysql_nocextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_postgresql_psycopg2_cextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_postgresql_psycopg2_nocextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_sqlite_pysqlite_cextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_sqlite_pysqlite_nocextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_mysql_pymysql_cextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_mysql_pymysql_nocextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_postgresql_psycopg2_cextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_postgresql_psycopg2_nocextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_sqlite_pysqlite_cextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_sqlite_pysqlite_nocextensions 77
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqldb_dbapiunicode_cextensions 73
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqldb_dbapiunicode_nocextensions 73
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_pymysql_dbapiunicode_cextensions 73
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_pymysql_dbapiunicode_nocextensions 73
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_dbapiunicode_cextensions 73
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 73
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_dbapiunicode_cextensions 73
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 73
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_mysql_mysqldb_dbapiunicode_cextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_mysql_mysqldb_dbapiunicode_nocextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_mysql_pymysql_dbapiunicode_cextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_mysql_pymysql_dbapiunicode_nocextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_postgresql_psycopg2_dbapiunicode_cextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_sqlite_pysqlite_dbapiunicode_cextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.5_mysql_mysqldb_dbapiunicode_cextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.5_mysql_mysqldb_dbapiunicode_nocextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.5_mysql_pymysql_dbapiunicode_cextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.5_mysql_pymysql_dbapiunicode_nocextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.5_sqlite_pysqlite_dbapiunicode_cextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 76
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_select
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.6_sqlite_pysqlite_nocextensions 157
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_cextensions 153
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_nocextensions 153
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_cextensions 157
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_nocextensions 153
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_cextensions 153
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_nocextensions 153
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_mysql_pymysql_cextensions 166
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_mysql_pymysql_nocextensions 166
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_postgresql_psycopg2_cextensions 166
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_postgresql_psycopg2_nocextensions 166
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_sqlite_pysqlite_cextensions 166
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_sqlite_pysqlite_nocextensions 166
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_mysql_pymysql_cextensions 170
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_mysql_pymysql_nocextensions 170
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_postgresql_psycopg2_cextensions 170
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_postgresql_psycopg2_nocextensions 170
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_sqlite_pysqlite_cextensions 170
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_sqlite_pysqlite_nocextensions 170
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_dbapiunicode_cextensions 156
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_dbapiunicode_nocextensions 156
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_pymysql_dbapiunicode_cextensions 156
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_pymysql_dbapiunicode_nocextensions 156
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_dbapiunicode_cextensions 156
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 156
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_dbapiunicode_cextensions 156
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 156
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_mysql_mysqldb_dbapiunicode_cextensions 169
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_mysql_mysqldb_dbapiunicode_nocextensions 169
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_mysql_pymysql_dbapiunicode_cextensions 169
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_mysql_pymysql_dbapiunicode_nocextensions 169
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_postgresql_psycopg2_dbapiunicode_cextensions 169
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 169
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_sqlite_pysqlite_dbapiunicode_cextensions 169
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 169
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.5_mysql_mysqldb_dbapiunicode_cextensions 169
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.5_mysql_mysqldb_dbapiunicode_nocextensions 169
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.5_mysql_pymysql_dbapiunicode_cextensions 169
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.5_mysql_pymysql_dbapiunicode_nocextensions 169
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.5_sqlite_pysqlite_dbapiunicode_cextensions 169
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 169
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_select_labels
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.6_sqlite_pysqlite_nocextensions 190
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqldb_cextensions 188
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqldb_nocextensions 188
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_cextensions 190
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_nocextensions 188
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_cextensions 188
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_nocextensions 188
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_mysql_pymysql_cextensions 201
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_mysql_pymysql_nocextensions 201
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_postgresql_psycopg2_cextensions 201
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_postgresql_psycopg2_nocextensions 201
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_sqlite_pysqlite_cextensions 201
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_sqlite_pysqlite_nocextensions 201
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_mysql_pymysql_cextensions 203
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_mysql_pymysql_nocextensions 203
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_postgresql_psycopg2_cextensions 203
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_postgresql_psycopg2_nocextensions 203
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_sqlite_pysqlite_cextensions 203
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_sqlite_pysqlite_nocextensions 203
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqldb_dbapiunicode_cextensions 189
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqldb_dbapiunicode_nocextensions 189
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_pymysql_dbapiunicode_cextensions 189
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_pymysql_dbapiunicode_nocextensions 189
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_dbapiunicode_cextensions 189
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 189
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_dbapiunicode_cextensions 189
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 189
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_mysql_mysqldb_dbapiunicode_cextensions 202
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_mysql_mysqldb_dbapiunicode_nocextensions 202
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_mysql_pymysql_dbapiunicode_cextensions 202
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_mysql_pymysql_dbapiunicode_nocextensions 202
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_postgresql_psycopg2_dbapiunicode_cextensions 202
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 202
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_sqlite_pysqlite_dbapiunicode_cextensions 202
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 202
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.5_mysql_mysqldb_dbapiunicode_cextensions 202
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.5_mysql_mysqldb_dbapiunicode_nocextensions 202
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.5_mysql_pymysql_dbapiunicode_cextensions 202
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.5_mysql_pymysql_dbapiunicode_nocextensions 202
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.5_sqlite_pysqlite_dbapiunicode_cextensions 202
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 202
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_update
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.6_sqlite_pysqlite_nocextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqldb_cextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqldb_nocextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_cextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_nocextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_cextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_nocextensions 77
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_mysql_pymysql_cextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_mysql_pymysql_nocextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_postgresql_psycopg2_cextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_postgresql_psycopg2_nocextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_sqlite_pysqlite_cextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_sqlite_pysqlite_nocextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_mysql_pymysql_cextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_mysql_pymysql_nocextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_postgresql_psycopg2_cextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_postgresql_psycopg2_nocextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_sqlite_pysqlite_cextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_sqlite_pysqlite_nocextensions 78
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqldb_dbapiunicode_cextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqldb_dbapiunicode_nocextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_pymysql_dbapiunicode_cextensions 74
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_pymysql_dbapiunicode_nocextensions 74
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_dbapiunicode_cextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_dbapiunicode_cextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 76
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_mysql_mysqldb_dbapiunicode_cextensions 77
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_mysql_mysqldb_dbapiunicode_nocextensions 77
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_mysql_pymysql_dbapiunicode_cextensions 75
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_mysql_pymysql_dbapiunicode_nocextensions 75
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_postgresql_psycopg2_dbapiunicode_cextensions 77
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 77
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_sqlite_pysqlite_dbapiunicode_cextensions 77
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 77
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.5_mysql_mysqldb_dbapiunicode_cextensions 77
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.5_mysql_mysqldb_dbapiunicode_nocextensions 77
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.5_mysql_pymysql_dbapiunicode_cextensions 75
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.5_mysql_pymysql_dbapiunicode_nocextensions 75
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.5_sqlite_pysqlite_dbapiunicode_cextensions 77
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 77
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.6_sqlite_pysqlite_nocextensions 146
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_cextensions 146
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_nocextensions 146
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_cextensions 147
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_nocextensions 146
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_cextensions 146
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_nocextensions 146
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_mysql_pymysql_cextensions 146
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_mysql_pymysql_nocextensions 146
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_postgresql_psycopg2_cextensions 146
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_postgresql_psycopg2_nocextensions 146
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_sqlite_pysqlite_cextensions 146
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_sqlite_pysqlite_nocextensions 146
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_mysql_pymysql_cextensions 146
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_mysql_pymysql_nocextensions 146
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_postgresql_psycopg2_cextensions 146
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_postgresql_psycopg2_nocextensions 147
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_sqlite_pysqlite_cextensions 146
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_sqlite_pysqlite_nocextensions 146
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_dbapiunicode_cextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_dbapiunicode_nocextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_pymysql_dbapiunicode_cextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_pymysql_dbapiunicode_nocextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_dbapiunicode_cextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_dbapiunicode_cextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_mysql_mysqldb_dbapiunicode_cextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_mysql_mysqldb_dbapiunicode_nocextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_mysql_pymysql_dbapiunicode_cextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_mysql_pymysql_dbapiunicode_nocextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_postgresql_psycopg2_dbapiunicode_cextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_sqlite_pysqlite_dbapiunicode_cextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.5_mysql_mysqldb_dbapiunicode_cextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.5_mysql_mysqldb_dbapiunicode_nocextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.5_mysql_pymysql_dbapiunicode_cextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.5_mysql_pymysql_dbapiunicode_nocextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.5_sqlite_pysqlite_dbapiunicode_cextensions 147
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 147
# TEST: test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.6_sqlite_pysqlite_nocextensions 4262
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_mysql_mysqldb_cextensions 4262
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_mysql_mysqldb_nocextensions 4262
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_postgresql_psycopg2_cextensions 4257
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_postgresql_psycopg2_nocextensions 4262
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_sqlite_pysqlite_cextensions 4262
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_sqlite_pysqlite_nocextensions 4262
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_mysql_pymysql_cextensions 4263
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_mysql_pymysql_nocextensions 4263
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_postgresql_psycopg2_cextensions 4263
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_postgresql_psycopg2_nocextensions 4263
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_sqlite_pysqlite_cextensions 4263
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_sqlite_pysqlite_nocextensions 4263
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_mysql_pymysql_cextensions 4263
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_mysql_pymysql_nocextensions 4263
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_postgresql_psycopg2_cextensions 4263
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_postgresql_psycopg2_nocextensions 4258
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_sqlite_pysqlite_cextensions 4263
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_sqlite_pysqlite_nocextensions 4263
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_mysql_mysqldb_dbapiunicode_cextensions 4260
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_mysql_mysqldb_dbapiunicode_nocextensions 4260
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_postgresql_psycopg2_dbapiunicode_cextensions 4260
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 4260
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_sqlite_pysqlite_dbapiunicode_cextensions 4260
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 4260
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_mysql_mysqldb_dbapiunicode_cextensions 4261
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_mysql_mysqldb_dbapiunicode_nocextensions 4261
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_postgresql_psycopg2_dbapiunicode_cextensions 4261
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 4261
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_sqlite_pysqlite_dbapiunicode_cextensions 4261
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 4261
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.5_mysql_mysqldb_dbapiunicode_cextensions 4261
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.5_mysql_mysqldb_dbapiunicode_nocextensions 4261
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.5_sqlite_pysqlite_dbapiunicode_cextensions 4256
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 4256
# TEST: test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.6_sqlite_pysqlite_nocextensions 6426
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_mysql_mysqldb_cextensions 6426
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_mysql_mysqldb_nocextensions 6426
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_postgresql_psycopg2_cextensions 6426
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_postgresql_psycopg2_nocextensions 6426
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_sqlite_pysqlite_cextensions 6426
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_sqlite_pysqlite_nocextensions 6426
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_mysql_pymysql_cextensions 6428
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_mysql_pymysql_nocextensions 6428
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_postgresql_psycopg2_cextensions 6428
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_postgresql_psycopg2_nocextensions 6428
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_sqlite_pysqlite_cextensions 6428
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_sqlite_pysqlite_nocextensions 6428
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.4_mysql_pymysql_cextensions 6428
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.4_mysql_pymysql_nocextensions 6428
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.4_postgresql_psycopg2_cextensions 6428
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.4_postgresql_psycopg2_nocextensions 6428
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.4_sqlite_pysqlite_cextensions 6428
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.4_sqlite_pysqlite_nocextensions 6428
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_mysql_mysqldb_dbapiunicode_cextensions 6424
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_mysql_mysqldb_dbapiunicode_nocextensions 6424
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_postgresql_psycopg2_dbapiunicode_cextensions 6424
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 6424
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_sqlite_pysqlite_dbapiunicode_cextensions 6424
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 6424
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.4_mysql_mysqldb_dbapiunicode_cextensions 6426
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.4_mysql_mysqldb_dbapiunicode_nocextensions 6426
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.4_postgresql_psycopg2_dbapiunicode_cextensions 6426
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 6426
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.4_sqlite_pysqlite_dbapiunicode_cextensions 6426
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 6426
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.5_mysql_mysqldb_dbapiunicode_cextensions 6426
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.5_mysql_mysqldb_dbapiunicode_nocextensions 6426
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.5_sqlite_pysqlite_dbapiunicode_cextensions 6426
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 6426
# TEST: test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.6_sqlite_pysqlite_nocextensions 26358
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_mysql_mysqldb_cextensions 16194
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_mysql_mysqldb_nocextensions 25197
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_postgresql_psycopg2_cextensions 29184
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_postgresql_psycopg2_nocextensions 37180
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_sqlite_pysqlite_cextensions 16329
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_sqlite_pysqlite_nocextensions 25332
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_mysql_pymysql_cextensions 130997
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_mysql_pymysql_nocextensions 140000
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_postgresql_psycopg2_cextensions 17191
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_postgresql_psycopg2_nocextensions 26194
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_sqlite_pysqlite_cextensions 17361
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_sqlite_pysqlite_nocextensions 26364
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_mysql_pymysql_cextensions 83733
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_mysql_pymysql_nocextensions 92736
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_postgresql_psycopg2_cextensions 18221
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_postgresql_psycopg2_nocextensions 27201
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_sqlite_pysqlite_cextensions 18393
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_sqlite_pysqlite_nocextensions 27396
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_mysql_mysqldb_dbapiunicode_cextensions 41218
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_mysql_mysqldb_dbapiunicode_nocextensions 50221
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_postgresql_psycopg2_dbapiunicode_cextensions 29199
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 38202
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_sqlite_pysqlite_dbapiunicode_cextensions 17164
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 26167
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_mysql_mysqldb_dbapiunicode_cextensions 30236
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_mysql_mysqldb_dbapiunicode_nocextensions 39239
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_postgresql_psycopg2_dbapiunicode_cextensions 18213
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 27216
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_sqlite_pysqlite_dbapiunicode_cextensions 18187
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 27190
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.5_mysql_mysqldb_dbapiunicode_cextensions 30236
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.5_mysql_mysqldb_dbapiunicode_nocextensions 39239
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.5_sqlite_pysqlite_dbapiunicode_cextensions 18173
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 27176
# TEST: test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.6_sqlite_pysqlite_nocextensions 26282
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_mysql_mysqldb_cextensions 22212
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_mysql_mysqldb_nocextensions 25215
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_postgresql_psycopg2_cextensions 23196
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_postgresql_psycopg2_nocextensions 25186
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_sqlite_pysqlite_cextensions 22269
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_sqlite_pysqlite_nocextensions 25272
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_mysql_pymysql_cextensions 52409
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_mysql_pymysql_nocextensions 55412
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_postgresql_psycopg2_cextensions 23205
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_postgresql_psycopg2_nocextensions 26208
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_sqlite_pysqlite_cextensions 23309
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_sqlite_pysqlite_nocextensions 26312
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_mysql_pymysql_cextensions 47353
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_mysql_pymysql_nocextensions 50356
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_postgresql_psycopg2_cextensions 24215
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_postgresql_psycopg2_nocextensions 27220
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_sqlite_pysqlite_cextensions 24321
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_sqlite_pysqlite_nocextensions 27324
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_mysql_mysqldb_dbapiunicode_cextensions 23235
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_mysql_mysqldb_dbapiunicode_nocextensions 26238
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_postgresql_psycopg2_dbapiunicode_cextensions 23204
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 26207
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_sqlite_pysqlite_dbapiunicode_cextensions 23181
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 26184
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_mysql_mysqldb_dbapiunicode_cextensions 24260
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_mysql_mysqldb_dbapiunicode_nocextensions 27263
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_postgresql_psycopg2_dbapiunicode_cextensions 24225
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 27228
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_sqlite_pysqlite_dbapiunicode_cextensions 24211
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 27214
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.5_mysql_mysqldb_dbapiunicode_cextensions 24260
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.5_mysql_mysqldb_dbapiunicode_nocextensions 27263
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.5_sqlite_pysqlite_dbapiunicode_cextensions 24211
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 27219
# TEST: test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.6_sqlite_pysqlite_nocextensions 17988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_mysql_mysqldb_cextensions 17988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_mysql_mysqldb_nocextensions 17988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_postgresql_psycopg2_cextensions 17988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_postgresql_psycopg2_nocextensions 17988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_sqlite_pysqlite_cextensions 17988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_sqlite_pysqlite_nocextensions 17988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_mysql_pymysql_cextensions 18988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_mysql_pymysql_nocextensions 18988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_postgresql_psycopg2_cextensions 18988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_postgresql_psycopg2_nocextensions 18988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_sqlite_pysqlite_cextensions 18988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_sqlite_pysqlite_nocextensions 18988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.4_mysql_pymysql_cextensions 18988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.4_mysql_pymysql_nocextensions 18988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.4_postgresql_psycopg2_cextensions 18988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.4_postgresql_psycopg2_nocextensions 18988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.4_sqlite_pysqlite_cextensions 18988
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.4_sqlite_pysqlite_nocextensions 18988
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_mysql_mysqldb_dbapiunicode_cextensions 17988
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_mysql_mysqldb_dbapiunicode_nocextensions 17988
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_postgresql_psycopg2_dbapiunicode_cextensions 17988
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 17988
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_sqlite_pysqlite_dbapiunicode_cextensions 17988
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 17988
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.4_mysql_mysqldb_dbapiunicode_cextensions 18988
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.4_mysql_mysqldb_dbapiunicode_nocextensions 18988
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.4_postgresql_psycopg2_dbapiunicode_cextensions 18988
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 18988
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.4_sqlite_pysqlite_dbapiunicode_cextensions 18988
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 18988
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.5_mysql_mysqldb_dbapiunicode_cextensions 18988
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.5_mysql_mysqldb_dbapiunicode_nocextensions 18988
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.5_sqlite_pysqlite_dbapiunicode_cextensions 18988
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 18988
# TEST: test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.6_sqlite_pysqlite_nocextensions 161101
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_mysql_mysqldb_cextensions 127101
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_mysql_mysqldb_nocextensions 128851
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_postgresql_psycopg2_cextensions 123351
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_postgresql_psycopg2_nocextensions 121851
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_sqlite_pysqlite_cextensions 156351
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_sqlite_pysqlite_nocextensions 158054
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_mysql_pymysql_cextensions 211855
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_mysql_pymysql_nocextensions 213605
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_postgresql_psycopg2_cextensions 125556
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_postgresql_psycopg2_nocextensions 127306
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_sqlite_pysqlite_cextensions 165355
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_sqlite_pysqlite_nocextensions 167105
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_mysql_pymysql_cextensions 187056
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_mysql_pymysql_nocextensions 188855
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_postgresql_psycopg2_cextensions 128556
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_postgresql_psycopg2_nocextensions 130356
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_sqlite_pysqlite_cextensions 168806
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_sqlite_pysqlite_nocextensions 170556
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_mysql_mysqldb_dbapiunicode_cextensions 132554
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_mysql_mysqldb_dbapiunicode_nocextensions 134304
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_postgresql_psycopg2_dbapiunicode_cextensions 124101
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 125851
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_sqlite_pysqlite_dbapiunicode_cextensions 121304
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 122852
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_mysql_mysqldb_dbapiunicode_cextensions 139356
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_mysql_mysqldb_dbapiunicode_nocextensions 141106
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_postgresql_psycopg2_dbapiunicode_cextensions 129306
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 130857
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_sqlite_pysqlite_dbapiunicode_cextensions 127556
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 129107
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.5_mysql_mysqldb_dbapiunicode_cextensions 139356
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.5_mysql_mysqldb_dbapiunicode_nocextensions 141106
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.5_sqlite_pysqlite_dbapiunicode_cextensions 127357
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 129306
# TEST: test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.6_sqlite_pysqlite_nocextensions 21505
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_mysql_mysqldb_cextensions 19393
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_mysql_mysqldb_nocextensions 19597
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_postgresql_psycopg2_cextensions 19024
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_postgresql_psycopg2_nocextensions 19085
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_cextensions 21186
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_nocextensions 21437
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_mysql_pymysql_cextensions 25404
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_mysql_pymysql_nocextensions 25608
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_postgresql_psycopg2_cextensions 19428
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_postgresql_psycopg2_nocextensions 19644
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_sqlite_pysqlite_cextensions 22066
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_sqlite_pysqlite_nocextensions 22221
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_mysql_pymysql_cextensions 23716
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_mysql_pymysql_nocextensions 23871
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_postgresql_psycopg2_cextensions 19552
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_postgresql_psycopg2_nocextensions 19727
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_sqlite_pysqlite_cextensions 22051
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_sqlite_pysqlite_nocextensions 22255
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_mysql_mysqldb_dbapiunicode_cextensions 19504
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_mysql_mysqldb_dbapiunicode_nocextensions 19702
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_postgresql_psycopg2_dbapiunicode_cextensions 18956
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 19160
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_dbapiunicode_cextensions 18820
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 18970
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_mysql_mysqldb_dbapiunicode_cextensions 20153
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_mysql_mysqldb_dbapiunicode_nocextensions 20307
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_postgresql_psycopg2_dbapiunicode_cextensions 19503
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 19707
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_sqlite_pysqlite_dbapiunicode_cextensions 19363
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 19623
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.5_mysql_mysqldb_dbapiunicode_cextensions 20153
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.5_mysql_mysqldb_dbapiunicode_nocextensions 20357
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.5_sqlite_pysqlite_dbapiunicode_cextensions 19368
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 19572
# TEST: test.aaa_profiling.test_orm.MergeTest.test_merge_load
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.6_sqlite_pysqlite_nocextensions 1520
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_mysql_mysqldb_cextensions 1400
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_mysql_mysqldb_nocextensions 1415
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_postgresql_psycopg2_cextensions 1309
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_postgresql_psycopg2_nocextensions 1334
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_cextensions 1527
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_nocextensions 1542
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_mysql_pymysql_cextensions 2327
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_mysql_pymysql_nocextensions 2342
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_postgresql_psycopg2_cextensions 1350
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_postgresql_psycopg2_nocextensions 1365
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_sqlite_pysqlite_cextensions 1594
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_sqlite_pysqlite_nocextensions 1609
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_mysql_pymysql_cextensions 2038
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_mysql_pymysql_nocextensions 2053
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_postgresql_psycopg2_cextensions 1335
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_postgresql_psycopg2_nocextensions 1354
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_sqlite_pysqlite_cextensions 1577
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_sqlite_pysqlite_nocextensions 1592
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_mysql_mysqldb_dbapiunicode_cextensions 1419
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_mysql_mysqldb_dbapiunicode_nocextensions 1433
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_postgresql_psycopg2_dbapiunicode_cextensions 1316
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 1331
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_dbapiunicode_cextensions 1193
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 1207
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_mysql_mysqldb_dbapiunicode_cextensions 1475
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_mysql_mysqldb_dbapiunicode_nocextensions 1490
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_postgresql_psycopg2_dbapiunicode_cextensions 1345
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 1360
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_sqlite_pysqlite_dbapiunicode_cextensions 1239
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 1253
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.5_mysql_mysqldb_dbapiunicode_cextensions 1475
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.5_mysql_mysqldb_dbapiunicode_nocextensions 1490
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.5_sqlite_pysqlite_dbapiunicode_cextensions 1238
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 1253
# TEST: test.aaa_profiling.test_orm.MergeTest.test_merge_no_load
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.6_sqlite_pysqlite_nocextensions 89,19
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_mysql_mysqldb_cextensions 93,19
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_mysql_mysqldb_nocextensions 93,19
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_postgresql_psycopg2_cextensions 91,19
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_postgresql_psycopg2_nocextensions 93,19
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_sqlite_pysqlite_cextensions 93,19
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_sqlite_pysqlite_nocextensions 93,19
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_mysql_pymysql_cextensions 96,20
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_mysql_pymysql_nocextensions 96,20
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_postgresql_psycopg2_cextensions 96,20
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_postgresql_psycopg2_nocextensions 96,20
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_sqlite_pysqlite_cextensions 96,20
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_sqlite_pysqlite_nocextensions 96,20
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_mysql_pymysql_cextensions 92,20
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_mysql_pymysql_nocextensions 92,20
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_postgresql_psycopg2_cextensions 92,20
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_postgresql_psycopg2_nocextensions 94,20
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_sqlite_pysqlite_cextensions 92,20
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_sqlite_pysqlite_nocextensions 92,20
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_mysql_mysqldb_dbapiunicode_cextensions 91,19
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_mysql_mysqldb_dbapiunicode_nocextensions 91,19
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_postgresql_psycopg2_dbapiunicode_cextensions 91,19
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 91,19
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_sqlite_pysqlite_dbapiunicode_cextensions 91,19
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 91,19
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_mysql_mysqldb_dbapiunicode_cextensions 94,20
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_mysql_mysqldb_dbapiunicode_nocextensions 94,20
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_postgresql_psycopg2_dbapiunicode_cextensions 94,20
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 94,20
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_sqlite_pysqlite_dbapiunicode_cextensions 94,20
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 94,20
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.5_mysql_mysqldb_dbapiunicode_cextensions 94,20
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.5_mysql_mysqldb_dbapiunicode_nocextensions 94,20
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.5_sqlite_pysqlite_dbapiunicode_cextensions 94,20
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 94,20
# TEST: test.aaa_profiling.test_orm.QueryTest.test_query_cols
-test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.6_sqlite_pysqlite_nocextensions 8064
-test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_mysql_mysqldb_cextensions 6220
-test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_mysql_mysqldb_nocextensions 6750
-test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_postgresql_psycopg2_cextensions 6798
-test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_postgresql_psycopg2_nocextensions 7320
-test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_sqlite_pysqlite_cextensions 7564
-test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_sqlite_pysqlite_nocextensions 8094
-test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.3_mysql_pymysql_cextensions 18754
-test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.3_mysql_pymysql_nocextensions 19284
-test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.3_postgresql_psycopg2_cextensions 6334
-test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.3_postgresql_psycopg2_nocextensions 6864
-test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.3_sqlite_pysqlite_cextensions 8016
-test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.3_sqlite_pysqlite_nocextensions 8546
-test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_mysql_pymysql_cextensions 13744
-test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_mysql_pymysql_nocextensions 14274
-test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_postgresql_psycopg2_cextensions 6234
-test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_postgresql_psycopg2_nocextensions 6702
-test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_sqlite_pysqlite_cextensions 7846
-test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_sqlite_pysqlite_nocextensions 8376
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_mysql_mysqldb_dbapiunicode_cextensions 7860
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_mysql_mysqldb_dbapiunicode_nocextensions 8390
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_postgresql_psycopg2_dbapiunicode_cextensions 6810
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 7340
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_sqlite_pysqlite_dbapiunicode_cextensions 5864
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 6394
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_mysql_mysqldb_dbapiunicode_cextensions 7274
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_mysql_mysqldb_dbapiunicode_nocextensions 7804
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_postgresql_psycopg2_dbapiunicode_cextensions 6184
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 6714
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_sqlite_pysqlite_dbapiunicode_cextensions 6056
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 6586
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.5_mysql_mysqldb_dbapiunicode_cextensions 7274
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.5_mysql_mysqldb_dbapiunicode_nocextensions 7804
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.5_sqlite_pysqlite_dbapiunicode_cextensions 6054
+test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 6584
# TEST: test.aaa_profiling.test_orm.SessionTest.test_expire_lots
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.6_sqlite_pysqlite_nocextensions 1156
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_mysql_mysqldb_cextensions 1145
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_mysql_mysqldb_nocextensions 1148
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_postgresql_psycopg2_cextensions 1139
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_postgresql_psycopg2_nocextensions 1161
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_sqlite_pysqlite_cextensions 1151
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_sqlite_pysqlite_nocextensions 1145
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.3_mysql_pymysql_cextensions 1267
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.3_mysql_pymysql_nocextensions 1257
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.3_postgresql_psycopg2_cextensions 1272
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.3_postgresql_psycopg2_nocextensions 1264
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.3_sqlite_pysqlite_cextensions 1264
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.3_sqlite_pysqlite_nocextensions 1255
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_mysql_pymysql_cextensions 1254
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_mysql_pymysql_nocextensions 1280
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_postgresql_psycopg2_cextensions 1247
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_postgresql_psycopg2_nocextensions 1263
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_sqlite_pysqlite_cextensions 1238
-test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_sqlite_pysqlite_nocextensions 1272
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_mysql_mysqldb_dbapiunicode_cextensions 1140
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_mysql_mysqldb_dbapiunicode_nocextensions 1155
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_postgresql_psycopg2_dbapiunicode_cextensions 1158
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 1159
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_sqlite_pysqlite_dbapiunicode_cextensions 1133
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 1161
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_mysql_mysqldb_dbapiunicode_cextensions 1254
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_mysql_mysqldb_dbapiunicode_nocextensions 1255
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_postgresql_psycopg2_dbapiunicode_cextensions 1247
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 1253
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_sqlite_pysqlite_dbapiunicode_cextensions 1247
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 1256
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.5_mysql_mysqldb_dbapiunicode_cextensions 1248
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.5_mysql_mysqldb_dbapiunicode_nocextensions 1259
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.5_sqlite_pysqlite_dbapiunicode_cextensions 1269
+test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 1271
# TEST: test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.6_sqlite_pysqlite_nocextensions 97
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_mysql_mysqldb_cextensions 95
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_mysql_mysqldb_nocextensions 95
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_postgresql_psycopg2_cextensions 96
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_postgresql_psycopg2_nocextensions 95
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_sqlite_pysqlite_cextensions 95
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_sqlite_pysqlite_nocextensions 95
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_mysql_pymysql_cextensions 82
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_mysql_pymysql_nocextensions 82
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_postgresql_psycopg2_cextensions 82
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_postgresql_psycopg2_nocextensions 82
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_sqlite_pysqlite_cextensions 82
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_sqlite_pysqlite_nocextensions 82
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_mysql_pymysql_cextensions 83
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_mysql_pymysql_nocextensions 83
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_postgresql_psycopg2_cextensions 83
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_postgresql_psycopg2_nocextensions 83
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_sqlite_pysqlite_cextensions 83
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_sqlite_pysqlite_nocextensions 83
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_mysql_mysqldb_dbapiunicode_cextensions 96
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_mysql_mysqldb_dbapiunicode_nocextensions 96
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_postgresql_psycopg2_dbapiunicode_cextensions 96
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 96
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_sqlite_pysqlite_dbapiunicode_cextensions 96
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 96
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_mysql_mysqldb_dbapiunicode_cextensions 83
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_mysql_mysqldb_dbapiunicode_nocextensions 83
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_postgresql_psycopg2_dbapiunicode_cextensions 83
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 83
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_sqlite_pysqlite_dbapiunicode_cextensions 83
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 83
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.5_mysql_mysqldb_dbapiunicode_cextensions 83
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.5_mysql_mysqldb_dbapiunicode_nocextensions 83
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.5_sqlite_pysqlite_dbapiunicode_cextensions 83
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 83
# TEST: test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.6_sqlite_pysqlite_nocextensions 31
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_mysql_mysqldb_cextensions 31
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_mysql_mysqldb_nocextensions 31
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_postgresql_psycopg2_cextensions 31
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_postgresql_psycopg2_nocextensions 31
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_sqlite_pysqlite_cextensions 31
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_sqlite_pysqlite_nocextensions 31
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_mysql_pymysql_cextensions 24
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_mysql_pymysql_nocextensions 24
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_postgresql_psycopg2_cextensions 24
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_postgresql_psycopg2_nocextensions 24
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_sqlite_pysqlite_cextensions 24
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_sqlite_pysqlite_nocextensions 24
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.4_mysql_pymysql_cextensions 24
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.4_mysql_pymysql_nocextensions 24
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.4_postgresql_psycopg2_cextensions 24
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.4_postgresql_psycopg2_nocextensions 24
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.4_sqlite_pysqlite_cextensions 24
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.4_sqlite_pysqlite_nocextensions 24
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_mysql_mysqldb_dbapiunicode_cextensions 31
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_mysql_mysqldb_dbapiunicode_nocextensions 31
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_postgresql_psycopg2_dbapiunicode_cextensions 31
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 31
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_sqlite_pysqlite_dbapiunicode_cextensions 31
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 31
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.4_mysql_mysqldb_dbapiunicode_cextensions 24
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.4_mysql_mysqldb_dbapiunicode_nocextensions 24
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.4_postgresql_psycopg2_dbapiunicode_cextensions 24
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 24
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.4_sqlite_pysqlite_dbapiunicode_cextensions 24
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 24
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.5_mysql_mysqldb_dbapiunicode_cextensions 24
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.5_mysql_mysqldb_dbapiunicode_nocextensions 24
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.5_sqlite_pysqlite_dbapiunicode_cextensions 24
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 24
# TEST: test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.6_sqlite_pysqlite_nocextensions 8
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_mysql_mysqldb_cextensions 8
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_mysql_mysqldb_nocextensions 8
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_postgresql_psycopg2_cextensions 8
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_postgresql_psycopg2_nocextensions 8
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_sqlite_pysqlite_cextensions 8
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_sqlite_pysqlite_nocextensions 8
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_mysql_pymysql_cextensions 9
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_mysql_pymysql_nocextensions 9
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_postgresql_psycopg2_cextensions 9
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_postgresql_psycopg2_nocextensions 9
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_sqlite_pysqlite_cextensions 9
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_sqlite_pysqlite_nocextensions 9
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.4_mysql_pymysql_cextensions 9
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.4_mysql_pymysql_nocextensions 9
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.4_postgresql_psycopg2_cextensions 9
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.4_postgresql_psycopg2_nocextensions 9
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.4_sqlite_pysqlite_cextensions 9
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.4_sqlite_pysqlite_nocextensions 9
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_mysql_mysqldb_dbapiunicode_cextensions 8
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_mysql_mysqldb_dbapiunicode_nocextensions 8
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_postgresql_psycopg2_dbapiunicode_cextensions 8
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 8
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_sqlite_pysqlite_dbapiunicode_cextensions 8
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 8
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.4_mysql_mysqldb_dbapiunicode_cextensions 9
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.4_mysql_mysqldb_dbapiunicode_nocextensions 9
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.4_postgresql_psycopg2_dbapiunicode_cextensions 9
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 9
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.4_sqlite_pysqlite_dbapiunicode_cextensions 9
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 9
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.5_mysql_mysqldb_dbapiunicode_cextensions 9
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.5_mysql_mysqldb_dbapiunicode_nocextensions 9
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.5_sqlite_pysqlite_dbapiunicode_cextensions 9
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 9
# TEST: test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.6_sqlite_pysqlite_nocextensions 45
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqldb_cextensions 43
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqldb_nocextensions 45
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_postgresql_psycopg2_cextensions 43
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_postgresql_psycopg2_nocextensions 45
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_sqlite_pysqlite_cextensions 43
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_sqlite_pysqlite_nocextensions 45
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_mysql_pymysql_cextensions 47
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_mysql_pymysql_nocextensions 47
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_postgresql_psycopg2_cextensions 47
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_postgresql_psycopg2_nocextensions 47
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_sqlite_pysqlite_cextensions 47
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_sqlite_pysqlite_nocextensions 47
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_mysql_pymysql_cextensions 47
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_mysql_pymysql_nocextensions 47
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_postgresql_psycopg2_cextensions 47
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_postgresql_psycopg2_nocextensions 47
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_sqlite_pysqlite_cextensions 47
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_sqlite_pysqlite_nocextensions 47
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqldb_dbapiunicode_cextensions 47
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqldb_dbapiunicode_nocextensions 49
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_pymysql_dbapiunicode_cextensions 47
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_pymysql_dbapiunicode_nocextensions 49
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_postgresql_psycopg2_dbapiunicode_cextensions 47
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 49
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_sqlite_pysqlite_dbapiunicode_cextensions 47
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 49
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_mysql_mysqldb_dbapiunicode_cextensions 52
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_mysql_mysqldb_dbapiunicode_nocextensions 52
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_mysql_pymysql_dbapiunicode_cextensions 52
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_mysql_pymysql_dbapiunicode_nocextensions 52
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_postgresql_psycopg2_dbapiunicode_cextensions 52
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 52
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_sqlite_pysqlite_dbapiunicode_cextensions 52
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 52
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.5_mysql_mysqldb_dbapiunicode_cextensions 52
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.5_mysql_mysqldb_dbapiunicode_nocextensions 52
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.5_mysql_pymysql_dbapiunicode_cextensions 52
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.5_mysql_pymysql_dbapiunicode_nocextensions 52
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.5_sqlite_pysqlite_dbapiunicode_cextensions 52
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 52
# TEST: test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.6_sqlite_pysqlite_nocextensions 84
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqldb_cextensions 82
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqldb_nocextensions 84
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_postgresql_psycopg2_cextensions 82
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_postgresql_psycopg2_nocextensions 84
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_sqlite_pysqlite_cextensions 82
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_sqlite_pysqlite_nocextensions 84
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_mysql_pymysql_cextensions 86
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_mysql_pymysql_nocextensions 86
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_postgresql_psycopg2_cextensions 86
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_postgresql_psycopg2_nocextensions 86
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_sqlite_pysqlite_cextensions 86
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_sqlite_pysqlite_nocextensions 86
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_mysql_pymysql_cextensions 86
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_mysql_pymysql_nocextensions 86
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_postgresql_psycopg2_cextensions 86
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_postgresql_psycopg2_nocextensions 86
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_sqlite_pysqlite_cextensions 86
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_sqlite_pysqlite_nocextensions 86
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqldb_dbapiunicode_cextensions 86
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqldb_dbapiunicode_nocextensions 88
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_pymysql_dbapiunicode_cextensions 86
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_pymysql_dbapiunicode_nocextensions 88
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_postgresql_psycopg2_dbapiunicode_cextensions 86
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 88
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_sqlite_pysqlite_dbapiunicode_cextensions 86
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 88
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_mysql_mysqldb_dbapiunicode_cextensions 91
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_mysql_mysqldb_dbapiunicode_nocextensions 91
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_mysql_pymysql_dbapiunicode_cextensions 91
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_mysql_pymysql_dbapiunicode_nocextensions 91
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_postgresql_psycopg2_dbapiunicode_cextensions 91
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 91
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_sqlite_pysqlite_dbapiunicode_cextensions 91
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 91
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.5_mysql_mysqldb_dbapiunicode_cextensions 91
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.5_mysql_mysqldb_dbapiunicode_nocextensions 91
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.5_mysql_pymysql_dbapiunicode_cextensions 91
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.5_mysql_pymysql_dbapiunicode_nocextensions 91
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.5_sqlite_pysqlite_dbapiunicode_cextensions 91
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 91
# TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.6_sqlite_pysqlite_nocextensions 15
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqldb_cextensions 15
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqldb_nocextensions 15
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_postgresql_psycopg2_cextensions 15
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_postgresql_psycopg2_nocextensions 15
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_sqlite_pysqlite_cextensions 15
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_sqlite_pysqlite_nocextensions 15
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_mysql_pymysql_cextensions 16
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_mysql_pymysql_nocextensions 16
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_postgresql_psycopg2_cextensions 16
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_postgresql_psycopg2_nocextensions 16
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_sqlite_pysqlite_cextensions 16
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_sqlite_pysqlite_nocextensions 16
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_mysql_pymysql_cextensions 16
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_mysql_pymysql_nocextensions 16
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_postgresql_psycopg2_cextensions 16
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_postgresql_psycopg2_nocextensions 16
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_sqlite_pysqlite_cextensions 16
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_sqlite_pysqlite_nocextensions 16
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqldb_dbapiunicode_cextensions 15
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqldb_dbapiunicode_nocextensions 15
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_pymysql_dbapiunicode_cextensions 15
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_pymysql_dbapiunicode_nocextensions 15
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_postgresql_psycopg2_dbapiunicode_cextensions 15
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 15
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_sqlite_pysqlite_dbapiunicode_cextensions 15
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 15
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_mysql_mysqldb_dbapiunicode_cextensions 16
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_mysql_mysqldb_dbapiunicode_nocextensions 16
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_mysql_pymysql_dbapiunicode_cextensions 16
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_mysql_pymysql_dbapiunicode_nocextensions 16
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_postgresql_psycopg2_dbapiunicode_cextensions 16
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 16
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_sqlite_pysqlite_dbapiunicode_cextensions 16
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 16
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.5_mysql_mysqldb_dbapiunicode_cextensions 16
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.5_mysql_mysqldb_dbapiunicode_nocextensions 16
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.5_mysql_pymysql_dbapiunicode_cextensions 16
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.5_mysql_pymysql_dbapiunicode_nocextensions 16
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.5_sqlite_pysqlite_dbapiunicode_cextensions 16
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 16
# TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_string
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.6_sqlite_pysqlite_nocextensions 15439
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_cextensions 488
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_nocextensions 15488
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_cextensions 20497
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_nocextensions 35477
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_cextensions 419
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_nocextensions 15419
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_mysql_pymysql_cextensions 160650
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_mysql_pymysql_nocextensions 174650
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_postgresql_psycopg2_cextensions 481
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_postgresql_psycopg2_nocextensions 14481
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_sqlite_pysqlite_cextensions 440
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_sqlite_pysqlite_nocextensions 14440
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_mysql_pymysql_cextensions 87259
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_mysql_pymysql_nocextensions 101259
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_postgresql_psycopg2_cextensions 501
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_postgresql_psycopg2_nocextensions 14501
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_sqlite_pysqlite_cextensions 460
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_sqlite_pysqlite_nocextensions 14460
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_dbapiunicode_cextensions 40510
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_dbapiunicode_nocextensions 55510
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_pymysql_dbapiunicode_cextensions 117410
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_pymysql_dbapiunicode_nocextensions 132410
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_dbapiunicode_cextensions 20497
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 35497
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_dbapiunicode_cextensions 439
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 15439
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_mysql_mysqldb_dbapiunicode_cextensions 20518
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_mysql_mysqldb_dbapiunicode_nocextensions 34518
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_mysql_pymysql_dbapiunicode_cextensions 87265
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_mysql_pymysql_dbapiunicode_nocextensions 101265
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_postgresql_psycopg2_dbapiunicode_cextensions 501
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 14501
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_sqlite_pysqlite_dbapiunicode_cextensions 460
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 14460
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.5_mysql_mysqldb_dbapiunicode_cextensions 20518
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.5_mysql_mysqldb_dbapiunicode_nocextensions 34518
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.5_mysql_pymysql_dbapiunicode_cextensions 87265
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.5_mysql_pymysql_dbapiunicode_nocextensions 101265
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.5_sqlite_pysqlite_dbapiunicode_cextensions 460
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 14460
# TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_unicode
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.6_sqlite_pysqlite_nocextensions 15439
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_cextensions 488
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_nocextensions 45488
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_cextensions 20497
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_nocextensions 35477
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_cextensions 419
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_nocextensions 15419
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_mysql_pymysql_cextensions 160650
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_mysql_pymysql_nocextensions 174650
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_postgresql_psycopg2_cextensions 481
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_postgresql_psycopg2_nocextensions 14481
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_sqlite_pysqlite_cextensions 440
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_sqlite_pysqlite_nocextensions 14440
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_mysql_pymysql_cextensions 87259
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_mysql_pymysql_nocextensions 101259
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_postgresql_psycopg2_cextensions 501
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_postgresql_psycopg2_nocextensions 14501
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_sqlite_pysqlite_cextensions 460
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_sqlite_pysqlite_nocextensions 14460
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_dbapiunicode_cextensions 40510
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_dbapiunicode_nocextensions 55510
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_pymysql_dbapiunicode_cextensions 117410
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_pymysql_dbapiunicode_nocextensions 132410
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_dbapiunicode_cextensions 20497
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 35497
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_dbapiunicode_cextensions 439
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_dbapiunicode_nocextensions 15439
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_mysql_mysqldb_dbapiunicode_cextensions 20518
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_mysql_mysqldb_dbapiunicode_nocextensions 34518
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_mysql_pymysql_dbapiunicode_cextensions 87265
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_mysql_pymysql_dbapiunicode_nocextensions 101265
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_postgresql_psycopg2_dbapiunicode_cextensions 501
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 14501
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_sqlite_pysqlite_dbapiunicode_cextensions 460
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_sqlite_pysqlite_dbapiunicode_nocextensions 14460
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.5_mysql_mysqldb_dbapiunicode_cextensions 20518
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.5_mysql_mysqldb_dbapiunicode_nocextensions 34518
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.5_mysql_pymysql_dbapiunicode_cextensions 87265
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.5_mysql_pymysql_dbapiunicode_nocextensions 101265
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.5_sqlite_pysqlite_dbapiunicode_cextensions 460
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.5_sqlite_pysqlite_dbapiunicode_nocextensions 14460
# TEST: test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_cextensions 5823,295,3721,11938,1146,2017,2481
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_nocextensions 5833,295,3681,12720,1241,1980,2655
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_cextensions 5591,277,3569,11458,1134,1924,2489
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_nocextensions 5613,277,3665,12630,1228,1931,2681
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_cextensions 5619,277,3705,11902,1144,1966,2532
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_nocextensions 5625,277,3809,13110,1240,1975,2733
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_dbapiunicode_cextensions 5834,294,3729,11963,1149,2023,2486
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 5856,294,3833,13221,1256,2030,2707
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_dbapiunicode_cextensions 5597,276,3721,11963,1149,1974,2548
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 5619,276,3817,13135,1243,1981,2740
# TEST: test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_cextensions 6437,410,6761,17665,1159,2627
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_nocextensions 6341,407,6703,18167,1244,2598
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_cextensions 6228,393,6747,17582,1148,2623
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_nocextensions 6318,398,6851,18609,1234,2652
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_cextensions 6257,393,6891,18056,1159,2671
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_nocextensions 6418,401,7005,19115,1247,2706
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_dbapiunicode_cextensions 6367,405,6777,17707,1162,2636
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_dbapiunicode_nocextensions 6452,410,6881,18734,1260,2665
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_dbapiunicode_cextensions 6339,396,6917,18130,1164,2686
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_dbapiunicode_nocextensions 6424,401,7021,19157,1250,2715
diff --git a/test/requirements.py b/test/requirements.py
index ff93a9c3d..522a376e0 100644
--- a/test/requirements.py
+++ b/test/requirements.py
@@ -531,6 +531,18 @@ class DefaultRequirements(SuiteRequirements):
'sybase')
@property
+ def json_type(self):
+ return only_on([
+ lambda config: against(config, "mysql >= 5.7") and
+ not config.db.dialect._is_mariadb,
+ "postgresql >= 9.3"
+ ])
+
+ @property
+ def json_array_indexes(self):
+ return self.json_type + fails_if("+pg8000")
+
+ @property
def datetime_literals(self):
"""target dialect supports rendering of a date, time, or datetime as a
literal string, e.g. via the TypeEngine.literal_processor() method.
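
The new json_type and json_array_indexes requirement properties gate JSON-dependent tests to MySQL 5.7 and later (excluding MariaDB) and PostgreSQL 9.3 and later. A minimal sketch of how a suite test would consume such a requirement (the test class below is hypothetical and not part of this patch):

    from sqlalchemy.testing import fixtures

    class JSONRoundTripTest(fixtures.TablesTest):
        # hypothetical test; the test plugin skips it on backends where the
        # "json_type" requirement defined above is not satisfied
        __requires__ = ('json_type',)
        __backend__ = True
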
diff --git a/test/sql/test_compiler.py b/test/sql/test_compiler.py
index ffd13309b..85a9f77bc 100644
--- a/test/sql/test_compiler.py
+++ b/test/sql/test_compiler.py
@@ -10,7 +10,8 @@ styling and coherent test organization.
"""
-from sqlalchemy.testing import eq_, is_, assert_raises, assert_raises_message
+from sqlalchemy.testing import eq_, is_, assert_raises, \
+ assert_raises_message, eq_ignore_whitespace
from sqlalchemy import testing
from sqlalchemy.testing import fixtures, AssertsCompiledSQL
from sqlalchemy import Integer, String, MetaData, Table, Column, select, \
@@ -2562,7 +2563,7 @@ class UnsupportedTest(fixtures.TestBase):
assert_raises_message(
exc.UnsupportedCompilationError,
- r"Compiler <sqlalchemy.sql.compiler.SQLCompiler .*"
+ r"Compiler <sqlalchemy.sql.compiler.StrSQLCompiler .*"
r"can't render element of type <class '.*SomeElement'>",
SomeElement().compile
)
@@ -2578,7 +2579,7 @@ class UnsupportedTest(fixtures.TestBase):
assert_raises_message(
exc.UnsupportedCompilationError,
- r"Compiler <sqlalchemy.sql.compiler.SQLCompiler .*"
+ r"Compiler <sqlalchemy.sql.compiler.StrSQLCompiler .*"
r"can't render element of type <class '.*SomeElement'>",
SomeElement().compile
)
@@ -2591,12 +2592,76 @@ class UnsupportedTest(fixtures.TestBase):
binary = BinaryExpression(column("foo"), column("bar"), myop)
assert_raises_message(
exc.UnsupportedCompilationError,
- r"Compiler <sqlalchemy.sql.compiler.SQLCompiler .*"
+ r"Compiler <sqlalchemy.sql.compiler.StrSQLCompiler .*"
r"can't render element of type <function.*",
binary.compile
)
+class StringifySpecialTest(fixtures.TestBase):
+ def test_basic(self):
+ stmt = select([table1]).where(table1.c.myid == 10)
+ eq_ignore_whitespace(
+ str(stmt),
+ "SELECT mytable.myid, mytable.name, mytable.description "
+ "FROM mytable WHERE mytable.myid = :myid_1"
+ )
+
+ def test_cte(self):
+        # stringify of these was supported anyway by the default dialect.
+ stmt = select([table1.c.myid]).cte()
+ stmt = select([stmt])
+ eq_ignore_whitespace(
+ str(stmt),
+ "WITH anon_1 AS (SELECT mytable.myid AS myid FROM mytable) "
+ "SELECT anon_1.myid FROM anon_1"
+ )
+
+ def test_returning(self):
+ stmt = table1.insert().returning(table1.c.myid)
+
+ eq_ignore_whitespace(
+ str(stmt),
+ "INSERT INTO mytable (myid, name, description) "
+ "VALUES (:myid, :name, :description) RETURNING mytable.myid"
+ )
+
+ def test_array_index(self):
+ stmt = select([column('foo', types.ARRAY(Integer))[5]])
+
+ eq_ignore_whitespace(
+ str(stmt),
+ "SELECT foo[:foo_1] AS anon_1"
+ )
+
+ def test_unknown_type(self):
+ class MyType(types.TypeEngine):
+ __visit_name__ = 'mytype'
+
+ stmt = select([cast(table1.c.myid, MyType)])
+
+ eq_ignore_whitespace(
+ str(stmt),
+ "SELECT CAST(mytable.myid AS MyType) AS anon_1 FROM mytable"
+ )
+
+ def test_within_group(self):
+        # stringify of these was supported anyway by the default dialect.
+ from sqlalchemy import within_group
+ stmt = select([
+ table1.c.myid,
+ within_group(
+ func.percentile_cont(0.5),
+ table1.c.name.desc()
+ )
+ ])
+ eq_ignore_whitespace(
+ str(stmt),
+ "SELECT mytable.myid, percentile_cont(:percentile_cont_1) "
+ "WITHIN GROUP (ORDER BY mytable.name DESC) AS anon_1 FROM mytable"
+ )
+
+
class KwargPropagationTest(fixtures.TestBase):
@classmethod
@@ -2955,6 +3020,57 @@ class DDLTest(fixtures.TestBase, AssertsCompiledSQL):
"CREATE TABLE t1 (q INTEGER, CHECK (a = 1))"
)
+ def test_schema_translate_map_table(self):
+ m = MetaData()
+ t1 = Table('t1', m, Column('q', Integer))
+ t2 = Table('t2', m, Column('q', Integer), schema='foo')
+ t3 = Table('t3', m, Column('q', Integer), schema='bar')
+
+ schema_translate_map = {None: "z", "bar": None, "foo": "bat"}
+
+ self.assert_compile(
+ schema.CreateTable(t1),
+ "CREATE TABLE z.t1 (q INTEGER)",
+ schema_translate_map=schema_translate_map
+ )
+
+ self.assert_compile(
+ schema.CreateTable(t2),
+ "CREATE TABLE bat.t2 (q INTEGER)",
+ schema_translate_map=schema_translate_map
+ )
+
+ self.assert_compile(
+ schema.CreateTable(t3),
+ "CREATE TABLE t3 (q INTEGER)",
+ schema_translate_map=schema_translate_map
+ )
+
+ def test_schema_translate_map_sequence(self):
+ s1 = schema.Sequence('s1')
+ s2 = schema.Sequence('s2', schema='foo')
+ s3 = schema.Sequence('s3', schema='bar')
+
+ schema_translate_map = {None: "z", "bar": None, "foo": "bat"}
+
+ self.assert_compile(
+ schema.CreateSequence(s1),
+ "CREATE SEQUENCE z.s1",
+ schema_translate_map=schema_translate_map
+ )
+
+ self.assert_compile(
+ schema.CreateSequence(s2),
+ "CREATE SEQUENCE bat.s2",
+ schema_translate_map=schema_translate_map
+ )
+
+ self.assert_compile(
+ schema.CreateSequence(s3),
+ "CREATE SEQUENCE s3",
+ schema_translate_map=schema_translate_map
+ )
+
class InlineDefaultTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = 'default'
@@ -3049,6 +3165,82 @@ class SchemaTest(fixtures.TestBase, AssertsCompiledSQL):
' "dbo.remote_owner".remotetable'
)
+ def test_schema_translate_select(self):
+ schema_translate_map = {"remote_owner": "foob", None: 'bar'}
+
+ self.assert_compile(
+ table1.select().where(table1.c.name == 'hi'),
+ "SELECT bar.mytable.myid, bar.mytable.name, "
+ "bar.mytable.description FROM bar.mytable "
+ "WHERE bar.mytable.name = :name_1",
+ schema_translate_map=schema_translate_map
+ )
+
+ self.assert_compile(
+ table4.select().where(table4.c.value == 'hi'),
+ "SELECT foob.remotetable.rem_id, foob.remotetable.datatype_id, "
+ "foob.remotetable.value FROM foob.remotetable "
+ "WHERE foob.remotetable.value = :value_1",
+ schema_translate_map=schema_translate_map
+ )
+
+ schema_translate_map = {"remote_owner": "foob"}
+ self.assert_compile(
+ select([
+ table1, table4
+ ]).select_from(
+ join(table1, table4, table1.c.myid == table4.c.rem_id)
+ ),
+ "SELECT mytable.myid, mytable.name, mytable.description, "
+ "foob.remotetable.rem_id, foob.remotetable.datatype_id, "
+ "foob.remotetable.value FROM mytable JOIN foob.remotetable "
+ "ON foob.remotetable.rem_id = mytable.myid",
+ schema_translate_map=schema_translate_map
+ )
+
+ def test_schema_translate_crud(self):
+ schema_translate_map = {"remote_owner": "foob", None: 'bar'}
+
+ self.assert_compile(
+ table1.insert().values(description='foo'),
+ "INSERT INTO bar.mytable (description) VALUES (:description)",
+ schema_translate_map=schema_translate_map
+ )
+
+ self.assert_compile(
+ table1.update().where(table1.c.name == 'hi').
+ values(description='foo'),
+ "UPDATE bar.mytable SET description=:description "
+ "WHERE bar.mytable.name = :name_1",
+ schema_translate_map=schema_translate_map
+ )
+ self.assert_compile(
+ table1.delete().where(table1.c.name == 'hi'),
+ "DELETE FROM bar.mytable WHERE bar.mytable.name = :name_1",
+ schema_translate_map=schema_translate_map
+ )
+
+ self.assert_compile(
+ table4.insert().values(value='there'),
+ "INSERT INTO foob.remotetable (value) VALUES (:value)",
+ schema_translate_map=schema_translate_map
+ )
+
+ self.assert_compile(
+ table4.update().where(table4.c.value == 'hi').
+ values(value='there'),
+ "UPDATE foob.remotetable SET value=:value "
+ "WHERE foob.remotetable.value = :value_1",
+ schema_translate_map=schema_translate_map
+ )
+
+ self.assert_compile(
+ table4.delete().where(table4.c.value == 'hi'),
+ "DELETE FROM foob.remotetable WHERE "
+ "foob.remotetable.value = :value_1",
+ schema_translate_map=schema_translate_map
+ )
+
def test_alias(self):
a = alias(table4, 'remtable')
self.assert_compile(a.select(a.c.datatype_id == 7),
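
The schema_translate_map assertions above exercise 1.1's compile-time schema translation. A minimal usage sketch of the corresponding execution option (the engine URL and table are illustrative, not part of this patch):

    from sqlalchemy import Column, Integer, MetaData, Table, create_engine

    metadata = MetaData()

    # defined without a schema; the None entry in the map supplies one at
    # compile time
    accounts = Table('accounts', metadata, Column('id', Integer, primary_key=True))

    engine = create_engine("postgresql://scott:tiger@localhost/test")  # illustrative URL

    # schemaless tables render under "bar"; tables declared with
    # schema="remote_owner" render under "foob"
    conn = engine.connect().execution_options(
        schema_translate_map={None: "bar", "remote_owner": "foob"})

    # compiles to: SELECT bar.accounts.id FROM bar.accounts
    result = conn.execute(accounts.select())
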
diff --git a/test/sql/test_functions.py b/test/sql/test_functions.py
index 51cfcb919..0074d789b 100644
--- a/test/sql/test_functions.py
+++ b/test/sql/test_functions.py
@@ -2,7 +2,7 @@ from sqlalchemy.testing import eq_, is_
import datetime
from sqlalchemy import func, select, Integer, literal, DateTime, Table, \
Column, Sequence, MetaData, extract, Date, String, bindparam, \
- literal_column, Array, Numeric
+ literal_column, ARRAY, Numeric
from sqlalchemy.sql import table, column
from sqlalchemy import sql, util
from sqlalchemy.sql.compiler import BIND_TEMPLATES
@@ -558,7 +558,7 @@ class ReturnTypeTest(fixtures.TestBase):
def test_array_agg(self):
expr = func.array_agg(column('data', Integer))
- is_(expr.type._type_affinity, Array)
+ is_(expr.type._type_affinity, ARRAY)
is_(expr.type.item_type._type_affinity, Integer)
def test_mode(self):
@@ -573,13 +573,13 @@ class ReturnTypeTest(fixtures.TestBase):
def test_percentile_cont_array(self):
expr = func.percentile_cont(0.5, 0.7).within_group(
column('data', Integer))
- is_(expr.type._type_affinity, Array)
+ is_(expr.type._type_affinity, ARRAY)
is_(expr.type.item_type._type_affinity, Integer)
def test_percentile_cont_array_desc(self):
expr = func.percentile_cont(0.5, 0.7).within_group(
column('data', Integer).desc())
- is_(expr.type._type_affinity, Array)
+ is_(expr.type._type_affinity, ARRAY)
is_(expr.type.item_type._type_affinity, Integer)
def test_cume_dist(self):
diff --git a/test/sql/test_join_rewriting.py b/test/sql/test_join_rewriting.py
index 922b7f322..c699a5c97 100644
--- a/test/sql/test_join_rewriting.py
+++ b/test/sql/test_join_rewriting.py
@@ -1,3 +1,8 @@
+"""These tests are all about the "join rewriting" feature built
+to support SQLite's lack of right-nested joins. SQlite as of
+version 3.7.16 no longer has this limitation.
+
+"""
from sqlalchemy import Table, Column, Integer, MetaData, ForeignKey, \
select, exists, union
from sqlalchemy.testing import fixtures, AssertsCompiledSQL
diff --git a/test/sql/test_metadata.py b/test/sql/test_metadata.py
index bbc318421..050929d3d 100644
--- a/test/sql/test_metadata.py
+++ b/test/sql/test_metadata.py
@@ -9,6 +9,7 @@ from sqlalchemy import Integer, String, UniqueConstraint, \
events, Unicode, types as sqltypes, bindparam, \
Table, Column, Boolean, Enum, func, text, TypeDecorator
from sqlalchemy import schema, exc
+from sqlalchemy.engine import default
from sqlalchemy.sql import elements, naming
import sqlalchemy as tsa
from sqlalchemy.testing import fixtures
@@ -1257,6 +1258,25 @@ class TableTest(fixtures.TestBase, AssertsCompiledSQL):
assign2
)
+ def test_c_mutate_after_unpickle(self):
+ m = MetaData()
+
+ y = Column('y', Integer)
+ t1 = Table('t', m, Column('x', Integer), y)
+
+ t2 = pickle.loads(pickle.dumps(t1))
+ z = Column('z', Integer)
+ g = Column('g', Integer)
+ t2.append_column(z)
+
+ is_(t1.c.contains_column(y), True)
+ is_(t2.c.contains_column(y), False)
+ y2 = t2.c.y
+ is_(t2.c.contains_column(y2), True)
+
+ is_(t2.c.contains_column(z), True)
+ is_(t2.c.contains_column(g), False)
+
def test_autoincrement_replace(self):
m = MetaData()
@@ -3682,7 +3702,7 @@ class NamingConventionTest(fixtures.TestBase, AssertsCompiledSQL):
exc.InvalidRequestError,
"Naming convention including \%\(constraint_name\)s token "
"requires that constraint is explicitly named.",
- schema.CreateTable(u1).compile
+ schema.CreateTable(u1).compile, dialect=default.DefaultDialect()
)
def test_schematype_no_ck_name_boolean_no_name(self):
diff --git a/test/sql/test_operators.py b/test/sql/test_operators.py
index 03c0f89be..6a6c749a4 100644
--- a/test/sql/test_operators.py
+++ b/test/sql/test_operators.py
@@ -15,7 +15,7 @@ from sqlalchemy.sql.elements import _literal_as_text
from sqlalchemy.schema import Column, Table, MetaData
from sqlalchemy.sql import compiler
from sqlalchemy.types import TypeEngine, TypeDecorator, UserDefinedType, \
- Boolean, NullType, MatchType, Indexable, Concatenable, Array
+ Boolean, NullType, MatchType, Indexable, Concatenable, ARRAY, JSON
from sqlalchemy.dialects import mysql, firebird, postgresql, oracle, \
sqlite, mssql
from sqlalchemy import util
@@ -632,7 +632,125 @@ class ExtensionOperatorTest(fixtures.TestBase, testing.AssertsCompiledSQL):
)
-class IndexableTest(fixtures.TestBase, testing.AssertsCompiledSQL):
+class JSONIndexOpTest(fixtures.TestBase, testing.AssertsCompiledSQL):
+ def setUp(self):
+ class MyTypeCompiler(compiler.GenericTypeCompiler):
+ def visit_mytype(self, type, **kw):
+ return "MYTYPE"
+
+ def visit_myothertype(self, type, **kw):
+ return "MYOTHERTYPE"
+
+ class MyCompiler(compiler.SQLCompiler):
+
+ def visit_json_getitem_op_binary(self, binary, operator, **kw):
+ return self._generate_generic_binary(
+ binary, " -> ", **kw
+ )
+
+ def visit_json_path_getitem_op_binary(
+ self, binary, operator, **kw):
+ return self._generate_generic_binary(
+ binary, " #> ", **kw
+ )
+
+ def visit_getitem_binary(self, binary, operator, **kw):
+ raise NotImplementedError()
+
+ class MyDialect(default.DefaultDialect):
+ statement_compiler = MyCompiler
+ type_compiler = MyTypeCompiler
+
+ class MyType(JSON):
+ __visit_name__ = 'mytype'
+
+ pass
+
+ self.MyType = MyType
+ self.__dialect__ = MyDialect()
+
+ def test_setup_getitem(self):
+ col = Column('x', self.MyType())
+
+ is_(
+ col[5].type._type_affinity, JSON
+ )
+ is_(
+ col[5]['foo'].type._type_affinity, JSON
+ )
+ is_(
+ col[('a', 'b', 'c')].type._type_affinity, JSON
+ )
+
+ def test_getindex_literal_integer(self):
+
+ col = Column('x', self.MyType())
+
+ self.assert_compile(
+ col[5],
+ "x -> :x_1",
+ checkparams={'x_1': 5}
+ )
+
+ def test_getindex_literal_string(self):
+
+ col = Column('x', self.MyType())
+
+ self.assert_compile(
+ col['foo'],
+ "x -> :x_1",
+ checkparams={'x_1': 'foo'}
+ )
+
+ def test_path_getindex_literal(self):
+
+ col = Column('x', self.MyType())
+
+ self.assert_compile(
+ col[('a', 'b', 3, 4, 'd')],
+ "x #> :x_1",
+ checkparams={'x_1': ('a', 'b', 3, 4, 'd')}
+ )
+
+ def test_getindex_sqlexpr(self):
+
+ col = Column('x', self.MyType())
+ col2 = Column('y', Integer())
+
+ self.assert_compile(
+ col[col2],
+ "x -> y",
+ checkparams={}
+ )
+
+ self.assert_compile(
+ col[col2 + 8],
+ "x -> (y + :y_1)",
+ checkparams={'y_1': 8}
+ )
+
+ def test_override_operators(self):
+ special_index_op = operators.custom_op('$$>')
+
+ class MyOtherType(JSON, TypeEngine):
+ __visit_name__ = 'myothertype'
+
+ class Comparator(TypeEngine.Comparator):
+
+ def _adapt_expression(self, op, other_comparator):
+ return special_index_op, MyOtherType()
+
+ comparator_factory = Comparator
+
+ col = Column('x', MyOtherType())
+ self.assert_compile(
+ col[5],
+ "x $$> :x_1",
+ checkparams={'x_1': 5}
+ )
+
+
+class ArrayIndexOpTest(fixtures.TestBase, testing.AssertsCompiledSQL):
def setUp(self):
class MyTypeCompiler(compiler.GenericTypeCompiler):
def visit_mytype(self, type, **kw):
@@ -658,31 +776,14 @@ class IndexableTest(fixtures.TestBase, testing.AssertsCompiledSQL):
statement_compiler = MyCompiler
type_compiler = MyTypeCompiler
- class MyType(Indexable, TypeEngine):
+ class MyType(ARRAY):
__visit_name__ = 'mytype'
def __init__(self, zero_indexes=False, dimensions=1):
if zero_indexes:
self.zero_indexes = zero_indexes
self.dimensions = dimensions
-
- class Comparator(Indexable.Comparator):
- def _setup_getitem(self, index):
- if isinstance(index, slice):
- return_type = self.type
- elif self.type.dimensions is None or \
- self.type.dimensions == 1:
- return_type = Integer()
- else:
- adapt_kw = {'dimensions': self.type.dimensions - 1}
- # this is also testing the behavior of adapt()
- # that we can pass kw that override constructor kws.
- # required a small change to util.constructor_copy().
- return_type = self.type.adapt(
- self.type.__class__, **adapt_kw)
-
- return operators.getitem, index, return_type
- comparator_factory = Comparator
+ self.item_type = Integer()
self.MyType = MyType
self.__dialect__ = MyDialect()
@@ -694,13 +795,13 @@ class IndexableTest(fixtures.TestBase, testing.AssertsCompiledSQL):
col = Column('x', self.MyType(dimensions=3))
is_(
- col[5].type._type_affinity, self.MyType
+ col[5].type._type_affinity, ARRAY
)
eq_(
col[5].type.dimensions, 2
)
is_(
- col[5][6].type._type_affinity, self.MyType
+ col[5][6].type._type_affinity, ARRAY
)
eq_(
col[5][6].type.dimensions, 1
@@ -2273,7 +2374,7 @@ class AnyAllTest(fixtures.TestBase, testing.AssertsCompiledSQL):
t = Table(
'tab1', m,
- Column('arrval', Array(Integer)),
+ Column('arrval', ARRAY(Integer)),
Column('data', Integer)
)
return t
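
JSONIndexOpTest drives the new cross-dialect JSON index operators through a stub compiler. A minimal sketch of the user-facing expressions against a backend with native JSON rendering (the table and keys are illustrative, not part of this patch):

    from sqlalchemy import JSON, Column, Integer, MetaData, Table, select
    from sqlalchemy.dialects import postgresql

    metadata = MetaData()
    data_table = Table(
        'data_table', metadata,
        Column('id', Integer, primary_key=True),
        Column('data', JSON)
    )

    # a single key renders with the dialect's JSON "getitem" operator
    # (-> on PostgreSQL); a tuple renders with the path operator (#>)
    single = select([data_table.c.data['some_key']])
    path = select([data_table.c.data[('key_1', 'key_2')]])

    print(single.compile(dialect=postgresql.dialect()))
    print(path.compile(dialect=postgresql.dialect()))
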
diff --git a/test/sql/test_resultset.py b/test/sql/test_resultset.py
index 8461996ea..bd2b8c0ae 100644
--- a/test/sql/test_resultset.py
+++ b/test/sql/test_resultset.py
@@ -10,6 +10,7 @@ from sqlalchemy import (
from sqlalchemy.engine import result as _result
from sqlalchemy.testing.schema import Table, Column
import operator
+from sqlalchemy.testing import assertions
class ResultProxyTest(fixtures.TablesTest):
@@ -317,7 +318,7 @@ class ResultProxyTest(fixtures.TablesTest):
dict(user_id=1, user_name='john'),
)
- # test a little sqlite weirdness - with the UNION,
+ # test a little sqlite < 3.10.0 weirdness - with the UNION,
# cols come back as "users.user_id" in cursor.description
r = testing.db.execute(
text(
@@ -331,7 +332,6 @@ class ResultProxyTest(fixtures.TablesTest):
eq_(r['user_name'], "john")
eq_(list(r.keys()), ["user_id", "user_name"])
- @testing.only_on("sqlite", "sqlite specific feature")
def test_column_accessor_sqlite_raw(self):
users = self.tables.users
@@ -346,13 +346,22 @@ class ResultProxyTest(fixtures.TablesTest):
"users.user_name from users",
bind=testing.db).execution_options(sqlite_raw_colnames=True). \
execute().first()
- not_in_('user_id', r)
- not_in_('user_name', r)
- eq_(r['users.user_id'], 1)
- eq_(r['users.user_name'], "john")
- eq_(list(r.keys()), ["users.user_id", "users.user_name"])
- @testing.only_on("sqlite", "sqlite specific feature")
+ if testing.against("sqlite < 3.10.0"):
+ not_in_('user_id', r)
+ not_in_('user_name', r)
+ eq_(r['users.user_id'], 1)
+ eq_(r['users.user_name'], "john")
+
+ eq_(list(r.keys()), ["users.user_id", "users.user_name"])
+ else:
+ not_in_('users.user_id', r)
+ not_in_('users.user_name', r)
+ eq_(r['user_id'], 1)
+ eq_(r['user_name'], "john")
+
+ eq_(list(r.keys()), ["user_id", "user_name"])
+
def test_column_accessor_sqlite_translated(self):
users = self.tables.users
@@ -368,8 +377,14 @@ class ResultProxyTest(fixtures.TablesTest):
bind=testing.db).execute().first()
eq_(r['user_id'], 1)
eq_(r['user_name'], "john")
- eq_(r['users.user_id'], 1)
- eq_(r['users.user_name'], "john")
+
+ if testing.against("sqlite < 3.10.0"):
+ eq_(r['users.user_id'], 1)
+ eq_(r['users.user_name'], "john")
+ else:
+ not_in_('users.user_id', r)
+ not_in_('users.user_name', r)
+
eq_(list(r.keys()), ["user_id", "user_name"])
def test_column_accessor_labels_w_dots(self):
@@ -604,17 +619,11 @@ class ResultProxyTest(fixtures.TablesTest):
lambda: r['user_id']
)
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambiguous column name",
- lambda: r[users.c.user_id]
- )
-
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambiguous column name",
- lambda: r[addresses.c.user_id]
- )
+ # pure positional targeting; users.c.user_id
+ # and addresses.c.user_id are known!
+ # works as of 1.1 issue #3501
+ eq_(r[users.c.user_id], 1)
+ eq_(r[addresses.c.user_id], None)
# try to trick it - fake_table isn't in the result!
# we get the correct error
@@ -652,31 +661,17 @@ class ResultProxyTest(fixtures.TablesTest):
result = select([users.c.user_id, ua.c.user_id]).execute()
row = result.first()
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambiguous column name",
- lambda: row[users.c.user_id]
- )
+ # as of 1.1 issue #3501, we use pure positional
+ # targeting for the column objects here
+ eq_(row[users.c.user_id], 1)
- assert_raises_message(
- exc.InvalidRequestError,
- "Ambiguous column name",
- lambda: row[ua.c.user_id]
- )
+ eq_(row[ua.c.user_id], 1)
- # Unfortunately, this fails -
- # we'd like
- # "Could not locate column in row"
- # to be raised here, but the check for
- # "common column" in _compare_name_for_result()
- # has other requirements to be more liberal.
- # Ultimately the
- # expression system would need a way to determine
- # if given two columns in a "proxy" relationship, if they
- # refer to a different parent table
+ # this now works as of 1.1 issue #3501;
+ # previously this was stuck on "ambiguous column name"
assert_raises_message(
exc.InvalidRequestError,
- "Ambiguous column name",
+ "Could not locate column in row",
lambda: row[u2.c.user_id]
)
@@ -1012,7 +1007,7 @@ class KeyTargetingTest(fixtures.TablesTest):
eq_(row.q, "c1")
assert_raises_message(
exc.InvalidRequestError,
- "Ambiguous column name 'b'",
+ "Ambiguous column name 'a'",
getattr, row, "b"
)
assert_raises_message(
@@ -1134,3 +1129,182 @@ class KeyTargetingTest(fixtures.TablesTest):
in_(keyed2.c.b, row)
in_(stmt.c.keyed2_a, row)
in_(stmt.c.keyed2_b, row)
+
+
+class PositionalTextTest(fixtures.TablesTest):
+ run_inserts = 'once'
+ run_deletes = None
+ __backend__ = True
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'text1',
+ metadata,
+ Column("a", CHAR(2)),
+ Column("b", CHAR(2)),
+ Column("c", CHAR(2)),
+ Column("d", CHAR(2))
+ )
+
+ @classmethod
+ def insert_data(cls):
+ cls.tables.text1.insert().execute([
+ dict(a="a1", b="b1", c="c1", d="d1"),
+ ])
+
+ def test_via_column(self):
+ c1, c2, c3, c4 = column('q'), column('p'), column('r'), column('d')
+ stmt = text("select a, b, c, d from text1").columns(c1, c2, c3, c4)
+
+ result = testing.db.execute(stmt)
+ row = result.first()
+
+ eq_(row[c2], "b1")
+ eq_(row[c4], "d1")
+ eq_(row[1], "b1")
+ eq_(row["b"], "b1")
+ eq_(row.keys(), ["a", "b", "c", "d"])
+ eq_(row["r"], "c1")
+ eq_(row["d"], "d1")
+
+ def test_fewer_cols_than_sql_positional(self):
+ c1, c2 = column('q'), column('p')
+ stmt = text("select a, b, c, d from text1").columns(c1, c2)
+
+        # no warning, as requesting fewer columns than the SQL provides
+        # also occurs in the non-positional case
+ result = testing.db.execute(stmt)
+ row = result.first()
+
+ eq_(row[c1], "a1")
+ eq_(row["c"], "c1")
+
+ def test_fewer_cols_than_sql_non_positional(self):
+ c1, c2 = column('a'), column('p')
+ stmt = text("select a, b, c, d from text1").columns(c2, c1, d=CHAR)
+
+        # no warning, as requesting fewer columns than the SQL provides
+        # also occurs in the non-positional case
+ result = testing.db.execute(stmt)
+ row = result.first()
+
+ # c1 name matches, locates
+ eq_(row[c1], "a1")
+ eq_(row["c"], "c1")
+
+ # c2 name does not match, doesn't locate
+ assert_raises_message(
+ exc.NoSuchColumnError,
+ "in row for column 'p'",
+ lambda: row[c2]
+ )
+
+ def test_more_cols_than_sql(self):
+ c1, c2, c3, c4 = column('q'), column('p'), column('r'), column('d')
+ stmt = text("select a, b from text1").columns(c1, c2, c3, c4)
+
+ with assertions.expect_warnings(
+ r"Number of columns in textual SQL \(4\) is "
+ "smaller than number of columns requested \(2\)"):
+ result = testing.db.execute(stmt)
+
+ row = result.first()
+ eq_(row[c2], "b1")
+
+ assert_raises_message(
+ exc.NoSuchColumnError,
+ "in row for column 'r'",
+ lambda: row[c3]
+ )
+
+ def test_dupe_col_obj(self):
+ c1, c2, c3 = column('q'), column('p'), column('r')
+ stmt = text("select a, b, c, d from text1").columns(c1, c2, c3, c2)
+
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Duplicate column expression requested in "
+ "textual SQL: <.*.ColumnClause.*; p>",
+ testing.db.execute, stmt
+ )
+
+ def test_anon_aliased_unique(self):
+ text1 = self.tables.text1
+
+ c1 = text1.c.a.label(None)
+ c2 = text1.alias().c.c
+ c3 = text1.alias().c.b
+ c4 = text1.alias().c.d.label(None)
+
+ stmt = text("select a, b, c, d from text1").columns(c1, c2, c3, c4)
+ result = testing.db.execute(stmt)
+ row = result.first()
+
+ eq_(row[c1], "a1")
+ eq_(row[c2], "b1")
+ eq_(row[c3], "c1")
+ eq_(row[c4], "d1")
+
+ # key fallback rules still match this to a column
+ # unambiguously based on its name
+ eq_(row[text1.c.a], "a1")
+
+ # key fallback rules still match this to a column
+ # unambiguously based on its name
+ eq_(row[text1.c.d], "d1")
+
+ # text1.c.b goes nowhere....because we hit key fallback
+ # but the text1.c.b doesn't derive from text1.c.c
+ assert_raises_message(
+ exc.NoSuchColumnError,
+ "Could not locate column in row for column 'text1.b'",
+ lambda: row[text1.c.b]
+ )
+
+ def test_anon_aliased_overlapping(self):
+ text1 = self.tables.text1
+
+ c1 = text1.c.a.label(None)
+ c2 = text1.alias().c.a
+ c3 = text1.alias().c.a.label(None)
+ c4 = text1.c.a.label(None)
+
+ stmt = text("select a, b, c, d from text1").columns(c1, c2, c3, c4)
+ result = testing.db.execute(stmt)
+ row = result.first()
+
+ eq_(row[c1], "a1")
+ eq_(row[c2], "b1")
+ eq_(row[c3], "c1")
+ eq_(row[c4], "d1")
+
+ # key fallback rules still match this to a column
+ # unambiguously based on its name
+ eq_(row[text1.c.a], "a1")
+
+ def test_anon_aliased_name_conflict(self):
+ text1 = self.tables.text1
+
+ c1 = text1.c.a.label("a")
+ c2 = text1.alias().c.a
+ c3 = text1.alias().c.a.label("a")
+ c4 = text1.c.a.label("a")
+
+ # all cols are named "a". if we are positional, we don't care.
+ # this is new logic in 1.1
+ stmt = text("select a, b as a, c as a, d as a from text1").columns(
+ c1, c2, c3, c4)
+ result = testing.db.execute(stmt)
+ row = result.first()
+
+ eq_(row[c1], "a1")
+ eq_(row[c2], "b1")
+ eq_(row[c3], "c1")
+ eq_(row[c4], "d1")
+
+ # fails, because we hit key fallback and find conflicts
+    # in columns that are present
+ assert_raises_message(
+ exc.NoSuchColumnError,
+ "Could not locate column in row for column 'text1.a'",
+ lambda: row[text1.c.a]
+ )
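
PositionalTextTest covers the 1.1 behavior in which column() objects handed to text().columns() are matched to the result set purely by position. A minimal sketch of that behavior (the in-memory SQLite table and its contents are illustrative, not part of this patch):

    from sqlalchemy import column, create_engine, text

    engine = create_engine("sqlite://")
    engine.execute("create table text1 (a char(2), b char(2))")
    engine.execute("insert into text1 (a, b) values ('a1', 'b1')")

    # the names 'q' and 'p' need not match the SQL; matching is positional
    c1, c2 = column('q'), column('p')
    stmt = text("select a, b from text1").columns(c1, c2)

    row = engine.execute(stmt).first()
    assert row[c1] == 'a1'   # targeted by the column object
    assert row[c2] == 'b1'
    assert row['a'] == 'a1'  # plain string keys from the cursor still work
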
diff --git a/test/sql/test_rowcount.py b/test/sql/test_rowcount.py
index 46e10e192..110f3639f 100644
--- a/test/sql/test_rowcount.py
+++ b/test/sql/test_rowcount.py
@@ -1,6 +1,7 @@
from sqlalchemy import *
from sqlalchemy.testing import fixtures, AssertsExecutionResults
from sqlalchemy import testing
+from sqlalchemy.testing import eq_
class FoundRowsTest(fixtures.TestBase, AssertsExecutionResults):
@@ -65,6 +66,22 @@ class FoundRowsTest(fixtures.TestBase, AssertsExecutionResults):
print("expecting 3, dialect reports %s" % r.rowcount)
assert r.rowcount == 3
+ def test_raw_sql_rowcount(self):
+ # test issue #3622, make sure eager rowcount is called for text
+ with testing.db.connect() as conn:
+ result = conn.execute(
+ "update employees set department='Z' where department='C'")
+ eq_(result.rowcount, 3)
+
+ def test_text_rowcount(self):
+ # test issue #3622, make sure eager rowcount is called for text
+ with testing.db.connect() as conn:
+ result = conn.execute(
+ text(
+ "update employees set department='Z' "
+ "where department='C'"))
+ eq_(result.rowcount, 3)
+
def test_delete_rowcount(self):
# WHERE matches 3, 3 rows deleted
department = employees_table.c.department
diff --git a/test/sql/test_selectable.py b/test/sql/test_selectable.py
index b9cbbf480..7203cc5a3 100644
--- a/test/sql/test_selectable.py
+++ b/test/sql/test_selectable.py
@@ -155,15 +155,19 @@ class SelectableTest(
assert c in s.c.bar.proxy_set
def test_no_error_on_unsupported_expr_key(self):
- from sqlalchemy.dialects.postgresql import ARRAY
+ from sqlalchemy.sql.expression import BinaryExpression
- t = table('t', column('x', ARRAY(Integer)))
+ def myop(x, y):
+ pass
+
+ t = table('t', column('x'), column('y'))
+
+ expr = BinaryExpression(t.c.x, t.c.y, myop)
- expr = t.c.x[5]
s = select([t, expr])
eq_(
s.c.keys(),
- ['x', expr.anon_label]
+ ['x', 'y', expr.anon_label]
)
def test_cloned_intersection(self):
diff --git a/test/sql/test_type_expressions.py b/test/sql/test_type_expressions.py
index 574edfe9e..0ef3a3e16 100644
--- a/test/sql/test_type_expressions.py
+++ b/test/sql/test_type_expressions.py
@@ -59,13 +59,14 @@ class SelectTest(_ExprFixture, fixtures.TestBase, AssertsCompiledSQL):
# the lower() function goes into the result_map, we don't really
# need this but it's fine
self.assert_compile(
- compiled._create_result_map()['test_table_y'][1][2],
+ compiled._create_result_map()['test_table_y'][1][3],
"lower(test_table.y)"
)
# then the original column gets put in there as well.
- # it's not important that it's the last value.
+ # as of 1.1 it's important that it is first as this is
+ # taken as significant by the result processor.
self.assert_compile(
- compiled._create_result_map()['test_table_y'][1][-1],
+ compiled._create_result_map()['test_table_y'][1][0],
"test_table.y"
)
diff --git a/test/sql/test_types.py b/test/sql/test_types.py
index f1fb611fb..b08556926 100644
--- a/test/sql/test_types.py
+++ b/test/sql/test_types.py
@@ -10,14 +10,14 @@ from sqlalchemy import (
and_, func, Date, LargeBinary, literal, cast, text, Enum,
type_coerce, VARCHAR, Time, DateTime, BigInteger, SmallInteger, BOOLEAN,
BLOB, NCHAR, NVARCHAR, CLOB, TIME, DATE, DATETIME, TIMESTAMP, SMALLINT,
- INTEGER, DECIMAL, NUMERIC, FLOAT, REAL, Array)
+ INTEGER, DECIMAL, NUMERIC, FLOAT, REAL, ARRAY, JSON)
from sqlalchemy.sql import ddl
from sqlalchemy.sql import visitors
from sqlalchemy import inspection
from sqlalchemy import exc, types, util, dialects
for name in dialects.__all__:
__import__("sqlalchemy.dialects.%s" % name)
-from sqlalchemy.sql import operators, column, table
+from sqlalchemy.sql import operators, column, table, null
from sqlalchemy.schema import CheckConstraint, AddConstraint
from sqlalchemy.engine import default
from sqlalchemy.testing.schema import Table, Column
@@ -140,7 +140,7 @@ class AdaptTest(fixtures.TestBase):
for is_down_adaption, typ, target_adaptions in adaptions():
if typ in (types.TypeDecorator, types.TypeEngine, types.Variant):
continue
- elif issubclass(typ, Array):
+ elif issubclass(typ, ARRAY):
t1 = typ(String)
else:
t1 = typ()
@@ -148,6 +148,8 @@ class AdaptTest(fixtures.TestBase):
if not issubclass(typ, types.Enum) and \
issubclass(cls, types.Enum):
continue
+ if cls.__module__.startswith("test"):
+ continue
# print("ADAPT %s -> %s" % (t1.__class__, cls))
t2 = t1.adapt(cls)
@@ -190,7 +192,7 @@ class AdaptTest(fixtures.TestBase):
for typ in self._all_types():
if typ in (types.TypeDecorator, types.TypeEngine, types.Variant):
continue
- elif issubclass(typ, Array):
+ elif issubclass(typ, ARRAY):
t1 = typ(String)
else:
t1 = typ()
@@ -1015,66 +1017,6 @@ class UnicodeTest(fixtures.TestBase):
"""
__backend__ = True
- def test_native_unicode(self):
- """assert expected values for 'native unicode' mode"""
-
- if testing.against('mssql+pyodbc'):
- eq_(
- testing.db.dialect.returns_unicode_strings,
- 'conditional'
- )
-
- elif testing.against('mssql+mxodbc'):
- eq_(
- testing.db.dialect.returns_unicode_strings,
- 'conditional'
- )
-
- elif testing.against('mssql+pymssql'):
- eq_(
- testing.db.dialect.returns_unicode_strings,
- ('charset' in testing.db.url.query)
- )
-
- elif testing.against('mysql+cymysql', 'mysql+pymssql'):
- eq_(
- testing.db.dialect.returns_unicode_strings,
- True if util.py3k else False
- )
- elif testing.against('oracle+cx_oracle'):
- eq_(
- testing.db.dialect.returns_unicode_strings,
- True if util.py3k else "conditional"
- )
- elif testing.against("mysql+mysqldb"):
- eq_(
- testing.db.dialect.returns_unicode_strings,
- True if util.py3k or util.asbool(
- testing.db.url.query.get("use_unicode")
- )
- else False
- )
- else:
- expected = (testing.db.name, testing.db.driver) in \
- (
- ('postgresql', 'psycopg2'),
- ('postgresql', 'psycopg2cffi'),
- ('postgresql', 'pypostgresql'),
- ('postgresql', 'pg8000'),
- ('postgresql', 'zxjdbc'),
- ('mysql', 'pymysql'),
- ('mysql', 'oursql'),
- ('mysql', 'zxjdbc'),
- ('mysql', 'mysqlconnector'),
- ('sqlite', 'pysqlite'),
- ('oracle', 'zxjdbc'),
- )
-
- eq_(
- testing.db.dialect.returns_unicode_strings,
- expected
- )
-
data = util.u(
"Alors vous imaginez ma surprise, au lever du jour, quand "
"une drôle de petite voix m’a réveillé. "
@@ -1406,23 +1348,98 @@ class BinaryTest(fixtures.TestBase, AssertsExecutionResults):
return o.read()
+class JSONTest(fixtures.TestBase):
+
+ def setup(self):
+ metadata = MetaData()
+ self.test_table = Table('test_table', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('test_column', JSON),
+ )
+ self.jsoncol = self.test_table.c.test_column
+
+ self.dialect = default.DefaultDialect()
+ self.dialect._json_serializer = None
+ self.dialect._json_deserializer = None
+
+ def test_bind_serialize_default(self):
+ proc = self.test_table.c.test_column.type._cached_bind_processor(
+ self.dialect)
+ eq_(
+ proc({"A": [1, 2, 3, True, False]}),
+ '{"A": [1, 2, 3, true, false]}'
+ )
+
+ def test_bind_serialize_None(self):
+ proc = self.test_table.c.test_column.type._cached_bind_processor(
+ self.dialect)
+ eq_(
+ proc(None),
+ 'null'
+ )
+
+ def test_bind_serialize_none_as_null(self):
+ proc = JSON(none_as_null=True)._cached_bind_processor(
+ self.dialect)
+ eq_(
+ proc(None),
+ None
+ )
+ eq_(
+ proc(null()),
+ None
+ )
+
+ def test_bind_serialize_null(self):
+ proc = self.test_table.c.test_column.type._cached_bind_processor(
+ self.dialect)
+ eq_(
+ proc(null()),
+ None
+ )
+
+ def test_result_deserialize_default(self):
+ proc = self.test_table.c.test_column.type._cached_result_processor(
+ self.dialect, None)
+ eq_(
+ proc('{"A": [1, 2, 3, true, false]}'),
+ {"A": [1, 2, 3, True, False]}
+ )
+
+ def test_result_deserialize_null(self):
+ proc = self.test_table.c.test_column.type._cached_result_processor(
+ self.dialect, None)
+ eq_(
+ proc('null'),
+ None
+ )
+
+ def test_result_deserialize_None(self):
+ proc = self.test_table.c.test_column.type._cached_result_processor(
+ self.dialect, None)
+ eq_(
+ proc(None),
+ None
+ )
+
+
class ArrayTest(fixtures.TestBase):
def _myarray_fixture(self):
- class MyArray(Array):
+ class MyArray(ARRAY):
pass
return MyArray
def test_array_index_map_dimensions(self):
- col = column('x', Array(Integer, dimensions=3))
+ col = column('x', ARRAY(Integer, dimensions=3))
is_(
- col[5].type._type_affinity, Array
+ col[5].type._type_affinity, ARRAY
)
eq_(
col[5].type.dimensions, 2
)
is_(
- col[5][6].type._type_affinity, Array
+ col[5][6].type._type_affinity, ARRAY
)
eq_(
col[5][6].type.dimensions, 1
@@ -1435,8 +1452,8 @@ class ArrayTest(fixtures.TestBase):
m = MetaData()
arrtable = Table(
'arrtable', m,
- Column('intarr', Array(Integer)),
- Column('strarr', Array(String)),
+ Column('intarr', ARRAY(Integer)),
+ Column('strarr', ARRAY(String)),
)
is_(arrtable.c.intarr[1].type._type_affinity, Integer)
is_(arrtable.c.strarr[1].type._type_affinity, String)
@@ -1445,11 +1462,11 @@ class ArrayTest(fixtures.TestBase):
m = MetaData()
arrtable = Table(
'arrtable', m,
- Column('intarr', Array(Integer)),
- Column('strarr', Array(String)),
+ Column('intarr', ARRAY(Integer)),
+ Column('strarr', ARRAY(String)),
)
- is_(arrtable.c.intarr[1:3].type._type_affinity, Array)
- is_(arrtable.c.strarr[1:3].type._type_affinity, Array)
+ is_(arrtable.c.intarr[1:3].type._type_affinity, ARRAY)
+ is_(arrtable.c.strarr[1:3].type._type_affinity, ARRAY)
def test_array_getitem_slice_type_dialect_level(self):
MyArray = self._myarray_fixture()
@@ -1459,8 +1476,8 @@ class ArrayTest(fixtures.TestBase):
Column('intarr', MyArray(Integer)),
Column('strarr', MyArray(String)),
)
- is_(arrtable.c.intarr[1:3].type._type_affinity, Array)
- is_(arrtable.c.strarr[1:3].type._type_affinity, Array)
+ is_(arrtable.c.intarr[1:3].type._type_affinity, ARRAY)
+ is_(arrtable.c.strarr[1:3].type._type_affinity, ARRAY)
# but the slice returns the actual type
assert isinstance(arrtable.c.intarr[1:3].type, MyArray)
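These changes track the 1.1 rename of the generic Array type to ARRAY, alongside the new generic JSON type covered by the JSONTest added above. A minimal sketch of the index/slice typing that ArrayTest asserts, assuming the 1.1 generic ARRAY type (illustrative only, not part of the patch):

    from sqlalchemy import ARRAY, Column, Integer, MetaData, String, Table

    metadata = MetaData()
    arrtable = Table('arrtable', metadata,
                     Column('intarr', ARRAY(Integer)),
                     Column('strarr', ARRAY(String)))

    # single-index access is typed as the array's element type
    int_item = arrtable.c.intarr[1]      # Integer-typed expression
    str_item = arrtable.c.strarr[1]      # String-typed expression

    # slice access stays array-typed, so it can be indexed again
    int_slice = arrtable.c.intarr[1:3]   # ARRAY-typed expression
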
diff --git a/tox.ini b/tox.ini
index 299ca2863..8170cb598 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,52 +1,48 @@
+
[tox]
-envlist = full,py26,py27,py33,py34,py35
+envlist =
+ py{26,27,34,35}-{cext,nocext}-{default,sqlitepg,mysql,oracle,mssql},
+ py27-nocext-cov,
+ py27-cext-cov,
+ py35-nocext-cov
[testenv]
+cov_args=--cov=sqlalchemy --cov-report term --cov-report xml --exclude-tag memory-intensive --exclude-tag timing-intensive -k "not aaa_profiling"
+
deps=pytest
+ pytest-xdist
mock
+ sqlitepg: .[postgresql]
+ mysql: .[mysql]
+ mysql: .[pymysql]
+ oracle: .[oracle]
+ mssql: .[pyodbc]
+ mssql: .[pymssql]
+ cov: pytest-cov
+ cov: .[mysql]
+ cov: .[postgresql]
# -E : ignore PYTHON* environment variables (such as PYTHONPATH)
-# -s : don't add user site directory to sys.path; also PYTHONNOUSERSITE
-# the latter is picked up by conftest.py
+# BASECOMMAND: we can't do section substitutions in commands
+# (see https://bitbucket.org/hpk42/tox/issues/307/)
setenv=
PYTHONPATH=
- PYTHONNOUSERSITE=1
-
-# we need this because our CI has all the DBAPIs and such
-# pre-installed in individual site-packages directories.
-sitepackages=True
-
-# always install fully and use that; this way options like
-# DISABLE_SQLALCHEMY_CEXT are honored
-usedevelop=False
+ nocext: DISABLE_SQLALCHEMY_CEXT=1
+ BASECOMMAND=python -m pytest -n4 --dropfirst
+ cov: BASECOMMAND=python -m pytest -n4 --dropfirst {[testenv]cov_args}
# tox as of 2.0 blocks all environment variables from the
# outside, unless they are here (or in TOX_TESTENV_PASSENV,
# wildcards OK). Need at least these
passenv=ORACLE_HOME NLS_LANG
-
commands=
- python -m pytest {posargs}
-
-[testenv:full]
-
-
-[testenv:coverage]
-setenv=
- DISABLE_SQLALCHEMY_CEXT=1
-
-# see also .coveragerc
-deps=pytest-cov
- coverage
- mock
-commands=
- python -m pytest --cov=sqlalchemy --cov-report term --cov-report xml \
- --exclude-tag memory-intensive \
- --exclude-tag timing-intensive \
- -k "not aaa_profiling" \
- {posargs}
-
+ default: {env:BASECOMMAND} {posargs}
+ sqlitepg: {env:BASECOMMAND} --db sqlite --db postgresql {posargs}
+ mysql: {env:BASECOMMAND} --db mysql --db pymysql {posargs}
+ oracle: {env:BASECOMMAND} --db oracle {posargs}
+ mssql: {env:BASECOMMAND} --db pyodbc --db pymssql {posargs}
+ cov: {env:BASECOMMAND} --db sqlite --db postgresql --db mysql {posargs}
[testenv:pep8]
deps=flake8