-rw-r--r--  doc/build/changelog/changelog_10.rst | 168
-rw-r--r--  doc/build/conf.py | 4
-rw-r--r--  doc/build/core/connections.rst | 68
-rw-r--r--  doc/build/core/pooling.rst | 2
-rw-r--r--  doc/build/core/tutorial.rst | 6
-rw-r--r--  doc/build/faq/sessions.rst | 66
-rw-r--r--  doc/build/orm/session_basics.rst | 14
-rw-r--r--  lib/sqlalchemy/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mssql/pyodbc.py | 22
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/base.py | 62
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py | 2
-rw-r--r--  lib/sqlalchemy/engine/base.py | 32
-rw-r--r--  lib/sqlalchemy/engine/interfaces.py | 6
-rw-r--r--  lib/sqlalchemy/engine/result.py | 23
-rw-r--r--  lib/sqlalchemy/ext/automap.py | 9
-rw-r--r--  lib/sqlalchemy/ext/declarative/api.py | 13
-rw-r--r--  lib/sqlalchemy/ext/declarative/base.py | 1
-rw-r--r--  lib/sqlalchemy/ext/hybrid.py | 2
-rw-r--r--  lib/sqlalchemy/orm/interfaces.py | 2
-rw-r--r--  lib/sqlalchemy/orm/loading.py | 11
-rw-r--r--  lib/sqlalchemy/orm/mapper.py | 33
-rw-r--r--  lib/sqlalchemy/orm/persistence.py | 25
-rw-r--r--  lib/sqlalchemy/orm/properties.py | 2
-rw-r--r--  lib/sqlalchemy/orm/util.py | 9
-rw-r--r--  lib/sqlalchemy/sql/compiler.py | 13
-rw-r--r--  lib/sqlalchemy/sql/dml.py | 10
-rw-r--r--  lib/sqlalchemy/sql/elements.py | 16
-rw-r--r--  lib/sqlalchemy/sql/operators.py | 8
-rw-r--r--  lib/sqlalchemy/sql/schema.py | 57
-rw-r--r--  lib/sqlalchemy/testing/__init__.py | 3
-rw-r--r--  lib/sqlalchemy/testing/assertions.py | 14
-rw-r--r--  lib/sqlalchemy/testing/engines.py | 11
-rw-r--r--  lib/sqlalchemy/testing/plugin/plugin_base.py | 7
-rw-r--r--  lib/sqlalchemy/testing/provision.py | 3
-rw-r--r--  test/dialect/mssql/test_types.py | 159
-rw-r--r--  test/dialect/postgresql/test_compiler.py | 22
-rw-r--r--  test/dialect/postgresql/test_reflection.py | 56
-rw-r--r--  test/ext/declarative/test_inheritance.py | 30
-rw-r--r--  test/orm/inheritance/test_basic.py | 56
-rw-r--r--  test/orm/inheritance/test_single.py | 25
-rw-r--r--  test/orm/test_query.py | 17
-rw-r--r--  test/orm/test_unitofworkv2.py | 108
-rw-r--r--  test/orm/test_utils.py | 50
-rw-r--r--  test/orm/test_versioning.py | 138
-rw-r--r--  test/sql/test_defaults.py | 30
-rw-r--r--  test/sql/test_functions.py | 16
-rw-r--r--  test/sql/test_operators.py | 61
-rw-r--r--  test/sql/test_query.py | 112
48 files changed, 1451 insertions, 155 deletions
diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst
index b30111129..a2b4273bf 100644
--- a/doc/build/changelog/changelog_10.rst
+++ b/doc/build/changelog/changelog_10.rst
@@ -16,7 +16,175 @@
:start-line: 5
.. changelog::
+ :version: 1.0.7
+
+ .. change::
+ :tags: bug, engine
+ :tickets: 3481
+
+ Fixed regression where new methods on :class:`.ResultProxy` used
+ by the ORM :class:`.Query` object (part of the performance
+ enhancements of :ticket:`3175`) would not raise the "this result
+ does not return rows" exception in the case where the driver
+ (typically MySQL) fails to generate cursor.description correctly;
+ an AttributeError against NoneType would be raised instead.
+
+ .. change::
+ :tags: bug, engine
+ :tickets: 3483
+
+ Fixed regression where :meth:`.ResultProxy.keys` would return
+ un-adjusted internal symbol names for "anonymous" labels, which
+ are the "foo_1" types of labels we see generated for SQL functions
+ without labels and similar. This was a side effect of the
+ performance enhancements implemented as part of :ticket:`918`.
+
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 3490
+
+ Fixed bug where coercion of literal ``True`` or ``False`` constant
+ in conjunction with :func:`.and_` or :func:`.or_` would fail
+ with an AttributeError.
+
+ .. change::
+ :tags: bug, sql
+ :tickets: 3485
+
+ Fixed potential issue where a custom subclass
+ of :class:`.FunctionElement` or other column element that incorrectly
+ states 'None' or any other invalid object as the ``.type``
+ attribute now raises an informative exception, rather than failing
+ with a recursion overflow.
+
+ .. change::
+ :tags: bug, sql
+ :pullreq: github:188
+
+ Fixed bug where the modulus SQL operator wouldn't work in reverse
+ due to a missing ``__rmod__`` method. Pull request courtesy
+ dan-gittik.
+
+ .. change::
+ :tags: feature, schema
+ :pullreq: github:186
+
+ Added support for the MINVALUE, MAXVALUE, NO MINVALUE, NO MAXVALUE,
+ and CYCLE arguments for CREATE SEQUENCE as supported by Postgresql
+ and Oracle. Pull request courtesy jakeogh.
+
+ .. change::
+ :tags: bug, orm, declarative
+ :tickets: 3480
+
+ Fixed bug in :class:`.AbstractConcreteBase` extension where
+ a column setup on the ABC base which had a different attribute
+ name vs. column name would not be correctly mapped on the final
+ base class. The failure on 0.9 would be silent whereas on
+ 1.0 it raised an ArgumentError, so may not have been noticed
+ prior to 1.0.
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 3469
+
+ Fixed 1.0 regression where value objects that override
+ ``__eq__()`` to return a non-boolean-capable object, such as
+ some geoalchemy types as well as numpy types, were being tested
+ for ``bool()`` during a unit of work update operation, where in
+ 0.9 the return value of ``__eq__()`` was tested against "is True"
+ to guard against this.
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 3468
+
+ Fixed 1.0 regression where a "deferred" attribute would not populate
+ correctly if it were loaded within the "optimized inheritance load",
+ which is a special SELECT emitted in the case of joined table
+ inheritance used to populate expired or unloaded attributes against
+ a joined table without loading the base table. This is related to
+ the fact that SQLA 1.0 no longer guesses about loading deferred
+ columns and must be directed explicitly.
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 3466
+
+ Fixed 1.0 regression where the "parent entity" of a synonym-
+ mapped attribute on top of an :func:`.aliased` object would
+ resolve to the original mapper, not the :func:`.aliased`
+ version of it, thereby causing problems for a :class:`.Query`
+ that relies on this attribute (e.g. it's the only representative
+ attribute given in the constructor) to figure out the correct FROM
+ clause for the query.
+
+.. changelog::
:version: 1.0.6
+ :released: June 25, 2015
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 3465
+
+ Fixed a major regression in the 1.0 series where the version_id_counter
+ feature would cause an object's version counter to be incremented
+ when there was no net change to the object's row, but instead an object
+ related to it via relationship (e.g. typically many-to-one)
+ was associated or de-associated with it, resulting in an UPDATE
+ statement that updates the object's version counter and nothing else.
+ In the use case where the relatively recent "server side" and/or
+ "programmatic/conditional" version counter feature were used
+ (e.g. setting version_id_generator to False), the bug could cause an
+ UPDATE without a valid SET clause to be emitted.
+
+ .. change::
+ :tags: bug, mssql
+ :tickets: 3464
+
+ Fixed issue when using :class:`.VARBINARY` type in conjunction with
+ an INSERT of NULL + pyodbc; pyodbc requires a special
+ object be passed in order to persist NULL. As the :class:`.VARBINARY`
+ type is now usually the default for :class:`.LargeBinary` due to
+ :ticket:`3039`, this issue is partially a regression in 1.0.
+ The pymssql driver appears to be unaffected.
+
+ .. change::
+ :tags: bug, postgresql, pypy
+ :tickets: 3439
+
+ Re-fixed the issue first addressed in 1.0.5 to fix psycopg2cffi
+ JSONB support once again, as they suddenly
+ switched on unconditional decoding of JSONB types in version 2.7.1.
+ Version detection now specifies 2.7.1 as where we should expect
+ the DBAPI to do json encoding for us.
+
+ .. change::
+ :tags: feature, postgresql
+ :tickets: 3455
+ :pullreq: github:179
+
+ Added support for storage parameters under CREATE INDEX, using
+ a new keyword argument ``postgresql_with``. Reflection now also
+ supports both the ``postgresql_with`` flag and
+ the ``postgresql_using`` flag, which will now be set on
+ :class:`.Index` objects that are reflected, as well as present
+ in a new "dialect_options" dictionary in the result of
+ :meth:`.Inspector.get_indexes`. Pull request courtesy Pete Hollobon.
+
+ .. seealso::
+
+ :ref:`postgresql_index_storage`
+
+ .. change::
+ :tags: bug, orm
+ :tickets: 3462
+
+ Fixed 1.0 regression where the enhanced behavior of single-inheritance
+ joins of :ticket:`3222` takes place inappropriately
+ for a JOIN along explicit join criteria with a single-inheritance
+ subclass that does not make use of any discriminator, resulting
+ in an additional "AND NULL" clause.
.. change::
:tags: bug, postgresql
diff --git a/doc/build/conf.py b/doc/build/conf.py
index 61635be40..fa9be2d25 100644
--- a/doc/build/conf.py
+++ b/doc/build/conf.py
@@ -138,9 +138,9 @@ copyright = u'2007-2015, the SQLAlchemy authors and contributors'
# The short X.Y version.
version = "1.0"
# The full version, including alpha/beta/rc tags.
-release = "1.0.5"
+release = "1.0.6"
-release_date = "June 7, 2015"
+release_date = "June 25, 2015"
site_base = os.environ.get("RTD_SITE_BASE", "http://www.sqlalchemy.org")
site_adapter_template = "docs_adapter.mako"
diff --git a/doc/build/core/connections.rst b/doc/build/core/connections.rst
index b6770bb82..72e1d6a61 100644
--- a/doc/build/core/connections.rst
+++ b/doc/build/core/connections.rst
@@ -368,6 +368,74 @@ the SQL statement. When the :class:`.ResultProxy` is closed, the underlying
:class:`.Connection` is closed for us, resulting in the
DBAPI connection being returned to the pool with transactional resources removed.
+.. _engine_disposal:
+
+Engine Disposal
+===============
+
+The :class:`.Engine` refers to a connection pool, which means under normal
+circumstances, there are open database connections present while the
+:class:`.Engine` object is still resident in memory. When an :class:`.Engine`
+is garbage collected, its connection pool is no longer referred to by
+that :class:`.Engine`, and assuming none of its connections are still checked
+out, the pool and its connections will also be garbage collected, which has the
+effect of closing out the actual database connections as well. But otherwise,
+the :class:`.Engine` will hold onto open database connections assuming
+it uses the default pool implementation of :class:`.QueuePool`.
+
+The :class:`.Engine` is intended to normally be a permanent
+fixture established up-front and maintained throughout the lifespan of an
+application. It is **not** intended to be created and disposed on a
+per-connection basis; it is instead a registry that maintains both a pool
+of connections and configurational information about the database
+and DBAPI in use, as well as some degree of internal caching of per-database
+resources.
+
+However, there are many cases where it is desirable that all connection resources
+referred to by the :class:`.Engine` be completely closed out. It's
+generally not a good idea to rely on Python garbage collection for this;
+instead, the :class:`.Engine` can be explicitly disposed using
+the :meth:`.Engine.dispose` method. This disposes of the engine's
+underlying connection pool and replaces it with a new one that's empty.
+Provided that the :class:`.Engine`
+is discarded at this point and no longer used, all **checked-in** connections
+which it refers to will also be fully closed.
+
+Valid use cases for calling :meth:`.Engine.dispose` include:
+
+* When a program wants to release any remaining checked-in connections
+ held by the connection pool and expects to no longer be connected
+ to that database at all for any future operations.
+
+* When a program uses multiprocessing or ``fork()``, and an
+ :class:`.Engine` object is copied to the child process,
+ :meth:`.Engine.dispose` should be called so that the engine creates
+ brand new database connections local to that fork. Database connections
+ generally do **not** travel across process boundaries.
+
+* Within test suites or multitenancy scenarios where many
+ ad-hoc, short-lived :class:`.Engine` objects may be created and disposed.
+
+
+Connections that are **checked out** are **not** discarded when the
+engine is disposed or garbage collected, as these connections are still
+strongly referenced elsewhere by the application.
+However, after :meth:`.Engine.dispose` is called, those
+connections are no longer associated with that :class:`.Engine`; when they
+are closed, they will be returned to their now-orphaned connection pool
+which will ultimately be garbage collected, once all connections which refer
+to it are also no longer referenced anywhere.
+Since this process is not easy to control, it is strongly recommended that
+:meth:`.Engine.dispose` be called only after all checked-out connections
+are checked in or otherwise de-associated from their pool.
+
+An alternative for applications that are negatively impacted by the
+:class:`.Engine` object's use of connection pooling is to disable pooling
+entirely. This typically incurs only a modest performance impact upon the
+use of new connections, and means that when a connection is checked in,
+it is entirely closed out and is not held in memory. See :ref:`pool_switching`
+for guidelines on how to disable pooling.
+
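A minimal sketch of the ``fork()`` use case described above; the connection
URL is hypothetical and shown for illustration only::

    import os

    from sqlalchemy import create_engine

    # hypothetical URL, for illustration only
    engine = create_engine("postgresql://scott:tiger@localhost/test")

    pid = os.fork()
    if pid == 0:
        # child process: the Engine object was copied across fork();
        # dispose() discards the pool inherited from the parent so the
        # child opens brand new DBAPI connections on first use
        engine.dispose()
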
.. _threadlocal_strategy:
Using the Threadlocal Execution Strategy
diff --git a/doc/build/core/pooling.rst b/doc/build/core/pooling.rst
index 0dbf835d9..ce6d443f9 100644
--- a/doc/build/core/pooling.rst
+++ b/doc/build/core/pooling.rst
@@ -56,6 +56,8 @@ queued up - the pool would only grow to that size if the application
actually used five connections concurrently, in which case the usage of a
small pool is an entirely appropriate default behavior.
+.. _pool_switching:
+
Switching Pool Implementations
------------------------------
diff --git a/doc/build/core/tutorial.rst b/doc/build/core/tutorial.rst
index b4f185ac2..cc2a97625 100644
--- a/doc/build/core/tutorial.rst
+++ b/doc/build/core/tutorial.rst
@@ -364,6 +364,10 @@ statement is compiled against the **first** dictionary in the list, and it's
assumed that all subsequent argument dictionaries are compatible with that
statement.
+The "executemany" style of invocation is available for each of the
+:func:`.insert`, :func:`.update` and :func:`.delete` constructs.
+
+
.. _coretutorial_selecting:
Selecting
@@ -1754,7 +1758,7 @@ that can be specified:
COMMIT
{stop}<sqlalchemy.engine.result.ResultProxy object at 0x...>
-When using :meth:`~.TableClause.update` in an "execute many" context,
+When using :meth:`~.TableClause.update` in an "executemany" context,
we may wish to also use explicitly named bound parameters in the
WHERE clause. Again, :func:`~.expression.bindparam` is the construct
used to achieve this:
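As an illustration of the "executemany" style referenced in both hunks
above, a minimal sketch using an in-memory SQLite database; the table and
column names are hypothetical::

    from sqlalchemy import (
        create_engine, MetaData, Table, Column, Integer, String)

    engine = create_engine("sqlite://")
    metadata = MetaData()
    users = Table(
        'users', metadata,
        Column('id', Integer, primary_key=True),
        Column('name', String(50)))
    metadata.create_all(engine)

    with engine.connect() as conn:
        # passing a list of dictionaries invokes the DBAPI executemany()
        # method; the statement is compiled against the first dictionary
        conn.execute(users.insert(), [
            {'name': 'jack'},
            {'name': 'wendy'},
        ])
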
diff --git a/doc/build/faq/sessions.rst b/doc/build/faq/sessions.rst
index 300b4bdbc..e3aae00ce 100644
--- a/doc/build/faq/sessions.rst
+++ b/doc/build/faq/sessions.rst
@@ -7,6 +7,72 @@ Sessions / Queries
:backlinks: none
+I'm re-loading data with my Session but it isn't seeing changes that I committed elsewhere
+------------------------------------------------------------------------------------------
+
+The main issue regarding this behavior is that the session acts as though
+the transaction is in the *serializable* isolation state, even if it's not
+(and it usually is not). In practical terms, this means that the session
+does not alter any data that it's already read within the scope of a transaction.
+
+If the term "isolation level" is unfamiliar, then you first need to read this link:
+
+`Isolation Level <https://en.wikipedia.org/wiki/Isolation_%28database_systems%29>`_
+
+In short, serializable isolation level generally means
+that once you SELECT a series of rows in a transaction, you will get
+*the identical data* back each time you re-emit that SELECT. If you are in
+the next-lower isolation level, "repeatable read", you'll
+see newly added rows (and no longer see deleted rows), but for rows that
+you've *already* loaded, you won't see any change. Only if you are in a
+lower isolation level, e.g. "read committed", does it become possible to
+see a row of data change its value.
+
+For information on controlling the isolation level when using the
+SQLAlchemy ORM, see :ref:`session_transaction_isolation`.
+
+To simplify things dramatically, the :class:`.Session` itself works in
+terms of a completely isolated transaction, and doesn't overwrite any mapped attributes
+it's already read unless you tell it to. The use case of trying to re-read
+data you've already loaded in an ongoing transaction is an *uncommon* use
+case that in many cases has no effect, so this is considered to be the
+exception, not the norm; to work within this exception, several methods
+are provided to allow specific data to be reloaded within the context
+of an ongoing transaction.
+
+As for what we mean by "the transaction" when we talk about the
+:class:`.Session`: your :class:`.Session` is intended to work only within
+a transaction. An overview of this is at :ref:`unitofwork_transaction`.
+
+Once we've figured out what our isolation level is, and we think it's
+set at a low enough level so that if we re-SELECT a row,
+we should see new data in our :class:`.Session`, how do we see it?
+
+Three ways, from most common to least:
+
+1. We simply end our transaction and start a new one on next access
+ with our :class:`.Session` by calling :meth:`.Session.commit` (note
+ that if the :class:`.Session` is in the lesser-used "autocommit"
+ mode, there would be a call to :meth:`.Session.begin` as well). The
+ vast majority of applications and use cases do not have any issues
+ with not being able to "see" data in other transactions because
+ they stick to this pattern, which is at the core of the best practice of
+ **short-lived transactions**.
+ See :ref:`session_faq_whentocreate` for some thoughts on this.
+
+2. We tell our :class:`.Session` to re-read rows that it has already read,
+ either when we next query for them using :meth:`.Session.expire_all`
+ or :meth:`.Session.expire`, or immediately on an object using
+ :meth:`.Session.refresh`. See :ref:`session_expire` for detail on this.
+
+3. We can run whole queries while setting them to definitely overwrite
+ already-loaded objects as they read rows by using
+ :meth:`.Query.populate_existing`.
+
+But remember, **the ORM cannot see changes in rows if our isolation
+level is repeatable read or higher, unless we start a new transaction**.
+
+
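A minimal sketch of techniques 2 and 3 above, assuming an existing
``session``, a mapped ``User`` class, and an already-loaded ``some_user``
instance (all names hypothetical)::

    # technique 2: mark attributes as stale; the next attribute access
    # re-SELECTs the row within the current transaction
    session.expire(some_user)
    some_user.name  # emits a SELECT, refreshing the object

    # or re-SELECT immediately
    session.refresh(some_user)

    # technique 3: have an entire query overwrite the attributes of
    # already-loaded objects as it reads rows
    users = session.query(User).populate_existing().all()
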
"This Session's transaction has been rolled back due to a previous exception during flush." (or similar)
---------------------------------------------------------------------------------------------------------
diff --git a/doc/build/orm/session_basics.rst b/doc/build/orm/session_basics.rst
index 8919864ca..dd1162216 100644
--- a/doc/build/orm/session_basics.rst
+++ b/doc/build/orm/session_basics.rst
@@ -158,7 +158,7 @@ Session Frequently Asked Questions
===================================
By this point, many users already have questions about sessions.
-This section presents a mini-FAQ (note that we have also a `real FAQ </faq/index>`)
+This section presents a mini-FAQ (note that we have also a :doc:`real FAQ </faq/index>`)
of the most basic issues one is presented with when using a :class:`.Session`.
When do I make a :class:`.sessionmaker`?
@@ -192,9 +192,15 @@ When do I construct a :class:`.Session`, when do I commit it, and when do I clos
.. topic:: tl;dr;
- As a general rule, keep the lifecycle of the session **separate and
- external** from functions and objects that access and/or manipulate
- database data.
+ 1. As a general rule, keep the lifecycle of the session **separate and
+ external** from functions and objects that access and/or manipulate
+ database data. This will greatly help with achieving a predictable
+ and consistent transactional scope.
+
+ 2. Make sure you have a clear notion of where transactions
+ begin and end, and keep transactions **short**, meaning, they end
+ at the end of a sequence of operations, instead of being held
+ open indefinitely.
A :class:`.Session` is typically constructed at the beginning of a logical
operation where database access is potentially anticipated.
diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py
index afddd5941..093e90bbf 100644
--- a/lib/sqlalchemy/__init__.py
+++ b/lib/sqlalchemy/__init__.py
@@ -120,7 +120,7 @@ from .schema import (
from .inspection import inspect
from .engine import create_engine, engine_from_config
-__version__ = '1.0.6'
+__version__ = '1.0.7'
def __go(lcls):
diff --git a/lib/sqlalchemy/dialects/mssql/pyodbc.py b/lib/sqlalchemy/dialects/mssql/pyodbc.py
index ad1e7ae37..7ec8cbaa7 100644
--- a/lib/sqlalchemy/dialects/mssql/pyodbc.py
+++ b/lib/sqlalchemy/dialects/mssql/pyodbc.py
@@ -95,7 +95,7 @@ for unix + PyODBC.
"""
-from .base import MSExecutionContext, MSDialect
+from .base import MSExecutionContext, MSDialect, VARBINARY
from ...connectors.pyodbc import PyODBCConnector
from ... import types as sqltypes, util
import decimal
@@ -174,6 +174,22 @@ class _MSFloat_pyodbc(_ms_numeric_pyodbc, sqltypes.Float):
pass
+class _VARBINARY_pyodbc(VARBINARY):
+ def bind_processor(self, dialect):
+ if dialect.dbapi is None:
+ return None
+
+ DBAPIBinary = dialect.dbapi.Binary
+
+ def process(value):
+ if value is not None:
+ return DBAPIBinary(value)
+ else:
+ # pyodbc-specific
+ return dialect.dbapi.BinaryNull
+ return process
+
+
class MSExecutionContext_pyodbc(MSExecutionContext):
_embedded_scope_identity = False
@@ -230,7 +246,9 @@ class MSDialect_pyodbc(PyODBCConnector, MSDialect):
MSDialect.colspecs,
{
sqltypes.Numeric: _MSNumeric_pyodbc,
- sqltypes.Float: _MSFloat_pyodbc
+ sqltypes.Float: _MSFloat_pyodbc,
+ VARBINARY: _VARBINARY_pyodbc,
+ sqltypes.LargeBinary: _VARBINARY_pyodbc,
}
)
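A minimal sketch of the INSERT-of-NULL case addressed by
``_VARBINARY_pyodbc`` above; the DSN and table names are hypothetical::

    from sqlalchemy import (
        create_engine, MetaData, Table, Column, Integer, LargeBinary)

    # hypothetical DSN; mssql+pyodbc is the combination affected
    engine = create_engine("mssql+pyodbc://scott:tiger@my_dsn")
    metadata = MetaData()
    blobs = Table(
        'blobs', metadata,
        Column('id', Integer, primary_key=True),
        Column('data', LargeBinary))
    metadata.create_all(engine)

    with engine.connect() as conn:
        # None is now bound as pyodbc's BinaryNull marker rather than
        # failing with an implicit varchar->varbinary conversion error
        conn.execute(blobs.insert(), id=1, data=None)
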
diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py
index bc1c3614e..dc7987d74 100644
--- a/lib/sqlalchemy/dialects/postgresql/base.py
+++ b/lib/sqlalchemy/dialects/postgresql/base.py
@@ -401,6 +401,19 @@ The value passed to the keyword argument will be simply passed through to the
underlying CREATE INDEX command, so it *must* be a valid index type for your
version of PostgreSQL.
+.. _postgresql_index_storage:
+
+Index Storage Parameters
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+PostgreSQL allows storage parameters to be set on indexes. The storage
+parameters available depend on the index method used by the index. Storage
+parameters can be specified on :class:`.Index` using the ``postgresql_with``
+keyword argument::
+
+ Index('my_index', my_table.c.data, postgresql_with={"fillfactor": 50})
+
+.. versionadded:: 1.0.6
.. _postgresql_index_concurrently:
@@ -870,6 +883,16 @@ class ARRAY(sqltypes.Concatenable, sqltypes.TypeEngine):
mytable.c.data[2:7]: [1, 2, 3]
})
+ .. note::
+
+ Multi-dimensional indexing via the ``[]`` operator is not supported
+ in SQLAlchemy 1.0. Please use the :func:`.type_coerce` function
+ to cast an intermediary expression to ARRAY again as a workaround::
+
+ expr = type_coerce(my_array_column[5], ARRAY(Integer))[6]
+
+ Multi-dimensional support will be provided in a future release.
+
:class:`.ARRAY` provides special methods for containment operations,
e.g.::
@@ -1592,6 +1615,13 @@ class PGDDLCompiler(compiler.DDLCompiler):
])
)
+ withclause = index.dialect_options['postgresql']['with']
+
+ if withclause:
+ text += " WITH (%s)" % (', '.join(
+ ['%s = %s' % storage_parameter
+ for storage_parameter in withclause.items()]))
+
whereclause = index.dialect_options["postgresql"]["where"]
if whereclause is not None:
@@ -1921,6 +1951,7 @@ class PGDialect(default.DefaultDialect):
"where": None,
"ops": {},
"concurrently": False,
+ "with": {}
}),
(schema.Table, {
"ignore_search_path": False,
@@ -2609,7 +2640,8 @@ class PGDialect(default.DefaultDialect):
SELECT
i.relname as relname,
ix.indisunique, ix.indexprs, ix.indpred,
- a.attname, a.attnum, NULL, ix.indkey%s
+ a.attname, a.attnum, NULL, ix.indkey%s,
+ i.reloptions, am.amname
FROM
pg_class t
join pg_index ix on t.oid = ix.indrelid
@@ -2617,6 +2649,9 @@ class PGDialect(default.DefaultDialect):
left outer join
pg_attribute a
on t.oid = a.attrelid and %s
+ left outer join
+ pg_am am
+ on i.relam = am.oid
WHERE
t.relkind IN ('r', 'v', 'f', 'm')
and t.oid = :table_oid
@@ -2636,7 +2671,8 @@ class PGDialect(default.DefaultDialect):
SELECT
i.relname as relname,
ix.indisunique, ix.indexprs, ix.indpred,
- a.attname, a.attnum, c.conrelid, ix.indkey::varchar
+ a.attname, a.attnum, c.conrelid, ix.indkey::varchar,
+ i.reloptions, am.amname
FROM
pg_class t
join pg_index ix on t.oid = ix.indrelid
@@ -2649,6 +2685,9 @@ class PGDialect(default.DefaultDialect):
on (ix.indrelid = c.conrelid and
ix.indexrelid = c.conindid and
c.contype in ('p', 'u', 'x'))
+ left outer join
+ pg_am am
+ on i.relam = am.oid
WHERE
t.relkind IN ('r', 'v', 'f', 'm')
and t.oid = :table_oid
@@ -2665,7 +2704,8 @@ class PGDialect(default.DefaultDialect):
sv_idx_name = None
for row in c.fetchall():
- idx_name, unique, expr, prd, col, col_num, conrelid, idx_key = row
+ (idx_name, unique, expr, prd, col,
+ col_num, conrelid, idx_key, options, amname) = row
if expr:
if idx_name != sv_idx_name:
@@ -2691,6 +2731,16 @@ class PGDialect(default.DefaultDialect):
index['unique'] = unique
if conrelid is not None:
index['duplicates_constraint'] = idx_name
+ if options:
+ index['options'] = dict(
+ [option.split("=") for option in options])
+
+ # it *might* be nice to include that this is 'btree' in the
+ # reflection info. But we don't want an Index object
+ # to have a ``postgresql_using`` in it that is just the
+ # default, so for the moment leaving this out.
+ if amname and amname != 'btree':
+ index['amname'] = amname
result = []
for name, idx in indexes.items():
@@ -2701,6 +2751,12 @@ class PGDialect(default.DefaultDialect):
}
if 'duplicates_constraint' in idx:
entry['duplicates_constraint'] = idx['duplicates_constraint']
+ if 'options' in idx:
+ entry.setdefault(
+ 'dialect_options', {})["postgresql_with"] = idx['options']
+ if 'amname' in idx:
+ entry.setdefault(
+ 'dialect_options', {})["postgresql_using"] = idx['amname']
result.append(entry)
return result
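A hedged sketch of how the reflected values surface from
:meth:`.Inspector.get_indexes` after this change; the URL and table name
are hypothetical::

    from sqlalchemy import create_engine, inspect

    engine = create_engine("postgresql://scott:tiger@localhost/test")
    insp = inspect(engine)

    for idx in insp.get_indexes('my_table'):
        # non-default storage parameters and index methods appear in a
        # per-index "dialect_options" dictionary, e.g.
        # {'postgresql_with': {'fillfactor': '50'},
        #  'postgresql_using': 'gist'}
        print(idx['name'], idx.get('dialect_options', {}))
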
diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py b/lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py
index f0fe23df3..97f241d2e 100644
--- a/lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py
+++ b/lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py
@@ -37,7 +37,7 @@ class PGDialect_psycopg2cffi(PGDialect_psycopg2):
FEATURE_VERSION_MAP = dict(
native_json=(2, 4, 4),
- native_jsonb=(99, 99, 99),
+ native_jsonb=(2, 7, 1),
sane_multi_rowcount=(2, 4, 4),
array_oid=(2, 4, 4),
hstore_adapter=(2, 4, 4)
diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py
index 59754a436..eaa435d45 100644
--- a/lib/sqlalchemy/engine/base.py
+++ b/lib/sqlalchemy/engine/base.py
@@ -1531,9 +1531,13 @@ class Transaction(object):
def __init__(self, connection, parent):
self.connection = connection
- self._parent = parent or self
+ self._actual_parent = parent
self.is_active = True
+ @property
+ def _parent(self):
+ return self._actual_parent or self
+
def close(self):
"""Close this :class:`.Transaction`.
@@ -1811,25 +1815,23 @@ class Engine(Connectable, log.Identified):
def dispose(self):
"""Dispose of the connection pool used by this :class:`.Engine`.
+ This has the effect of fully closing all **currently checked in**
+ database connections. Connections that are still checked out
+ will **not** be closed; however, they will no longer be associated
+ with this :class:`.Engine`, so when they are closed individually,
+ eventually the :class:`.Pool` which they are associated with will
+ be garbage collected and they will be closed out fully, if
+ not already closed on checkin.
+
A new connection pool is created immediately after the old one has
been disposed. This new pool, like all SQLAlchemy connection pools,
does not make any actual connections to the database until one is
- first requested.
+ first requested, so as long as the :class:`.Engine` isn't used again,
+ no new connections will be made.
- This method has two general use cases:
-
- * When a dropped connection is detected, it is assumed that all
- connections held by the pool are potentially dropped, and
- the entire pool is replaced.
-
- * An application may want to use :meth:`dispose` within a test
- suite that is creating multiple engines.
+ .. seealso::
- It is critical to note that :meth:`dispose` does **not** guarantee
- that the application will release all open database connections - only
- those connections that are checked into the pool are closed.
- Connections which remain checked out or have been detached from
- the engine are not affected.
+ :ref:`engine_disposal`
"""
self.pool.dispose()
diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py
index 73a8b4635..3bad765df 100644
--- a/lib/sqlalchemy/engine/interfaces.py
+++ b/lib/sqlalchemy/engine/interfaces.py
@@ -252,7 +252,9 @@ class Dialect(object):
sequence
a dictionary of the form
- {'name' : str, 'start' :int, 'increment': int}
+ {'name' : str, 'start' :int, 'increment': int, 'minvalue': int,
+ 'maxvalue': int, 'nominvalue': bool, 'nomaxvalue': bool,
+ 'cycle': bool}
Additional column attributes may be present.
"""
@@ -1147,4 +1149,4 @@ class ExceptionContext(object):
.. versionadded:: 1.0.3
- """ \ No newline at end of file
+ """
diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py
index b2b78dee8..74a0fce77 100644
--- a/lib/sqlalchemy/engine/result.py
+++ b/lib/sqlalchemy/engine/result.py
@@ -221,7 +221,7 @@ class ResultMetaData(object):
in enumerate(result_columns)
]
self.keys = [
- elem[1] for elem in result_columns
+ elem[0] for elem in result_columns
]
else:
# case 2 - raw string, or number of columns in result does
@@ -236,7 +236,8 @@ class ResultMetaData(object):
# that SQLAlchemy has used up through 0.9.
if num_ctx_cols:
- result_map = self._create_result_map(result_columns)
+ result_map = self._create_result_map(
+ result_columns, case_sensitive)
raw = []
self.keys = []
@@ -329,10 +330,12 @@ class ResultMetaData(object):
])
@classmethod
- def _create_result_map(cls, result_columns):
+ def _create_result_map(cls, result_columns, case_sensitive=True):
d = {}
for elem in result_columns:
key, rec = elem[0], elem[1:]
+ if not case_sensitive:
+ key = key.lower()
if key in d:
# conflicting keyname, just double up the list
# of objects. this will cause an "ambiguous name"
@@ -492,10 +495,20 @@ class ResultProxy(object):
self._init_metadata()
def _getter(self, key):
- return self._metadata._getter(key)
+ try:
+ getter = self._metadata._getter
+ except AttributeError:
+ return self._non_result(None)
+ else:
+ return getter(key)
def _has_key(self, key):
- return self._metadata._has_key(key)
+ try:
+ has_key = self._metadata._has_key
+ except AttributeError:
+ return self._non_result(None)
+ else:
+ return has_key(key)
def _init_metadata(self):
metadata = self._cursor_description()
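A minimal sketch of the behavior restored by the ``_getter`` /
``_has_key`` guards above, assuming a connection ``conn`` and a ``users``
table (both hypothetical)::

    result = conn.execute(users.insert(), name='fred')

    # an INSERT produces no rows; with the fix, row-fetch paths raise
    # the informative "This result object does not return rows" error
    # (ResourceClosedError) instead of an AttributeError on NoneType
    result.fetchone()
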
diff --git a/lib/sqlalchemy/ext/automap.py b/lib/sqlalchemy/ext/automap.py
index 1006e7326..330992e56 100644
--- a/lib/sqlalchemy/ext/automap.py
+++ b/lib/sqlalchemy/ext/automap.py
@@ -188,7 +188,7 @@ scheme for class names and a "pluralizer" for collection names using the
"'words_and_underscores' -> 'WordsAndUnderscores'"
return str(tablename[0].upper() + \\
- re.sub(r'_(\w)', lambda m: m.group(1).upper(), tablename[1:]))
+ re.sub(r'_([a-z])', lambda m: m.group(1).upper(), tablename[1:]))
_pluralizer = inflect.engine()
def pluralize_collection(base, local_cls, referred_cls, constraint):
@@ -196,10 +196,9 @@ scheme for class names and a "pluralizer" for collection names using the
"'SomeTerm' -> 'some_terms'"
referred_name = referred_cls.__name__
- uncamelized = referred_name[0].lower() + \\
- re.sub(r'\W',
- lambda m: "_%s" % m.group(0).lower(),
- referred_name[1:])
+ uncamelized = re.sub(r'[A-Z]',
+ lambda m: "_%s" % m.group(0).lower(),
+ referred_name)[1:]
pluralized = _pluralizer.plural(uncamelized)
return pluralized
diff --git a/lib/sqlalchemy/ext/declarative/api.py b/lib/sqlalchemy/ext/declarative/api.py
index 3d46bd4cb..dfc47ce95 100644
--- a/lib/sqlalchemy/ext/declarative/api.py
+++ b/lib/sqlalchemy/ext/declarative/api.py
@@ -7,7 +7,7 @@
"""Public API functions and helpers for declarative."""
-from ...schema import Table, MetaData
+from ...schema import Table, MetaData, Column
from ...orm import synonym as _orm_synonym, \
comparable_property,\
interfaces, properties, attributes
@@ -525,6 +525,17 @@ class AbstractConcreteBase(ConcreteBase):
mappers.append(mn)
pjoin = cls._create_polymorphic_union(mappers)
+ # For columns that were declared on the class, these
+ # are normally ignored with the "__no_table__" mapping,
+ # unless they have a different attribute key vs. col name
+ # and are in the properties argument.
+ # In that case, ensure we update the properties entry
+ # to the correct column from the pjoin target table.
+ declared_cols = set(to_map.declared_columns)
+ for k, v in list(to_map.properties.items()):
+ if v in declared_cols:
+ to_map.properties[k] = pjoin.c[v.key]
+
to_map.local_table = pjoin
m_args = to_map.mapper_args_fn or dict
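A minimal sketch of the mapping shape fixed above, where a column declared
on the abstract base uses an attribute key that differs from its column
name; the class and column names are hypothetical::

    from sqlalchemy import Column, Integer, String
    from sqlalchemy.ext.declarative import (
        declarative_base, AbstractConcreteBase)
    from sqlalchemy.orm import configure_mappers

    Base = declarative_base()

    class Document(AbstractConcreteBase, Base):
        # attribute key "data" differs from column name "the_data";
        # the fix re-points this property at the pjoin column
        data = Column('the_data', String)

    class Report(Document):
        __tablename__ = 'report'
        id = Column(Integer, primary_key=True)
        __mapper_args__ = {
            'polymorphic_identity': 'report', 'concrete': True}

    configure_mappers()  # builds the "pjoin" polymorphic union
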
diff --git a/lib/sqlalchemy/ext/declarative/base.py b/lib/sqlalchemy/ext/declarative/base.py
index 57eb54f63..57305748c 100644
--- a/lib/sqlalchemy/ext/declarative/base.py
+++ b/lib/sqlalchemy/ext/declarative/base.py
@@ -463,7 +463,6 @@ class _MapperConfig(object):
def _prepare_mapper_arguments(self):
properties = self.properties
-
if self.mapper_args_fn:
mapper_args = self.mapper_args_fn()
else:
diff --git a/lib/sqlalchemy/ext/hybrid.py b/lib/sqlalchemy/ext/hybrid.py
index f94c2079e..9c6178264 100644
--- a/lib/sqlalchemy/ext/hybrid.py
+++ b/lib/sqlalchemy/ext/hybrid.py
@@ -45,7 +45,7 @@ as the class itself::
return self.end - self.start
@hybrid_method
- def contains(self,point):
+ def contains(self, point):
return (self.start <= point) & (point < self.end)
@hybrid_method
diff --git a/lib/sqlalchemy/orm/interfaces.py b/lib/sqlalchemy/orm/interfaces.py
index 6cc613baa..cd4a0116d 100644
--- a/lib/sqlalchemy/orm/interfaces.py
+++ b/lib/sqlalchemy/orm/interfaces.py
@@ -338,7 +338,7 @@ class PropComparator(operators.ColumnOperators):
def __init__(self, prop, parentmapper, adapt_to_entity=None):
self.prop = self.property = prop
- self._parententity = parentmapper
+ self._parententity = adapt_to_entity or parentmapper
self._adapt_to_entity = adapt_to_entity
def __clause_element__(self):
diff --git a/lib/sqlalchemy/orm/loading.py b/lib/sqlalchemy/orm/loading.py
index 50afaf601..b81e98a58 100644
--- a/lib/sqlalchemy/orm/loading.py
+++ b/lib/sqlalchemy/orm/loading.py
@@ -17,6 +17,8 @@ from __future__ import absolute_import
from .. import util
from . import attributes, exc as orm_exc
from ..sql import util as sql_util
+from . import strategy_options
+
from .util import _none_set, state_str
from .base import _SET_DEFERRED_EXPIRED, _DEFER_FOR_STATE
from .. import exc as sa_exc
@@ -612,10 +614,17 @@ def load_scalar_attributes(mapper, state, attribute_names):
result = False
if mapper.inherits and not mapper.concrete:
+ # because we are using Core to produce a select() that we
+ # pass to the Query, we aren't calling setup() for mapped
+ # attributes; in 1.0 this means deferred attrs won't get loaded
+ # by default
statement = mapper._optimized_get_statement(state, attribute_names)
if statement is not None:
result = load_on_ident(
- session.query(mapper).from_statement(statement),
+ session.query(mapper).
+ options(
+ strategy_options.Load(mapper).undefer("*")
+ ).from_statement(statement),
None,
only_load_props=attribute_names,
refresh_state=state
diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py
index 468846d40..48fbaae32 100644
--- a/lib/sqlalchemy/orm/mapper.py
+++ b/lib/sqlalchemy/orm/mapper.py
@@ -2038,6 +2038,17 @@ class Mapper(InspectionAttr):
returned, including :attr:`.synonyms`, :attr:`.column_attrs`,
:attr:`.relationships`, and :attr:`.composites`.
+ .. warning::
+
+ the :attr:`.Mapper.attrs` accessor namespace is an
+ instance of :class:`.OrderedProperties`. This is
+ a dictionary-like object which includes a small number of
+ named methods such as :meth:`.OrderedProperties.items`
+ and :meth:`.OrderedProperties.values`. When
+ accessing attributes dynamically, favor using the dict-access
+ scheme, e.g. ``mapper.attrs[somename]`` over
+ ``getattr(mapper.attrs, somename)`` to avoid name collisions.
+
.. seealso::
:attr:`.Mapper.all_orm_descriptors`
@@ -2073,6 +2084,17 @@ class Mapper(InspectionAttr):
referring to the collection of mapped properties via
:attr:`.Mapper.attrs`.
+ .. warning::
+
+ the :attr:`.Mapper.all_orm_descriptors` accessor namespace is an
+ instance of :class:`.OrderedProperties`. This is
+ a dictionary-like object which includes a small number of
+ named methods such as :meth:`.OrderedProperties.items`
+ and :meth:`.OrderedProperties.values`. When
+ accessing attributes dynamically, favor using the dict-access
+ scheme, e.g. ``mapper.attrs[somename]`` over
+ ``getattr(mapper.attrs, somename)`` to avoid name collisions.
+
.. versionadded:: 0.8.0
.. seealso::
@@ -2114,6 +2136,17 @@ class Mapper(InspectionAttr):
"""Return a namespace of all :class:`.RelationshipProperty`
properties maintained by this :class:`.Mapper`.
+ .. warning::
+
+ the :attr:`.Mapper.relationships` accessor namespace is an
+ instance of :class:`.OrderedProperties`. This is
+ a dictionary-like object which includes a small number of
+ named methods such as :meth:`.OrderedProperties.items`
+ and :meth:`.OrderedProperties.values`. When
+ accessing attributes dynamically, favor using the dict-access
+ scheme, e.g. ``mapper.attrs[somename]`` over
+ ``getattr(mapper.attrs, somename)`` to avoid name collisions.
+
.. seealso::
:attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
diff --git a/lib/sqlalchemy/orm/persistence.py b/lib/sqlalchemy/orm/persistence.py
index a42ed2f7c..0bfee2ece 100644
--- a/lib/sqlalchemy/orm/persistence.py
+++ b/lib/sqlalchemy/orm/persistence.py
@@ -455,12 +455,31 @@ def _collect_update_commands(
if isinstance(value, sql.ClauseElement):
value_params[col] = value
- elif not state.manager[propkey].impl.is_equal(
- value, state.committed_state[propkey]):
+ # guard against values that generate non-__nonzero__
+ # objects for __eq__()
+ elif state.manager[propkey].impl.is_equal(
+ value, state.committed_state[propkey]) is not True:
params[col.key] = value
if update_version_id is not None and \
mapper.version_id_col in mapper._cols_by_table[table]:
+
+ if not bulk and not (params or value_params):
+ # HACK: check for history in other tables, in case the
+ # history is only in a different table than the one
+ # where the version_id_col is. This logic was lost
+ # from 0.9 -> 1.0.0 and restored in 1.0.6.
+ for prop in mapper._columntoproperty.values():
+ history = (
+ state.manager[prop.key].impl.get_history(
+ state, state_dict,
+ attributes.PASSIVE_NO_INITIALIZE))
+ if history.added:
+ break
+ else:
+ # no net change, break
+ continue
+
col = mapper.version_id_col
params[col._label] = update_version_id
@@ -469,7 +488,7 @@ def _collect_update_commands(
val = mapper.version_id_generator(update_version_id)
params[col.key] = val
- if not (params or value_params):
+ elif not (params or value_params):
continue
if bulk:
diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py
index 5694f7255..55e02984b 100644
--- a/lib/sqlalchemy/orm/properties.py
+++ b/lib/sqlalchemy/orm/properties.py
@@ -245,6 +245,8 @@ class ColumnProperty(StrategizedProperty):
if self.adapter:
return self.adapter(self.prop.columns[0])
else:
+ # no adapter, so we aren't aliased
+ # assert self._parententity is self._parentmapper
return self.prop.columns[0]._annotate({
"parententity": self._parententity,
"parentmapper": self._parententity})
diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py
index 823b97239..6d3869679 100644
--- a/lib/sqlalchemy/orm/util.py
+++ b/lib/sqlalchemy/orm/util.py
@@ -530,7 +530,7 @@ class AliasedInsp(InspectionAttr):
def _adapt_element(self, elem):
return self._adapter.traverse(elem).\
_annotate({
- 'parententity': self.entity,
+ 'parententity': self,
'parentmapper': self.mapper}
)
@@ -839,9 +839,10 @@ class _ORMJoin(expression.Join):
# or implicit ON clause, augment it the same way we'd augment the
# WHERE.
single_crit = right_info.mapper._single_table_criterion
- if right_info.is_aliased_class:
- single_crit = right_info._adapter.traverse(single_crit)
- self.onclause = self.onclause & single_crit
+ if single_crit is not None:
+ if right_info.is_aliased_class:
+ single_crit = right_info._adapter.traverse(single_crit)
+ self.onclause = self.onclause & single_crit
def _splice_into_center(self, other):
"""Splice a join into the center.
diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py
index e9c3d0efa..a036dcc42 100644
--- a/lib/sqlalchemy/sql/compiler.py
+++ b/lib/sqlalchemy/sql/compiler.py
@@ -1270,9 +1270,6 @@ class SQLCompiler(Compiled):
return " AS " + alias_name_text
def _add_to_result_map(self, keyname, name, objects, type_):
- if not self.dialect.case_sensitive:
- keyname = keyname.lower()
-
self._result_columns.append((keyname, name, objects, type_))
def _label_select_column(self, select, column,
@@ -2299,6 +2296,16 @@ class DDLCompiler(Compiled):
text += " INCREMENT BY %d" % create.element.increment
if create.element.start is not None:
text += " START WITH %d" % create.element.start
+ if create.element.minvalue is not None:
+ text += " MINVALUE %d" % create.element.minvalue
+ if create.element.maxvalue is not None:
+ text += " MAXVALUE %d" % create.element.maxvalue
+ if create.element.nominvalue is not None:
+ text += " NO MINVALUE"
+ if create.element.nomaxvalue is not None:
+ text += " NO MAXVALUE"
+ if create.element.cycle is not None:
+ text += " CYCLE"
return text
def visit_drop_sequence(self, drop):
diff --git a/lib/sqlalchemy/sql/dml.py b/lib/sqlalchemy/sql/dml.py
index a2a564690..6756f1554 100644
--- a/lib/sqlalchemy/sql/dml.py
+++ b/lib/sqlalchemy/sql/dml.py
@@ -262,10 +262,14 @@ class ValuesBase(UpdateBase):
has the effect of using the DBAPI
`executemany() <http://www.python.org/dev/peps/pep-0249/#id18>`_
method, which provides a high-performance system of invoking
- a single-row INSERT statement many times against a series
+ a single-row INSERT or single-criteria UPDATE or DELETE statement
+ many times against a series
of parameter sets. The "executemany" style is supported by
- all database backends, as it does not depend on a special SQL
- syntax.
+ all database backends, and works equally well for INSERT,
+ UPDATE, and DELETE, as it does not depend on a special SQL
+ syntax. See :ref:`execute_multiple` for an introduction to
+ the traditional Core method of multiple parameter set invocation
+ using this system.
.. versionadded:: 0.8
Support for multiple-VALUES INSERT statements.
diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py
index 5df736ac7..4af1e4463 100644
--- a/lib/sqlalchemy/sql/elements.py
+++ b/lib/sqlalchemy/sql/elements.py
@@ -715,7 +715,14 @@ class ColumnElement(operators.ColumnOperators, ClauseElement):
@util.memoized_property
def comparator(self):
- return self.type.comparator_factory(self)
+ try:
+ comparator_factory = self.type.comparator_factory
+ except AttributeError:
+ raise TypeError(
+ "Object %r associated with '.type' attribute "
+ "is not a TypeEngine class or object" % self.type)
+ else:
+ return comparator_factory(self)
def __getattr__(self, key):
try:
@@ -1847,9 +1854,12 @@ class BooleanClauseList(ClauseList, ColumnElement):
def _construct(cls, operator, continue_on, skip_on, *clauses, **kw):
convert_clauses = []
- clauses = util.coerce_generator_arg(clauses)
+ clauses = [
+ _expression_literal_as_text(clause)
+ for clause in
+ util.coerce_generator_arg(clauses)
+ ]
for clause in clauses:
- clause = _expression_literal_as_text(clause)
if isinstance(clause, continue_on):
continue
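Given the up-front coercion above, a minimal sketch of the constructs
fixed in :ticket:`3490`::

    from sqlalchemy import and_, or_, column

    x = column('x')

    # literal True/False are now coerced to true()/false() constructs
    # before the continue_on/skip_on checks run, instead of raising
    # an AttributeError
    print(or_(False, x == 5))
    print(and_(True, x == 5))
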
diff --git a/lib/sqlalchemy/sql/operators.py b/lib/sqlalchemy/sql/operators.py
index 51f162c98..17a9d3086 100644
--- a/lib/sqlalchemy/sql/operators.py
+++ b/lib/sqlalchemy/sql/operators.py
@@ -597,6 +597,14 @@ class ColumnOperators(Operators):
"""
return self.reverse_operate(div, other)
+ def __rmod__(self, other):
+ """Implement the ``%`` operator in reverse.
+
+ See :meth:`.ColumnOperators.__mod__`.
+
+ """
+ return self.reverse_operate(mod, other)
+
def between(self, cleft, cright, symmetric=False):
"""Produce a :func:`~.expression.between` clause against
the parent object, given the lower and upper range.
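With ``__rmod__`` added, the ``%`` operator now also works with the column
expression on the right-hand side; a minimal sketch::

    from sqlalchemy import column

    x = column('x')

    print(x % 5)   # x % :x_1, via __mod__
    print(5 % x)   # roughly :x_1 % x, via the new __rmod__
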
diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py
index a8989627d..137208584 100644
--- a/lib/sqlalchemy/sql/schema.py
+++ b/lib/sqlalchemy/sql/schema.py
@@ -2040,8 +2040,9 @@ class Sequence(DefaultGenerator):
is_sequence = True
- def __init__(self, name, start=None, increment=None, schema=None,
- optional=False, quote=None, metadata=None,
+ def __init__(self, name, start=None, increment=None, minvalue=None,
+ maxvalue=None, nominvalue=None, nomaxvalue=None, cycle=None,
+ schema=None, optional=False, quote=None, metadata=None,
quote_schema=None,
for_update=False):
"""Construct a :class:`.Sequence` object.
@@ -2057,6 +2058,53 @@ class Sequence(DefaultGenerator):
the database as the value of the "INCREMENT BY" clause. If ``None``,
the clause is omitted, which on most platforms indicates an
increment of 1.
+ :param minvalue: the minimum value of the sequence. This
+ value is used when the CREATE SEQUENCE command is emitted to
+ the database as the value of the "MINVALUE" clause. If ``None``,
+ the clause is omitted, which on most platforms indicates a
+ minvalue of 1 and -2^63-1 for ascending and descending sequences,
+ respectively.
+
+ .. versionadded:: 1.0.7
+
+ :param maxvalue: the maximum value of the sequence. This
+ value is used when the CREATE SEQUENCE command is emitted to
+ the database as the value of the "MAXVALUE" clause. If ``None``,
+ the clause is omitted, which on most platforms indicates a
+ maxvalue of 2^63-1 and -1 for ascending and descending sequences,
+ respectively.
+
+ .. versionadded:: 1.0.7
+
+ :param nominvalue: no minimum value of the sequence. This
+ value is used when the CREATE SEQUENCE command is emitted to
+ the database as the value of the "NO MINVALUE" clause. If ``None``,
+ the clause is omitted, which on most platforms indicates a
+ minvalue of 1 and -2^63-1 for ascending and descending sequences,
+ respectively.
+
+ .. versionadded:: 1.0.7
+
+ :param nomaxvalue: no maximum value of the sequence. This
+ value is used when the CREATE SEQUENCE command is emitted to
+ the database as the value of the "NO MAXVALUE" clause. If ``None``,
+ the clause is omitted, which on most platforms indicates a
+ maxvalue of 2^63-1 and -1 for ascending and descending sequences,
+ respectively.
+
+ .. versionadded:: 1.0.7
+
+ :param cycle: allows the sequence to wrap around when the maxvalue
+ or minvalue has been reached by an ascending or descending sequence
+ respectively. This value is used when the CREATE SEQUENCE command
+ is emitted to the database as the "CYCLE" clause. If the limit is
+ reached, the next number generated will be the minvalue or maxvalue,
+ respectively. If cycle=False (the default), any calls to nextval
+ after the sequence has reached its maximum value will return an
+ error.
+
+ .. versionadded:: 1.0.7
+
:param schema: Optional schema name for the sequence, if located
in a schema other than the default.
:param optional: boolean value, when ``True``, indicates that this
@@ -2101,6 +2149,11 @@ class Sequence(DefaultGenerator):
self.name = quoted_name(name, quote)
self.start = start
self.increment = increment
+ self.minvalue = minvalue
+ self.maxvalue = maxvalue
+ self.nominvalue = nominvalue
+ self.nomaxvalue = nomaxvalue
+ self.cycle = cycle
self.optional = optional
if metadata is not None and schema is None and metadata.schema:
self.schema = schema = metadata.schema
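A minimal sketch of the new :class:`.Sequence` parameters and the DDL the
compiler hunks above produce, illustrated against the Postgresql dialect;
the sequence name is hypothetical::

    from sqlalchemy import Sequence
    from sqlalchemy.schema import CreateSequence
    from sqlalchemy.dialects import postgresql

    seq = Sequence(
        'cycling_seq', start=1, increment=1,
        minvalue=1, maxvalue=100, cycle=True)

    print(CreateSequence(seq).compile(dialect=postgresql.dialect()))
    # approximately:
    # CREATE SEQUENCE cycling_seq INCREMENT BY 1 START WITH 1
    #     MINVALUE 1 MAXVALUE 100 CYCLE
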
diff --git a/lib/sqlalchemy/testing/__init__.py b/lib/sqlalchemy/testing/__init__.py
index 7482e32a1..bd6377eb7 100644
--- a/lib/sqlalchemy/testing/__init__.py
+++ b/lib/sqlalchemy/testing/__init__.py
@@ -21,7 +21,8 @@ def against(*queries):
from .assertions import emits_warning, emits_warning_on, uses_deprecated, \
eq_, ne_, le_, is_, is_not_, startswith_, assert_raises, \
assert_raises_message, AssertsCompiledSQL, ComparesTables, \
- AssertsExecutionResults, expect_deprecated, expect_warnings
+ AssertsExecutionResults, expect_deprecated, expect_warnings, \
+ in_, not_in_
from .util import run_as_contextmanager, rowset, fail, \
provide_metadata, adict, force_drop_names, \
diff --git a/lib/sqlalchemy/testing/assertions.py b/lib/sqlalchemy/testing/assertions.py
index e0c02c896..21dc3e71a 100644
--- a/lib/sqlalchemy/testing/assertions.py
+++ b/lib/sqlalchemy/testing/assertions.py
@@ -50,8 +50,6 @@ def expect_warnings_on(db, *messages, **kw):
if isinstance(db, util.string_types) and not spec(config._current):
yield
- elif not _is_excluded(*db):
- yield
else:
with expect_warnings(*messages, **kw):
yield
@@ -90,7 +88,7 @@ def emits_warning_on(db, *messages):
"""
@decorator
def decorate(fn, *args, **kw):
- with expect_warnings_on(db, *messages):
+ with expect_warnings_on(db, assert_=False, *messages):
return fn(*args, **kw)
return decorate
@@ -231,6 +229,16 @@ def is_not_(a, b, msg=None):
assert a is not b, msg or "%r is %r" % (a, b)
+def in_(a, b, msg=None):
+ """Assert a in b, with repr messaging on failure."""
+ assert a in b, msg or "%r not in %r" % (a, b)
+
+
+def not_in_(a, b, msg=None):
+ """Assert a in not b, with repr messaging on failure."""
+ assert a not in b, msg or "%r is in %r" % (a, b)
+
+
def startswith_(a, fragment, msg=None):
"""Assert a.startswith(fragment), with repr messaging on failure."""
assert a.startswith(fragment), msg or "%r does not start with %r" % (
diff --git a/lib/sqlalchemy/testing/engines.py b/lib/sqlalchemy/testing/engines.py
index 8bd1becbf..1eaf62960 100644
--- a/lib/sqlalchemy/testing/engines.py
+++ b/lib/sqlalchemy/testing/engines.py
@@ -211,6 +211,7 @@ def testing_engine(url=None, options=None):
"""Produce an engine configured by --options with optional overrides."""
from sqlalchemy import create_engine
+ from sqlalchemy.engine.url import make_url
if not options:
use_reaper = True
@@ -218,12 +219,16 @@ def testing_engine(url=None, options=None):
use_reaper = options.pop('use_reaper', True)
url = url or config.db.url
+
+ url = make_url(url)
if options is None:
- options = config.db_opts
+ if config.db is None or url.drivername == config.db.url.drivername:
+ options = config.db_opts
+ else:
+ options = {}
engine = create_engine(url, **options)
- engine._has_events = True # enable event blocks, helps with
- # profiling
+ engine._has_events = True # enable event blocks, helps with profiling
if isinstance(engine.pool, pool.QueuePool):
engine.pool._timeout = 0
diff --git a/lib/sqlalchemy/testing/plugin/plugin_base.py b/lib/sqlalchemy/testing/plugin/plugin_base.py
index ef304afa6..6cdec05ad 100644
--- a/lib/sqlalchemy/testing/plugin/plugin_base.py
+++ b/lib/sqlalchemy/testing/plugin/plugin_base.py
@@ -40,7 +40,6 @@ file_config = None
logging = None
-db_opts = {}
include_tags = set()
exclude_tags = set()
options = None
@@ -115,7 +114,6 @@ def memoize_important_follower_config(dict_):
"""
dict_['memoized_config'] = {
- 'db_opts': db_opts,
'include_tags': include_tags,
'exclude_tags': exclude_tags
}
@@ -127,8 +125,7 @@ def restore_important_follower_config(dict_):
This invokes in the follower process.
"""
- global db_opts, include_tags, exclude_tags
- db_opts.update(dict_['memoized_config']['db_opts'])
+ global include_tags, exclude_tags
include_tags.update(dict_['memoized_config']['include_tags'])
exclude_tags.update(dict_['memoized_config']['exclude_tags'])
@@ -268,7 +265,7 @@ def _engine_uri(options, file_config):
for db_url in db_urls:
cfg = provision.setup_config(
- db_url, db_opts, options, file_config, provision.FOLLOWER_IDENT)
+ db_url, options, file_config, provision.FOLLOWER_IDENT)
if not config._current:
cfg.set_as_current(cfg, testing)
diff --git a/lib/sqlalchemy/testing/provision.py b/lib/sqlalchemy/testing/provision.py
index 8469a0658..77527571b 100644
--- a/lib/sqlalchemy/testing/provision.py
+++ b/lib/sqlalchemy/testing/provision.py
@@ -46,9 +46,10 @@ def configure_follower(follower_ident):
_configure_follower(cfg, follower_ident)
-def setup_config(db_url, db_opts, options, file_config, follower_ident):
+def setup_config(db_url, options, file_config, follower_ident):
if follower_ident:
db_url = _follower_url_from_main(db_url, follower_ident)
+ db_opts = {}
_update_db_opts(db_url, db_opts)
eng = engines.testing_engine(db_url, db_opts)
eng.connect().close()
diff --git a/test/dialect/mssql/test_types.py b/test/dialect/mssql/test_types.py
index a0f674a61..17ceb6b61 100644
--- a/test/dialect/mssql/test_types.py
+++ b/test/dialect/mssql/test_types.py
@@ -712,7 +712,7 @@ class BinaryTest(fixtures.TestBase, AssertsExecutionResults):
@classmethod
def setup_class(cls):
- global binary_table, MyPickleType
+ global MyPickleType
class MyPickleType(types.TypeDecorator):
impl = PickleType
@@ -727,9 +727,13 @@ class BinaryTest(fixtures.TestBase, AssertsExecutionResults):
value.stuff = 'this is the right stuff'
return value
- binary_table = Table(
+ def teardown(self):
+ self.binary_table.drop(testing.db)
+
+ def _fixture(self, engine):
+ self.binary_table = binary_table = Table(
'binary_table',
- MetaData(testing.db),
+ MetaData(),
Column('primary_id', Integer, Sequence('binary_id_seq',
optional=True), primary_key=True),
Column('data', mssql.MSVarBinary(8000)),
@@ -739,38 +743,54 @@ class BinaryTest(fixtures.TestBase, AssertsExecutionResults):
Column('pickled', PickleType),
Column('mypickle', MyPickleType),
)
- binary_table.create()
+ binary_table.create(engine)
+ return binary_table
- def teardown(self):
- binary_table.delete().execute()
+ def test_binary_legacy_types(self):
+ self._test_binary(False)
- @classmethod
- def teardown_class(cls):
- binary_table.drop()
+ @testing.only_on('mssql >= 11')
+ def test_binary_updated_types(self):
+ self._test_binary(True)
- def test_binary(self):
+ def test_binary_none_legacy_types(self):
+ self._test_binary_none(False)
+
+ @testing.only_on('mssql >= 11')
+ def test_binary_none_updated_types(self):
+ self._test_binary_none(True)
+
+ def _test_binary(self, deprecate_large_types):
testobj1 = pickleable.Foo('im foo 1')
testobj2 = pickleable.Foo('im foo 2')
testobj3 = pickleable.Foo('im foo 3')
- stream1 = self.load_stream('binary_data_one.dat')
- stream2 = self.load_stream('binary_data_two.dat')
- binary_table.insert().execute(
- primary_id=1,
- misc='binary_data_one.dat',
- data=stream1,
- data_image=stream1,
- data_slice=stream1[0:100],
- pickled=testobj1,
- mypickle=testobj3,
- )
- binary_table.insert().execute(
- primary_id=2,
- misc='binary_data_two.dat',
- data=stream2,
- data_image=stream2,
- data_slice=stream2[0:99],
- pickled=testobj2,
- )
+ stream1 = self._load_stream('binary_data_one.dat')
+ stream2 = self._load_stream('binary_data_two.dat')
+ engine = engines.testing_engine(
+ options={"deprecate_large_types": deprecate_large_types})
+
+ binary_table = self._fixture(engine)
+
+ with engine.connect() as conn:
+ conn.execute(
+ binary_table.insert(),
+ primary_id=1,
+ misc='binary_data_one.dat',
+ data=stream1,
+ data_image=stream1,
+ data_slice=stream1[0:100],
+ pickled=testobj1,
+ mypickle=testobj3,
+ )
+ conn.execute(
+ binary_table.insert(),
+ primary_id=2,
+ misc='binary_data_two.dat',
+ data=stream2,
+ data_image=stream2,
+ data_slice=stream2[0:99],
+ pickled=testobj2,
+ )
for stmt in \
binary_table.select(order_by=binary_table.c.primary_id), \
@@ -783,7 +803,8 @@ class BinaryTest(fixtures.TestBase, AssertsExecutionResults):
data_slice=types.BINARY(100), pickled=PickleType,
mypickle=MyPickleType),
bind=testing.db):
- l = stmt.execute().fetchall()
+ with engine.connect() as conn:
+ l = conn.execute(stmt).fetchall()
eq_(list(stream1), list(l[0]['data']))
paddedstream = list(stream1[0:100])
paddedstream.extend(['\x00'] * (100 - len(paddedstream)))
@@ -795,44 +816,48 @@ class BinaryTest(fixtures.TestBase, AssertsExecutionResults):
eq_(testobj3.moredata, l[0]['mypickle'].moredata)
eq_(l[0]['mypickle'].stuff, 'this is the right stuff')
- @testing.requires.no_mssql_freetds
- def test_binary_none(self):
- # TODO: pyodbc does not seem to accept "None" for a VARBINARY
- # column (data=None). error: [Microsoft][ODBC SQL Server
- # Driver][SQL Server]Implicit conversion from data type varchar
- # to varbinary is not allowed. Use the CONVERT function to run
- # this query. (257) binary_table.insert().execute(primary_id=3,
- # misc='binary_data_two.dat', data=None, data_image=None,
- # data_slice=stream2[0:99], pickled=None)
-
- stream2 = self.load_stream('binary_data_two.dat')
-
- binary_table.insert().execute(
- primary_id=3,
- misc='binary_data_two.dat', data_image=None,
- data_slice=stream2[0:99], pickled=None)
- for stmt in \
- binary_table.select(), \
- text(
- 'select * from binary_table',
- typemap=dict(
- data=mssql.MSVarBinary(8000),
- data_image=mssql.MSImage,
- data_slice=types.BINARY(100), pickled=PickleType,
- mypickle=MyPickleType),
- bind=testing.db):
- row = stmt.execute().first()
- eq_(
- row['pickled'], None
- )
- eq_(
- row['data_image'], None
- )
- eq_(
- row['data_slice'], stream2[0:99]
- )
+ def _test_binary_none(self, deprecate_large_types):
+ engine = engines.testing_engine(
+ options={"deprecate_large_types": deprecate_large_types})
- def load_stream(self, name, len=3000):
+ binary_table = self._fixture(engine)
+
+ stream2 = self._load_stream('binary_data_two.dat')
+
+ with engine.connect() as conn:
+ conn.execute(
+ binary_table.insert(),
+ primary_id=3,
+ misc='binary_data_two.dat', data_image=None,
+ data_slice=stream2[0:99], pickled=None)
+ for stmt in \
+ binary_table.select(), \
+ text(
+ 'select * from binary_table',
+ typemap=dict(
+ data=mssql.MSVarBinary(8000),
+ data_image=mssql.MSImage,
+ data_slice=types.BINARY(100),
+ pickled=PickleType,
+ mypickle=MyPickleType),
+ bind=testing.db):
+ row = conn.execute(stmt).first()
+ eq_(
+ row['pickled'], None
+ )
+ eq_(
+ row['data_image'], None
+ )
+
+ # the type we used here is 100 bytes
+ # so we will get 100 bytes zero-padded
+ paddedstream = list(stream2[0:99])
+ paddedstream.extend(['\x00'] * (100 - len(paddedstream)))
+ eq_(
+ list(row['data_slice']), paddedstream
+ )
+
+ def _load_stream(self, name, len=3000):
fp = open(
os.path.join(os.path.dirname(__file__), "..", "..", name), 'rb')
stream = fp.read(len)
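The refactor above parameterizes the binary round-trip tests on the mssql
dialect's ``deprecate_large_types`` engine flag. A minimal sketch of the
flag's public-facing effect, assuming a hypothetical pyodbc DSN; with the
flag on, LargeBinary renders as VARBINARY(max) rather than the deprecated
IMAGE type:

    from sqlalchemy import create_engine, MetaData, Table, Column
    from sqlalchemy import Integer, LargeBinary

    # "my_dsn" is a placeholder, not a real data source
    engine = create_engine(
        "mssql+pyodbc://scott:tiger@my_dsn",
        deprecate_large_types=True)

    m = MetaData()
    blobs = Table(
        'blobs', m,
        Column('id', Integer, primary_key=True),
        Column('data', LargeBinary))
    # emits "data VARBINARY(max)"; with the flag off, "data IMAGE"
    blobs.create(engine)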
diff --git a/test/dialect/postgresql/test_compiler.py b/test/dialect/postgresql/test_compiler.py
index d5c8d9065..9fa5c9804 100644
--- a/test/dialect/postgresql/test_compiler.py
+++ b/test/dialect/postgresql/test_compiler.py
@@ -370,6 +370,28 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
'USING hash (data)',
dialect=postgresql.dialect())
+ def test_create_index_with_with(self):
+ m = MetaData()
+ tbl = Table('testtbl', m, Column('data', String))
+
+ idx1 = Index('test_idx1', tbl.c.data)
+ idx2 = Index(
+ 'test_idx2', tbl.c.data, postgresql_with={"fillfactor": 50})
+ idx3 = Index('test_idx3', tbl.c.data, postgresql_using="gist",
+ postgresql_with={"buffering": "off"})
+
+ self.assert_compile(schema.CreateIndex(idx1),
+ 'CREATE INDEX test_idx1 ON testtbl '
+ '(data)')
+ self.assert_compile(schema.CreateIndex(idx2),
+ 'CREATE INDEX test_idx2 ON testtbl '
+ '(data) '
+ 'WITH (fillfactor = 50)')
+ self.assert_compile(schema.CreateIndex(idx3),
+ 'CREATE INDEX test_idx3 ON testtbl '
+ 'USING gist (data) '
+ 'WITH (buffering = off)')
+
def test_create_index_expr_gets_parens(self):
m = MetaData()
tbl = Table('testtbl', m, Column('x', Integer), Column('y', Integer))
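The WITH-clause assertions above exercise the new ``postgresql_with``
keyword on Index, which accepts a dict of storage parameters. A short
usage sketch against the PostgreSQL dialect:

    from sqlalchemy import MetaData, Table, Column, String, Index
    from sqlalchemy.schema import CreateIndex
    from sqlalchemy.dialects import postgresql

    m = MetaData()
    tbl = Table('testtbl', m, Column('data', String))
    idx = Index('test_idx', tbl.c.data, postgresql_with={"fillfactor": 50})

    print(CreateIndex(idx).compile(dialect=postgresql.dialect()))
    # CREATE INDEX test_idx ON testtbl (data) WITH (fillfactor = 50)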
diff --git a/test/dialect/postgresql/test_reflection.py b/test/dialect/postgresql/test_reflection.py
index 32e0259aa..0354fa436 100644
--- a/test/dialect/postgresql/test_reflection.py
+++ b/test/dialect/postgresql/test_reflection.py
@@ -12,6 +12,7 @@ from sqlalchemy import Table, Column, MetaData, Integer, String, \
from sqlalchemy import exc
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import base as postgresql
+from sqlalchemy.dialects.postgresql import ARRAY
class ForeignTableReflectionTest(fixtures.TablesTest, AssertsExecutionResults):
@@ -70,7 +71,7 @@ class ForeignTableReflectionTest(fixtures.TablesTest, AssertsExecutionResults):
eq_(names, ['testtable'])
-class MaterialiedViewReflectionTest(
+class MaterializedViewReflectionTest(
fixtures.TablesTest, AssertsExecutionResults):
"""Test reflection on materialized views"""
@@ -673,6 +674,59 @@ class ReflectionTest(fixtures.TestBase):
conn.close()
@testing.provide_metadata
+ def test_index_reflection_with_storage_options(self):
+ """reflect indexes with storage options set"""
+
+ metadata = self.metadata
+
+ Table(
+ 't', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('x', Integer)
+ )
+ metadata.create_all()
+
+ with testing.db.connect().execution_options(autocommit=True) as conn:
+ conn.execute("CREATE INDEX idx1 ON t (x) WITH (fillfactor = 50)")
+
+ ind = testing.db.dialect.get_indexes(conn, "t", None)
+ eq_(ind, [{'unique': False, 'column_names': ['x'], 'name': 'idx1',
+ 'dialect_options':
+ {"postgresql_with": {"fillfactor": "50"}}}])
+
+ m = MetaData()
+ t1 = Table('t', m, autoload_with=conn)
+ eq_(
+ list(t1.indexes)[0].dialect_options['postgresql']['with'],
+ {"fillfactor": "50"}
+ )
+
+ @testing.provide_metadata
+ def test_index_reflection_with_access_method(self):
+ """reflect indexes with storage options set"""
+
+ metadata = self.metadata
+
+ Table(
+ 't', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('x', ARRAY(Integer))
+ )
+ metadata.create_all()
+ with testing.db.connect().execution_options(autocommit=True) as conn:
+ conn.execute("CREATE INDEX idx1 ON t USING gin (x)")
+
+ ind = testing.db.dialect.get_indexes(conn, "t", None)
+ eq_(ind, [{'unique': False, 'column_names': ['x'], 'name': 'idx1',
+ 'dialect_options': {'postgresql_using': 'gin'}}])
+ m = MetaData()
+ t1 = Table('t', m, autoload_with=conn)
+ eq_(
+ list(t1.indexes)[0].dialect_options['postgresql']['using'],
+ 'gin'
+ )
+
+ @testing.provide_metadata
def test_foreign_key_option_inspection(self):
metadata = self.metadata
Table(
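Reflection of these index options also surfaces through the public
inspection interface; a sketch, assuming a PostgreSQL database holding the
"t" table and "idx1" index created in the tests above (the URL is a
placeholder):

    from sqlalchemy import create_engine, inspect

    engine = create_engine("postgresql://scott:tiger@localhost/test")
    insp = inspect(engine)
    for ix in insp.get_indexes('t'):
        # dialect_options carries e.g.
        # {'postgresql_with': {'fillfactor': '50'}} or
        # {'postgresql_using': 'gin'}
        print(ix['name'], ix.get('dialect_options', {}))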
diff --git a/test/ext/declarative/test_inheritance.py b/test/ext/declarative/test_inheritance.py
index 3e6980190..274a6aa28 100644
--- a/test/ext/declarative/test_inheritance.py
+++ b/test/ext/declarative/test_inheritance.py
@@ -1453,3 +1453,33 @@ class ConcreteExtensionConfigTest(
"FROM actual_documents) AS pjoin"
)
+ def test_column_attr_names(self):
+ """test #3480"""
+
+ class Document(Base, AbstractConcreteBase):
+ documentType = Column('documenttype', String)
+
+ class Offer(Document):
+ __tablename__ = 'offers'
+
+ id = Column(Integer, primary_key=True)
+ __mapper_args__ = {
+ 'polymorphic_identity': 'offer'
+ }
+
+ configure_mappers()
+ session = Session()
+ self.assert_compile(
+ session.query(Document),
+ "SELECT pjoin.documenttype AS pjoin_documenttype, "
+ "pjoin.id AS pjoin_id, pjoin.type AS pjoin_type FROM "
+ "(SELECT offers.documenttype AS documenttype, offers.id AS id, "
+ "'offer' AS type FROM offers) AS pjoin"
+ )
+
+ self.assert_compile(
+ session.query(Document.documentType),
+ "SELECT pjoin.documenttype AS pjoin_documenttype FROM "
+ "(SELECT offers.documenttype AS documenttype, offers.id AS id, "
+ "'offer' AS type FROM offers) AS pjoin"
+ )
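The mapping under test gives the column attribute a Python name
("documentType") distinct from its SQL name ("documenttype"); issue #3480
makes that attribute resolve correctly against the polymorphic "pjoin"
selectable. A condensed sketch of the pattern, assuming a declarative
Base:

    from sqlalchemy import Column, Integer, String
    from sqlalchemy.ext.declarative import (
        declarative_base, AbstractConcreteBase)
    from sqlalchemy.orm import configure_mappers, Session

    Base = declarative_base()

    class Document(Base, AbstractConcreteBase):
        documentType = Column('documenttype', String)

    class Offer(Document):
        __tablename__ = 'offers'
        id = Column(Integer, primary_key=True)
        __mapper_args__ = {'polymorphic_identity': 'offer'}

    configure_mappers()
    # Session().query(Document.documentType) now selects
    # pjoin.documenttype from the generated UNION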
diff --git a/test/orm/inheritance/test_basic.py b/test/orm/inheritance/test_basic.py
index d8b2a44af..911d4bc5c 100644
--- a/test/orm/inheritance/test_basic.py
+++ b/test/orm/inheritance/test_basic.py
@@ -1148,6 +1148,62 @@ class FlushTest(fixtures.MappedTest):
sess.flush()
assert user_roles.count().scalar() == 1
+
+class OptimizedGetOnDeferredTest(fixtures.MappedTest):
+ """test that the 'optimized get' path accommodates deferred columns."""
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ "a", metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True)
+ )
+ Table(
+ "b", metadata,
+ Column('id', Integer, ForeignKey('a.id'), primary_key=True),
+ Column('data', String(10))
+ )
+
+ @classmethod
+ def setup_classes(cls):
+ class A(cls.Basic):
+ pass
+
+ class B(A):
+ pass
+
+ @classmethod
+ def setup_mappers(cls):
+ A, B = cls.classes("A", "B")
+ a, b = cls.tables("a", "b")
+
+ mapper(A, a)
+ mapper(B, b, inherits=A, properties={
+ 'data': deferred(b.c.data),
+ 'expr': column_property(b.c.data + 'q', deferred=True)
+ })
+
+ def test_column_property(self):
+ A, B = self.classes("A", "B")
+ sess = Session()
+ b1 = B(data='x')
+ sess.add(b1)
+ sess.flush()
+
+ eq_(b1.expr, 'xq')
+
+ def test_expired_column(self):
+ A, B = self.classes("A", "B")
+ sess = Session()
+ b1 = B(data='x')
+ sess.add(b1)
+ sess.flush()
+ sess.expire(b1, ['data'])
+
+ eq_(b1.data, 'x')
+
+
class JoinedNoFKSortingTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
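The "optimized get" named in the docstring above is the load the ORM emits
when an attribute local to a joined-inheritance subtable is unloaded: it
selects from the child table alone rather than re-selecting the full
a JOIN b row. A sketch of the mapping style it must now accommodate,
assuming tables a and b as defined in the fixture:

    from sqlalchemy.orm import mapper, deferred, column_property

    mapper(A, a)
    mapper(B, b, inherits=A, properties={
        # both load lazily on first access; the optimized get should
        # emit a SELECT against table b only
        'data': deferred(b.c.data),
        'expr': column_property(b.c.data + 'q', deferred=True),
    })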
diff --git a/test/orm/inheritance/test_single.py b/test/orm/inheritance/test_single.py
index dbbe4c435..9f5d21a43 100644
--- a/test/orm/inheritance/test_single.py
+++ b/test/orm/inheritance/test_single.py
@@ -410,6 +410,31 @@ class RelationshipToSingleTest(testing.AssertsCompiledSQL, fixtures.MappedTest):
"AND employees_1.type IN (:type_1)"
)
+ def test_join_explicit_onclause_no_discriminator(self):
+ # test issue #3462
+ Company, Employee, Engineer = (
+ self.classes.Company,
+ self.classes.Employee,
+ self.classes.Engineer)
+ companies, employees = self.tables.companies, self.tables.employees
+
+ mapper(Company, companies, properties={
+ 'employees': relationship(Employee)
+ })
+ mapper(Employee, employees)
+ mapper(Engineer, inherits=Employee)
+
+ sess = create_session()
+ self.assert_compile(
+ sess.query(Company, Engineer.name).join(
+ Engineer, Company.company_id == Engineer.company_id),
+ "SELECT companies.company_id AS companies_company_id, "
+ "companies.name AS companies_name, "
+ "employees.name AS employees_name "
+ "FROM companies JOIN "
+ "employees ON companies.company_id = employees.company_id"
+ )
+
def test_outer_join_prop(self):
Company, Employee, Engineer = self.classes.Company,\
self.classes.Employee,\
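Issue #3462 concerns the single-table discriminator: when the ON clause is
spelled out explicitly, as above, no "employees.type IN" criteria is
appended. For contrast, a rough sketch against the same fixtures of a join
that targets the subclass without an explicit onclause, where the
discriminator criteria still applies (rendering approximate):

    sess.query(Company).join(Company.employees.of_type(Engineer))
    # ... ON companies.company_id = employees.company_id
    #     AND employees.type IN (:type_1)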
diff --git a/test/orm/test_query.py b/test/orm/test_query.py
index 62c97ec90..55af023b1 100644
--- a/test/orm/test_query.py
+++ b/test/orm/test_query.py
@@ -3390,7 +3390,8 @@ class WithTransientOnNone(_fixtures.FixtureTest, AssertsCompiledSQL):
)
-class SynonymTest(QueryTest):
+class SynonymTest(QueryTest, AssertsCompiledSQL):
+ __dialect__ = 'default'
@classmethod
def setup_mappers(cls):
@@ -3510,6 +3511,20 @@ class SynonymTest(QueryTest):
Order(description="order 1"), Order(description="order 3"),
Order(description="order 5")] == o
+ def test_froms_aliased_col(self):
+ Address, User = self.classes.Address, self.classes.User
+
+ sess = create_session()
+ ua = aliased(User)
+
+ q = sess.query(ua.name_syn).join(
+ Address, ua.id == Address.user_id)
+ self.assert_compile(
+ q,
+ "SELECT users_1.name AS users_1_name FROM "
+ "users AS users_1 JOIN addresses ON users_1.id = addresses.user_id"
+ )
+
class ImmediateTest(_fixtures.FixtureTest):
run_inserts = 'once'
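The new assertion verifies that a synonym accessed from an aliased class
renders against the alias ("users_1") rather than the base table. A
minimal sketch of the synonym mapping the fixture is assumed to provide:

    from sqlalchemy.orm import mapper, synonym, aliased

    # in the fixture, User.name_syn is mapped as a synonym for .name
    mapper(User, users, properties={'name_syn': synonym('name')})

    ua = aliased(User)
    # ua.name_syn now compiles as users_1.name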
diff --git a/test/orm/test_unitofworkv2.py b/test/orm/test_unitofworkv2.py
index 42b774b10..9e9f400be 100644
--- a/test/orm/test_unitofworkv2.py
+++ b/test/orm/test_unitofworkv2.py
@@ -1846,3 +1846,111 @@ class NoAttrEventInFlushTest(fixtures.MappedTest):
eq_(t1.id, 1)
eq_(t1.prefetch_val, 5)
eq_(t1.returning_val, 5)
+
+
+class TypeWoBoolTest(fixtures.MappedTest, testing.AssertsExecutionResults):
+ """test support for custom datatypes that return a non-__bool__ value
+    when compared via __eq__(), e.g. ticket 3469"""
+
+ @classmethod
+ def define_tables(cls, metadata):
+ from sqlalchemy import TypeDecorator
+
+ class NoBool(object):
+ def __nonzero__(self):
+ raise NotImplementedError("not supported")
+
+ class MyWidget(object):
+ def __init__(self, text):
+ self.text = text
+
+ def __eq__(self, other):
+ return NoBool()
+
+ cls.MyWidget = MyWidget
+
+ class MyType(TypeDecorator):
+ impl = String(50)
+
+ def process_bind_param(self, value, dialect):
+ if value is not None:
+ value = value.text
+ return value
+
+ def process_result_value(self, value, dialect):
+ if value is not None:
+ value = MyWidget(value)
+ return value
+
+ Table(
+ 'test', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('value', MyType),
+ Column('unrelated', String(50))
+ )
+
+ @classmethod
+ def setup_classes(cls):
+ class Thing(cls.Basic):
+ pass
+
+ @classmethod
+ def setup_mappers(cls):
+ Thing = cls.classes.Thing
+
+ mapper(Thing, cls.tables.test)
+
+ def test_update_against_none(self):
+ Thing = self.classes.Thing
+
+ s = Session()
+ s.add(Thing(value=self.MyWidget("foo")))
+ s.commit()
+
+ t1 = s.query(Thing).first()
+ t1.value = None
+ s.commit()
+
+ eq_(
+ s.query(Thing.value).scalar(), None
+ )
+
+ def test_update_against_something_else(self):
+ Thing = self.classes.Thing
+
+ s = Session()
+ s.add(Thing(value=self.MyWidget("foo")))
+ s.commit()
+
+ t1 = s.query(Thing).first()
+ t1.value = self.MyWidget("bar")
+ s.commit()
+
+ eq_(
+ s.query(Thing.value).scalar().text, "bar"
+ )
+
+ def test_no_update_no_change(self):
+ Thing = self.classes.Thing
+
+ s = Session()
+ s.add(Thing(value=self.MyWidget("foo"), unrelated='unrelated'))
+ s.commit()
+
+ t1 = s.query(Thing).first()
+ t1.unrelated = 'something else'
+
+ self.assert_sql_execution(
+ testing.db,
+ s.commit,
+ CompiledSQL(
+ "UPDATE test SET unrelated=:unrelated "
+ "WHERE test.id = :test_id",
+ [{'test_id': 1, 'unrelated': 'something else'}]
+ ),
+ )
+
+ eq_(
+ s.query(Thing.value).scalar().text, "foo"
+ )
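The failure mode in ticket 3469, in isolation: during flush, the unit of
work compares an attribute's old and new values with ==; a type whose
__eq__ returns a non-boolean object (NumPy arrays are the well-known case)
would previously blow up when that result was truth-tested. A
self-contained sketch:

    class NoBool(object):
        def __nonzero__(self):            # Python 2 truth hook
            raise NotImplementedError("not supported")
        __bool__ = __nonzero__            # Python 3 spelling

    class MyWidget(object):
        def __eq__(self, other):
            return NoBool()

    result = MyWidget() == MyWidget()     # fine: returns a NoBool
    # bool(result) raises NotImplementedError; the flush logic now
    # avoids assuming "old == new" yields a plain True/False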
diff --git a/test/orm/test_utils.py b/test/orm/test_utils.py
index ae225ad92..168cee19c 100644
--- a/test/orm/test_utils.py
+++ b/test/orm/test_utils.py
@@ -222,6 +222,56 @@ class AliasedClassTest(fixtures.TestBase, AssertsCompiledSQL):
"WHERE point_1.x > point.x"
)
+ def test_parententity_vs_parentmapper(self):
+ class Point(object):
+ pass
+
+ self._fixture(Point, properties={
+ 'x_syn': synonym("x")
+ })
+ pa = aliased(Point)
+
+ is_(Point.x_syn._parententity, inspect(Point))
+ is_(Point.x._parententity, inspect(Point))
+ is_(Point.x_syn._parentmapper, inspect(Point))
+ is_(Point.x._parentmapper, inspect(Point))
+
+ is_(
+ Point.x_syn.__clause_element__()._annotations['parententity'],
+ inspect(Point))
+ is_(
+ Point.x.__clause_element__()._annotations['parententity'],
+ inspect(Point))
+ is_(
+ Point.x_syn.__clause_element__()._annotations['parentmapper'],
+ inspect(Point))
+ is_(
+ Point.x.__clause_element__()._annotations['parentmapper'],
+ inspect(Point))
+
+ pa = aliased(Point)
+
+ is_(pa.x_syn._parententity, inspect(pa))
+ is_(pa.x._parententity, inspect(pa))
+ is_(pa.x_syn._parentmapper, inspect(Point))
+ is_(pa.x._parentmapper, inspect(Point))
+
+ is_(
+ pa.x_syn.__clause_element__()._annotations['parententity'],
+ inspect(pa)
+ )
+ is_(
+ pa.x.__clause_element__()._annotations['parententity'],
+ inspect(pa)
+ )
+ is_(
+ pa.x_syn.__clause_element__()._annotations['parentmapper'],
+ inspect(Point))
+ is_(
+ pa.x.__clause_element__()._annotations['parentmapper'],
+ inspect(Point))
+
+
class IdentityKeyTest(_fixtures.FixtureTest):
run_inserts = None
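The distinction being pinned down above: for an aliased class,
``_parententity`` points to the alias's own inspection object, while
``_parentmapper`` continues to point to the underlying Mapper. In short,
assuming the mapped Point fixture from the test:

    from sqlalchemy import inspect
    from sqlalchemy.orm import aliased

    pa = aliased(Point)
    assert pa.x._parententity is inspect(pa)       # the AliasedInsp
    assert pa.x._parentmapper is inspect(Point)    # the base Mapper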
diff --git a/test/orm/test_versioning.py b/test/orm/test_versioning.py
index 8348cb588..d46799c5a 100644
--- a/test/orm/test_versioning.py
+++ b/test/orm/test_versioning.py
@@ -355,6 +355,97 @@ class VersioningTest(fixtures.MappedTest):
)
+class NoBumpOnRelationshipTest(fixtures.MappedTest):
+ __backend__ = True
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'a', metadata,
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('version_id', Integer)
+ )
+ Table(
+ 'b', metadata,
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('a_id', ForeignKey('a.id'))
+ )
+
+ @classmethod
+ def setup_classes(cls):
+ class A(cls.Basic):
+ pass
+
+ class B(cls.Basic):
+ pass
+
+ def _run_test(self, auto_version_counter=True):
+ A, B = self.classes('A', 'B')
+ s = Session()
+ if auto_version_counter:
+ a1 = A()
+ else:
+ a1 = A(version_id=1)
+ s.add(a1)
+ s.commit()
+ eq_(a1.version_id, 1)
+
+ b1 = B()
+ b1.a = a1
+ s.add(b1)
+ s.commit()
+
+ eq_(a1.version_id, 1)
+
+ def test_plain_counter(self):
+ A, B = self.classes('A', 'B')
+ a, b = self.tables('a', 'b')
+
+ mapper(
+ A, a, properties={
+ 'bs': relationship(B, backref='a')
+ },
+ version_id_col=a.c.version_id,
+ )
+ mapper(B, b)
+
+ self._run_test()
+
+ def test_functional_counter(self):
+ A, B = self.classes('A', 'B')
+ a, b = self.tables('a', 'b')
+
+ mapper(
+ A, a, properties={
+ 'bs': relationship(B, backref='a')
+ },
+ version_id_col=a.c.version_id,
+ version_id_generator=lambda num: (num or 0) + 1
+ )
+ mapper(B, b)
+
+ self._run_test()
+
+ def test_no_counter(self):
+ A, B = self.classes('A', 'B')
+ a, b = self.tables('a', 'b')
+
+ mapper(
+ A, a, properties={
+ 'bs': relationship(B, backref='a')
+ },
+ version_id_col=a.c.version_id,
+ version_id_generator=False
+ )
+ mapper(B, b)
+
+ self._run_test(False)
+
+
class ColumnTypeTest(fixtures.MappedTest):
__backend__ = True
@@ -587,6 +678,53 @@ class AlternateGeneratorTest(fixtures.MappedTest):
sess2.commit
+class PlainInheritanceTest(fixtures.MappedTest):
+ __backend__ = True
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'base', metadata,
+ Column(
+ 'id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('version_id', Integer, nullable=True),
+ Column('data', String(50))
+ )
+ Table(
+ 'sub', metadata,
+ Column('id', Integer, ForeignKey('base.id'), primary_key=True),
+ Column('sub_data', String(50))
+ )
+
+ @classmethod
+ def setup_classes(cls):
+
+ class Base(cls.Basic):
+ pass
+
+ class Sub(Base):
+ pass
+
+ def test_update_child_table_only(self):
+ Base, sub, base, Sub = (
+ self.classes.Base, self.tables.sub, self.tables.base,
+ self.classes.Sub)
+
+ mapper(Base, base, version_id_col=base.c.version_id)
+ mapper(Sub, sub, inherits=Base)
+
+ s = Session()
+ s1 = Sub(data='b', sub_data='s')
+ s.add(s1)
+ s.commit()
+
+ s1.sub_data = 's2'
+ s.commit()
+
+ eq_(s1.version_id, 2)
+
+
class InheritanceTwoVersionIdsTest(fixtures.MappedTest):
"""Test versioning where both parent/child table have a
versioning column.
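All three variants above funnel through _run_test(): the point is that
merely appending a B to A.bs must not UPDATE the parent row or bump its
version_id. For the "no counter" variant, a sketch of the mapping style,
where the application manages the counter value itself:

    mapper(A, a, version_id_col=a.c.version_id,
           version_id_generator=False,
           properties={'bs': relationship(B, backref='a')})
    mapper(B, b)

    a1 = A(version_id=1)    # caller supplies the value
    # UPDATEs are still guarded with
    # "WHERE a.version_id = :current_version"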
diff --git a/test/sql/test_defaults.py b/test/sql/test_defaults.py
index c154daa22..7f4d5d30a 100644
--- a/test/sql/test_defaults.py
+++ b/test/sql/test_defaults.py
@@ -793,6 +793,36 @@ class SequenceDDLTest(fixtures.TestBase, testing.AssertsCompiledSQL):
)
self.assert_compile(
+ CreateSequence(Sequence(
+ 'foo_seq', increment=2, start=0, minvalue=0)),
+ "CREATE SEQUENCE foo_seq INCREMENT BY 2 START WITH 0 MINVALUE 0",
+ )
+
+ self.assert_compile(
+ CreateSequence(Sequence(
+ 'foo_seq', increment=2, start=1, maxvalue=5)),
+ "CREATE SEQUENCE foo_seq INCREMENT BY 2 START WITH 1 MAXVALUE 5",
+ )
+
+ self.assert_compile(
+ CreateSequence(Sequence(
+ 'foo_seq', increment=2, start=1, nomaxvalue=True)),
+ "CREATE SEQUENCE foo_seq INCREMENT BY 2 START WITH 1 NO MAXVALUE",
+ )
+
+ self.assert_compile(
+ CreateSequence(Sequence(
+ 'foo_seq', increment=2, start=0, nominvalue=True)),
+ "CREATE SEQUENCE foo_seq INCREMENT BY 2 START WITH 0 NO MINVALUE",
+ )
+
+ self.assert_compile(
+ CreateSequence(Sequence(
+ 'foo_seq', start=1, maxvalue=10, cycle=True)),
+ "CREATE SEQUENCE foo_seq START WITH 1 MAXVALUE 10 CYCLE",
+ )
+
+ self.assert_compile(
DropSequence(Sequence('foo_seq')),
"DROP SEQUENCE foo_seq",
)
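The new assertions cover the Sequence options exercised here (start,
increment, minvalue/maxvalue, nominvalue/nomaxvalue, cycle). A quick
sketch of how they combine, using the default DDL compiler:

    from sqlalchemy import Sequence
    from sqlalchemy.schema import CreateSequence

    seq = Sequence('foo_seq', start=1, increment=2,
                   maxvalue=5, cycle=True)
    print(CreateSequence(seq))
    # CREATE SEQUENCE foo_seq INCREMENT BY 2 START WITH 1 MAXVALUE 5 CYCLE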
diff --git a/test/sql/test_functions.py b/test/sql/test_functions.py
index ec8d9b5c0..ccc9b2dcd 100644
--- a/test/sql/test_functions.py
+++ b/test/sql/test_functions.py
@@ -9,12 +9,12 @@ from sqlalchemy.sql.compiler import BIND_TEMPLATES
from sqlalchemy.testing.engines import all_dialects
from sqlalchemy import types as sqltypes
from sqlalchemy.sql import functions
-from sqlalchemy.sql.functions import GenericFunction
+from sqlalchemy.sql.functions import GenericFunction, FunctionElement
import decimal
from sqlalchemy import testing
from sqlalchemy.testing import fixtures, AssertsCompiledSQL, engines
from sqlalchemy.dialects import sqlite, postgresql, mysql, oracle
-
+from sqlalchemy.testing import assert_raises_message
table1 = table('mytable',
column('myid', Integer),
@@ -477,6 +477,18 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
"AS anon_1 FROM mytable"
)
+ def test_incorrect_none_type(self):
+ class MissingType(FunctionElement):
+ name = 'mt'
+ type = None
+
+ assert_raises_message(
+ TypeError,
+ "Object None associated with '.type' attribute is "
+ "not a TypeEngine class or object",
+ MissingType().compile
+ )
+
class ExecuteTest(fixtures.TestBase):
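The new test asserts a clean TypeError when a custom FunctionElement
leaves ``.type`` as None. For contrast, a minimal well-formed subclass;
the name "as_money" is purely illustrative:

    from sqlalchemy import Numeric
    from sqlalchemy.sql.functions import FunctionElement

    class as_money(FunctionElement):
        name = 'as_money'
        type = Numeric()    # a TypeEngine class or instance, not None

    expr = as_money()
    # expr.type drives result typing; with type = None, compiling
    # now raises the TypeError asserted above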
diff --git a/test/sql/test_operators.py b/test/sql/test_operators.py
index 0985020d1..bb4cb1bf1 100644
--- a/test/sql/test_operators.py
+++ b/test/sql/test_operators.py
@@ -825,6 +825,64 @@ class ConjunctionTest(fixtures.TestBase, testing.AssertsCompiledSQL):
"SELECT false AS anon_1, false AS anon_2"
)
+ def test_is_true_literal(self):
+ c = column('x', Boolean)
+ self.assert_compile(
+ c.is_(True),
+ "x IS true"
+ )
+
+ def test_is_false_literal(self):
+ c = column('x', Boolean)
+ self.assert_compile(
+ c.is_(False),
+ "x IS false"
+ )
+
+ def test_and_false_literal_leading(self):
+ self.assert_compile(
+ and_(False, True),
+ "false"
+ )
+
+ self.assert_compile(
+ and_(False, False),
+ "false"
+ )
+
+ def test_and_true_literal_leading(self):
+ self.assert_compile(
+ and_(True, True),
+ "true"
+ )
+
+ self.assert_compile(
+ and_(True, False),
+ "false"
+ )
+
+ def test_or_false_literal_leading(self):
+ self.assert_compile(
+ or_(False, True),
+ "true"
+ )
+
+ self.assert_compile(
+ or_(False, False),
+ "false"
+ )
+
+ def test_or_true_literal_leading(self):
+ self.assert_compile(
+ or_(True, True),
+ "true"
+ )
+
+ self.assert_compile(
+ or_(True, False),
+ "true"
+ )
+
class OperatorPrecedenceTest(fixtures.TestBase, testing.AssertsCompiledSQL):
__dialect__ = 'default'
@@ -1327,6 +1385,9 @@ class MathOperatorTest(fixtures.TestBase, testing.AssertsCompiledSQL):
else:
self._test_math_op(operator.div, '/')
+ def test_math_op_mod(self):
+ self._test_math_op(operator.mod, '%')
+
class ComparisonOperatorTest(fixtures.TestBase, testing.AssertsCompiledSQL):
__dialect__ = 'default'
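These additions pin down the coercion of plain Python True/False inside
and_()/or_(): the literals become true()/false() constructs and
constant-fold at compile time. A sketch using the default dialect, which
renders native booleans:

    from sqlalchemy import and_, or_

    print(and_(False, True))    # false
    print(or_(False, True))     # true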
diff --git a/test/sql/test_query.py b/test/sql/test_query.py
index 98f375018..0313a9cd0 100644
--- a/test/sql/test_query.py
+++ b/test/sql/test_query.py
@@ -1,4 +1,5 @@
-from sqlalchemy.testing import eq_, assert_raises_message, assert_raises, is_
+from sqlalchemy.testing import eq_, assert_raises_message, assert_raises, \
+ is_, in_, not_in_
from sqlalchemy import testing
from sqlalchemy.testing import fixtures, engines
from sqlalchemy import util
@@ -975,14 +976,22 @@ class QueryTest(fixtures.TestBase):
# result.BufferedColumnResultProxy
conn = testing.db.connect()
- for meth in ('fetchone', 'fetchall', 'first', 'scalar', 'fetchmany'):
+ for meth in [
+ lambda r: r.fetchone(),
+ lambda r: r.fetchall(),
+ lambda r: r.first(),
+ lambda r: r.scalar(),
+ lambda r: r.fetchmany(),
+ lambda r: r._getter('user'),
+ lambda r: r._has_key('user'),
+ ]:
trans = conn.begin()
result = conn.execute(users.insert(), user_id=1)
assert_raises_message(
exc.ResourceClosedError,
"This result object does not return rows. "
"It has been closed automatically.",
- getattr(result, meth),
+ meth, result,
)
trans.rollback()
@@ -1018,6 +1027,11 @@ class QueryTest(fixtures.TestBase):
).first()
eq_(list(row.keys()), ["case_insensitive", "CaseSensitive"])
+
+ in_("case_insensitive", row._keymap)
+ in_("CaseSensitive", row._keymap)
+ not_in_("casesensitive", row._keymap)
+
eq_(row["case_insensitive"], 1)
eq_(row["CaseSensitive"], 2)
@@ -1030,6 +1044,32 @@ class QueryTest(fixtures.TestBase):
lambda: row["casesensitive"]
)
+ def test_row_case_sensitive_unoptimized(self):
+ ins_db = engines.testing_engine(options={"case_sensitive": True})
+ row = ins_db.execute(
+ select([
+ literal_column("1").label("case_insensitive"),
+ literal_column("2").label("CaseSensitive"),
+ text("3 AS screw_up_the_cols")
+ ])
+ ).first()
+
+ eq_(
+ list(row.keys()),
+ ["case_insensitive", "CaseSensitive", "screw_up_the_cols"])
+
+ in_("case_insensitive", row._keymap)
+ in_("CaseSensitive", row._keymap)
+ not_in_("casesensitive", row._keymap)
+
+ eq_(row["case_insensitive"], 1)
+ eq_(row["CaseSensitive"], 2)
+ eq_(row["screw_up_the_cols"], 3)
+
+ assert_raises(KeyError, lambda: row["Case_insensitive"])
+ assert_raises(KeyError, lambda: row["casesensitive"])
+ assert_raises(KeyError, lambda: row["screw_UP_the_cols"])
+
def test_row_case_insensitive(self):
ins_db = engines.testing_engine(options={"case_sensitive": False})
row = ins_db.execute(
@@ -1040,10 +1080,40 @@ class QueryTest(fixtures.TestBase):
).first()
eq_(list(row.keys()), ["case_insensitive", "CaseSensitive"])
+
+ in_("case_insensitive", row._keymap)
+ in_("CaseSensitive", row._keymap)
+ in_("casesensitive", row._keymap)
+
+ eq_(row["case_insensitive"], 1)
+ eq_(row["CaseSensitive"], 2)
+ eq_(row["Case_insensitive"], 1)
+ eq_(row["casesensitive"], 2)
+
+ def test_row_case_insensitive_unoptimized(self):
+ ins_db = engines.testing_engine(options={"case_sensitive": False})
+ row = ins_db.execute(
+ select([
+ literal_column("1").label("case_insensitive"),
+ literal_column("2").label("CaseSensitive"),
+ text("3 AS screw_up_the_cols")
+ ])
+ ).first()
+
+ eq_(
+ list(row.keys()),
+ ["case_insensitive", "CaseSensitive", "screw_up_the_cols"])
+
+ in_("case_insensitive", row._keymap)
+ in_("CaseSensitive", row._keymap)
+ in_("casesensitive", row._keymap)
+
eq_(row["case_insensitive"], 1)
eq_(row["CaseSensitive"], 2)
+ eq_(row["screw_up_the_cols"], 3)
eq_(row["Case_insensitive"], 1)
eq_(row["casesensitive"], 2)
+ eq_(row["screw_UP_the_cols"], 3)
def test_row_as_args(self):
users.insert().execute(user_id=1, user_name='john')
@@ -1241,10 +1311,38 @@ class QueryTest(fixtures.TestBase):
def test_keys(self):
users.insert().execute(user_id=1, user_name='foo')
- r = users.select().execute()
- eq_([x.lower() for x in list(r.keys())], ['user_id', 'user_name'])
- r = r.first()
- eq_([x.lower() for x in list(r.keys())], ['user_id', 'user_name'])
+ result = users.select().execute()
+ eq_(
+ result.keys(),
+ ['user_id', 'user_name']
+ )
+ row = result.first()
+ eq_(
+ row.keys(),
+ ['user_id', 'user_name']
+ )
+
+ def test_keys_anon_labels(self):
+ """test [ticket:3483]"""
+
+ users.insert().execute(user_id=1, user_name='foo')
+ result = testing.db.execute(
+ select([
+ users.c.user_id,
+ users.c.user_name.label(None),
+ func.count(literal_column('1'))]).
+ group_by(users.c.user_id, users.c.user_name)
+ )
+
+ eq_(
+ result.keys(),
+ ['user_id', 'user_name_1', 'count_1']
+ )
+ row = result.first()
+ eq_(
+ row.keys(),
+ ['user_id', 'user_name_1', 'count_1']
+ )
def test_items(self):
users.insert().execute(user_id=1, user_name='foo')
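The keys() behavior asserted in test_keys_anon_labels is the user-facing
form of the fix: anonymous labels come back under their rendered names. A
condensed sketch, assuming the users fixture table and an open connection
named conn:

    from sqlalchemy import select, func, literal_column

    stmt = select([
        users.c.user_id,
        users.c.user_name.label(None),      # anonymous label
        func.count(literal_column('1')),
    ]).group_by(users.c.user_id, users.c.user_name)

    result = conn.execute(stmt)
    result.keys()   # ['user_id', 'user_name_1', 'count_1'], not the
                    # un-adjusted internal anonymous symbol names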